@@ -63,7 +63,7 @@ pub struct Outcome {
     pub pack_size: u64,
 }
 
-#[derive(Debug, PartialEq, Eq, Hash, Ord, PartialOrd, Clone)]
+#[derive(Debug, PartialEq, Eq, Hash, Ord, PartialOrd, Clone, Copy)]
 #[cfg_attr(feature = "serde1", derive(serde::Serialize, serde::Deserialize))]
 pub enum SafetyCheck {
     /// Don't verify the validity of the checksums stored in the index and pack file
@@ -75,13 +75,36 @@ pub enum SafetyCheck {
     /// All of the above, and only log object decode errors.
     ///
     /// Useful if there is a damaged pack and you would like to traverse as many objects as possible.
-    SkipFileAndObjectChecksumVerificationNoAbortOnDecodeError,
+    SkipFileAndObjectChecksumVerificationAndNoAbortOnDecodeError,
 
     /// Perform all available safety checks before operating on the pack and
     /// abort if any of them fails
     All,
 }
 
+impl SafetyCheck {
+    pub fn file_checksum(&self) -> bool {
+        match self {
+            SafetyCheck::All => true,
+            _ => false,
+        }
+    }
+    pub fn object_checksum(&self) -> bool {
+        match self {
+            SafetyCheck::All | SafetyCheck::SkipFileChecksumVerification => true,
+            _ => false,
+        }
+    }
+    pub fn fatal_decode_error(&self) -> bool {
+        match self {
+            SafetyCheck::All
+            | SafetyCheck::SkipFileChecksumVerification
+            | SafetyCheck::SkipFileAndObjectChecksumVerification => true,
+            SafetyCheck::SkipFileAndObjectChecksumVerificationAndNoAbortOnDecodeError => false,
+        }
+    }
+}
+
 impl Default for SafetyCheck {
     fn default() -> Self {
         SafetyCheck::All
@@ -136,7 +159,7 @@ impl index::File {
         Context {
             algorithm,
             thread_limit,
-            check: _,
+            check,
         }: Context,
         progress: Option<P>,
         new_processor: impl Fn() -> Processor + Send + Sync,
@@ -156,38 +179,45 @@ impl index::File {
     {
         let mut root = progress::DoOrDiscard::from(progress);
 
-        let progress = root.add_child("Sha1 of index");
-        let verify_self = move || self.verify_checksum(progress);
+        let id = if check.file_checksum() {
+            let progress = root.add_child("Sha1 of index");
+            let verify_self = move || self.verify_checksum(progress);
 
-        if self.pack_checksum() != pack.checksum() {
-            return Err(Error::PackMismatch {
-                actual: pack.checksum(),
-                expected: self.pack_checksum(),
-            });
-        }
-        let mut progress = root.add_child("Sha1 of pack");
-        let (pack_res, id) = parallel::join(
-            move || {
-                let throughput = TimeThroughput::new(pack.data_len());
-                let res = pack.verify_checksum();
-                progress.done(throughput);
-                res
-            },
-            verify_self,
-        );
-        pack_res?;
-        let id = id?;
+            if self.pack_checksum() != pack.checksum() {
+                return Err(Error::PackMismatch {
+                    actual: pack.checksum(),
+                    expected: self.pack_checksum(),
+                });
+            }
+            let mut progress = root.add_child("Sha1 of pack");
+            let (pack_res, id) = parallel::join(
+                move || {
+                    let throughput = TimeThroughput::new(pack.data_len());
+                    let res = pack.verify_checksum();
+                    progress.done(throughput);
+                    res
+                },
+                verify_self,
+            );
+            pack_res?;
+            id?
+        } else {
+            self.index_checksum()
+        };
 
         match algorithm {
-            Algorithm::Lookup => self.traverse_with_lookup(thread_limit, new_processor, make_cache, root, pack),
-            Algorithm::DeltaTreeLookup => self.traverse_with_index_lookup(thread_limit, new_processor, root, pack),
+            Algorithm::Lookup => self.traverse_with_lookup(check, thread_limit, new_processor, make_cache, root, pack),
+            Algorithm::DeltaTreeLookup => {
+                self.traverse_with_index_lookup(check, thread_limit, new_processor, root, pack)
+            }
         }
         .map(|stats| (id, stats))
     }
 
     #[allow(clippy::too_many_arguments)]
     pub(crate) fn process_entry_dispatch<C, P>(
         &self,
+        check: SafetyCheck,
         pack: &pack::data::File,
         cache: &mut C,
         buf: &mut Vec<u8>,
@@ -223,32 +253,34 @@ impl index::File {
         let object_kind = entry_stats.kind;
         let consumed_input = entry_stats.compressed_size;
 
-        let header_size = crate::loose::object::header::encode(object_kind, buf.len() as u64, &mut header_buf[..])
-            .expect("header buffer to be big enough");
-        let mut hasher = git_features::hash::Sha1::default();
-        hasher.update(&header_buf[..header_size]);
-        hasher.update(buf.as_slice());
+        if check.object_checksum() {
+            let header_size = crate::loose::object::header::encode(object_kind, buf.len() as u64, &mut header_buf[..])
+                .expect("header buffer to be big enough");
+            let mut hasher = git_features::hash::Sha1::default();
+            hasher.update(&header_buf[..header_size]);
+            hasher.update(buf.as_slice());
 
-        let actual_oid = owned::Id::new_sha1(hasher.digest());
-        if actual_oid != index_entry.oid {
-            return Err(Error::PackObjectMismatch {
-                actual: actual_oid,
-                expected: index_entry.oid,
-                offset: index_entry.pack_offset,
-                kind: object_kind,
-            });
-        }
-        if let Some(desired_crc32) = index_entry.crc32 {
-            let header_size = (pack_entry_data_offset - index_entry.pack_offset) as usize;
-            let actual_crc32 = pack.entry_crc32(index_entry.pack_offset, header_size + consumed_input);
-            if actual_crc32 != desired_crc32 {
-                return Err(Error::Crc32Mismatch {
-                    actual: actual_crc32,
-                    expected: desired_crc32,
+            let actual_oid = owned::Id::new_sha1(hasher.digest());
+            if actual_oid != index_entry.oid {
+                return Err(Error::PackObjectMismatch {
+                    actual: actual_oid,
+                    expected: index_entry.oid,
                     offset: index_entry.pack_offset,
                     kind: object_kind,
                 });
             }
+            if let Some(desired_crc32) = index_entry.crc32 {
+                let header_size = (pack_entry_data_offset - index_entry.pack_offset) as usize;
+                let actual_crc32 = pack.entry_crc32(index_entry.pack_offset, header_size + consumed_input);
+                if actual_crc32 != desired_crc32 {
+                    return Err(Error::Crc32Mismatch {
+                        actual: actual_crc32,
+                        expected: desired_crc32,
+                        offset: index_entry.pack_offset,
+                        kind: object_kind,
+                    });
+                }
+            }
         }
         processor(object_kind, buf.as_slice(), &index_entry, &entry_stats, progress)?;
         Ok(entry_stats)
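Side note, not part of the change: the three helpers on SafetyCheck are the single place that decides which checks each level enables, and the rest of the diff only consults them (file_checksum gates the index/pack SHA-1 work, object_checksum gates per-object SHA-1 and CRC32 verification, fatal_decode_error decides whether decode errors abort). A quick sketch of a test restating those match arms; the module and test names are made up here, and it assumes it sits next to SafetyCheck so the type is in scope:

    #[cfg(test)]
    mod safety_check_sketch {
        use super::SafetyCheck;

        #[test]
        fn levels_gate_the_expected_checks() {
            use SafetyCheck::*;
            // `All` is the Default and keeps every check enabled.
            assert_eq!(SafetyCheck::default(), All);
            assert!(All.file_checksum() && All.object_checksum() && All.fatal_decode_error());
            // Skipping the file checksums still verifies each object's SHA-1 and CRC32.
            assert!(!SkipFileChecksumVerification.file_checksum());
            assert!(SkipFileChecksumVerification.object_checksum());
            // Skipping object checksums still treats decode errors as fatal...
            assert!(!SkipFileAndObjectChecksumVerification.object_checksum());
            assert!(SkipFileAndObjectChecksumVerification.fatal_decode_error());
            // ...while the most permissive level only logs decode errors and keeps traversing.
            assert!(!SkipFileAndObjectChecksumVerificationAndNoAbortOnDecodeError.fatal_decode_error());
        }
    }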