@@ -129,12 +129,16 @@ impl SyncContext {
129129 async fn write_metadata ( & mut self ) -> Result < ( ) > {
130130 let path = format ! ( "{}-info" , self . db_path) ;
131131
132- let contents = serde_json:: to_vec ( & MetadataJson {
132+ let mut metadata = MetadataJson {
133+ hash : 0 ,
133134 version : METADATA_VERSION ,
134135 durable_frame_num : self . durable_frame_num ,
135136 generation : self . generation ,
136- } )
137- . unwrap ( ) ;
137+ } ;
138+
139+ metadata. set_hash ( ) ;
140+
141+ let contents = serde_json:: to_vec ( & metadata) . unwrap ( ) ;
138142
139143 atomic_write ( path, & contents[ ..] ) . await . unwrap ( ) ;
140144
@@ -153,6 +157,9 @@ impl SyncContext {
153157
154158 let metadata = serde_json:: from_slice :: < MetadataJson > ( & contents[ ..] ) . unwrap ( ) ;
155159
160+ metadata. verify_hash ( ) ?;
161+
162+ // TODO(lucio): convert this into a proper error
156163 assert_eq ! (
157164 metadata. version, METADATA_VERSION ,
158165 "Reading metadata from a different version than expected"
@@ -167,11 +174,44 @@ impl SyncContext {
167174
168175#[ derive( serde:: Serialize , serde:: Deserialize ) ]
169176struct MetadataJson {
177+ hash : u32 ,
170178 version : u32 ,
171179 durable_frame_num : u32 ,
172180 generation : u32 ,
173181}
174182
183+ impl MetadataJson {
184+ fn calculate_hash ( & self ) -> u32 {
185+ let mut hasher = crc32fast:: Hasher :: new ( ) ;
186+
187+ // Hash each field in a consistent order
188+ hasher. update ( & self . version . to_le_bytes ( ) ) ;
189+ hasher. update ( & self . durable_frame_num . to_le_bytes ( ) ) ;
190+ hasher. update ( & self . generation . to_le_bytes ( ) ) ;
191+
192+ hasher. finalize ( )
193+ }
194+
195+ fn set_hash ( & mut self ) {
196+ self . hash = self . calculate_hash ( ) ;
197+ }
198+
199+ fn verify_hash ( & self ) -> Result < ( ) > {
200+ let calculated_hash = self . calculate_hash ( ) ;
201+
202+ if self . hash == calculated_hash {
203+ Ok ( ( ) )
204+ } else {
205+ // TODO(lucio): convert this into a proper error rather than
206+ // an panic.
207+ panic ! (
208+ "metadata hash mismatch, expected={}, got={}" ,
209+ self . hash, calculated_hash
210+ ) ;
211+ }
212+ }
213+ }
214+
175215async fn atomic_write < P : AsRef < Path > > ( path : P , data : & [ u8 ] ) -> Result < ( ) > {
176216 // Create a temporary file in the same directory as the target file
177217 let directory = path. as_ref ( ) . parent ( ) . unwrap ( ) ;
@@ -195,3 +235,61 @@ async fn atomic_write<P: AsRef<Path>>(path: P, data: &[u8]) -> Result<()> {
195235
196236 Ok ( ( ) )
197237}
238+
// TODO(lucio): for the tests to work we need proper error handling which
// will be done in follow up.
#[cfg(test)]
mod tests {
    use super::*;

    /// Builds a metadata value with fixed fields and an unset (zero) hash.
    fn sample_metadata() -> MetadataJson {
        MetadataJson {
            hash: 0,
            version: 1,
            durable_frame_num: 100,
            generation: 5,
        }
    }

    #[test]
    #[ignore]
    fn test_hash_verification() {
        let mut metadata = sample_metadata();

        // An unset hash must not verify...
        assert!(metadata.verify_hash().is_err());

        // ...but a freshly stamped one must.
        metadata.set_hash();
        assert!(metadata.verify_hash().is_ok());
    }

    #[test]
    #[ignore]
    fn test_hash_tampering() {
        let mut metadata = sample_metadata();
        metadata.set_hash();

        // Tampering with any single field must invalidate the checksum;
        // restoring the value must make it valid again.
        metadata.version = 2;
        assert!(metadata.verify_hash().is_err());
        metadata.version = 1;

        metadata.generation = 42;
        assert!(metadata.verify_hash().is_err());
        metadata.generation = 5;

        metadata.durable_frame_num = 42;
        assert!(metadata.verify_hash().is_err());
        metadata.durable_frame_num = 100;

        // All fields back to their stamped values: verification passes.
        assert!(metadata.verify_hash().is_ok());
    }
}
0 commit comments