This repository was archived by the owner on Sep 8, 2025. It is now read-only.
File tree: 1 file changed, 36 insertions(+), 0 deletions(-)
lines changed Original file line number Diff line number Diff line change @@ -322,6 +322,9 @@ def main(args):
322322 if args .justblocks :
323323 return
324324
325+ scan_state (gethdb , leveldb )
326+ return
327+
325328 state_root = canonical_head .state_root
326329 logger .info (f'starting state trie import: { humanize_hash (state_root )} ' )
327330
@@ -357,6 +360,39 @@ def main(args):
357360 loger .info ('successfully imported state trie and all storage tries' )
358361
359362
def scan_state(gethdb: GethDatabase, trinitydb: LevelDB):
    """
    Bulk-import state by copying every database entry that could plausibly be a
    state-trie node (any 32-byte key). This over-copies, but avoids the much
    slower alternative of walking the state trie node by node.
    """
    logger.debug('scan_state: bulk-importing state entries')

    kv_pairs = gethdb.db.iterator(
        start=b'\x00' * 32,
        stop=b'\xff' * 32,
        include_start=True,
        include_stop=True,
    )

    copied = 0
    skipped = 0
    # 2-byte prefix marking how far through the keyspace we have reported progress.
    progress = b'\x00\x00'

    for key, value in kv_pairs:
        # Trie nodes are keyed by a 32-byte hash; anything else is unrelated data.
        if len(key) != 32:
            skipped += 1
            continue

        trinitydb[key] = value
        copied += 1

        # Log each time the scan crosses into (or past) the current 2-byte bucket.
        if key >= progress:
            logger.debug(f'imported: {progress.hex()} skipped={skipped}')
            if progress == b'\xff\xff':
                break
            progress = (int.from_bytes(progress, 'big') + 1).to_bytes(2, 'big')

    logger.info(f'scan_state: successfully imported {copied} state entries')
395+
360396
361397if __name__ == "__main__" :
362398 logging .basicConfig (
You can’t perform that action at this time.
0 commit comments