@@ -72,21 +72,6 @@ struct SyntaxContextData {
 }
 
 impl SyntaxContextData {
-    fn new(
-        (parent, outer_expn, outer_transparency): SyntaxContextKey,
-        opaque: SyntaxContext,
-        opaque_and_semiopaque: SyntaxContext,
-    ) -> SyntaxContextData {
-        SyntaxContextData {
-            outer_expn,
-            outer_transparency,
-            parent,
-            opaque,
-            opaque_and_semiopaque,
-            dollar_crate_name: kw::DollarCrate,
-        }
-    }
-
     fn root() -> SyntaxContextData {
         SyntaxContextData {
             outer_expn: ExpnId::root(),
@@ -140,7 +125,7 @@ impl !PartialOrd for LocalExpnId {}
 /// with a non-default mode. With this check in place, we can avoid the need
 /// to maintain separate versions of `ExpnData` hashes for each permutation
 /// of `HashingControls` settings.
-fn assert_default_hashing_controls<CTX: HashStableContext>(ctx: &CTX, msg: &str) {
+fn assert_default_hashing_controls(ctx: &impl HashStableContext, msg: &str) {
     match ctx.hashing_controls() {
         // Note that we require that `hash_spans` be set according to the global
         // `-Z incremental-ignore-spans` option. Normally, this option is disabled,
@@ -408,7 +393,7 @@ impl HygieneData {
         }
     }
 
-    fn with<T, F: FnOnce(&mut HygieneData) -> T>(f: F) -> T {
+    fn with<R>(f: impl FnOnce(&mut HygieneData) -> R) -> R {
         with_session_globals(|session_globals| f(&mut session_globals.hygiene_data.borrow_mut()))
     }
 
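
The two hunks above make the same simplification: a generic parameter that only constrains one argument is rewritten as `impl Trait` in argument position, which shortens the signature without changing what callers can pass. A minimal, self-contained sketch of the equivalence (the function names here are illustrative, not from the patch):

    // Before: the closure type is a named generic parameter.
    fn with_named<T, F: FnOnce(&mut Vec<u32>) -> T>(f: F) -> T {
        let mut data = vec![1, 2, 3];
        f(&mut data)
    }

    // After: `impl Trait` in argument position expresses the same bound inline.
    fn with_impl<R>(f: impl FnOnce(&mut Vec<u32>) -> R) -> R {
        let mut data = vec![1, 2, 3];
        f(&mut data)
    }

    fn main() {
        // Both forms accept the same closures and infer the result type.
        assert_eq!(with_named(|v| v.len()), 3);
        assert_eq!(with_impl(|v| v.iter().sum::<u32>()), 6);
    }

Note that `decode_syntax_context` further down keeps `D: Decoder` as a named parameter, because the closure bound `FnOnce(&mut D, u32)` still has to refer to `D` by name.
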
@@ -618,8 +603,14 @@ impl HygieneData {
         };
 
         // Fill the full data, now that we have it.
-        self.syntax_context_data[ctxt.as_u32() as usize] =
-            SyntaxContextData::new(key, opaque, opaque_and_semiopaque);
+        self.syntax_context_data[ctxt.as_u32() as usize] = SyntaxContextData {
+            outer_expn: expn_id,
+            outer_transparency: transparency,
+            parent,
+            opaque,
+            opaque_and_semiopaque,
+            dollar_crate_name: kw::DollarCrate,
+        };
         ctxt
     }
 }
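
`SyntaxContextData::new` (removed in the first hunk) did nothing but forward its arguments, so its single caller in `alloc_ctxt` now builds the struct with a literal and every field value is visible at the allocation site. A reduced sketch of the same refactor, using hypothetical types rather than the rustc ones:

    #[derive(Debug, PartialEq)]
    struct ContextData {
        parent: u32,
        outer_expn: u32,
        dollar_crate_name: &'static str,
    }

    impl ContextData {
        // Before: a thin constructor that only forwarded its arguments.
        fn new(parent: u32, outer_expn: u32) -> ContextData {
            ContextData { parent, outer_expn, dollar_crate_name: "$crate" }
        }
    }

    fn main() {
        // After the refactor, the call site spells out the literal instead, so the
        // field assignments are visible where the value is created.
        let via_ctor = ContextData::new(0, 1);
        let via_literal = ContextData { parent: 0, outer_expn: 1, dollar_crate_name: "$crate" };
        assert_eq!(via_ctor, via_literal);
    }
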
@@ -1278,49 +1269,47 @@ impl HygieneEncodeContext {
                 self.latest_ctxts
             );
 
-            // Consume the current round of SyntaxContexts.
-            // Drop the lock() temporary early
-            let latest_ctxts = { mem::take(&mut *self.latest_ctxts.lock()) };
-
-            // It's fine to iterate over a HashMap, because the serialization
-            // of the table that we insert data into doesn't depend on insertion
-            // order
+            // Consume the current round of syntax contexts.
+            // Drop the lock() temporary early.
+            // It's fine to iterate over a HashMap, because the serialization of the table
+            // that we insert data into doesn't depend on insertion order.
             #[allow(rustc::potential_query_instability)]
-            for_all_ctxts_in(latest_ctxts.into_iter(), |index, ctxt, data| {
+            let latest_ctxts = { mem::take(&mut *self.latest_ctxts.lock()) }.into_iter();
+            let all_ctxt_data: Vec<_> = HygieneData::with(|data| {
+                latest_ctxts
+                    .map(|ctxt| (ctxt, data.syntax_context_data[ctxt.0 as usize].key()))
+                    .collect()
+            });
+            for (ctxt, ctxt_key) in all_ctxt_data {
                 if self.serialized_ctxts.lock().insert(ctxt) {
-                    encode_ctxt(encoder, index, data);
+                    encode_ctxt(encoder, ctxt.0, &ctxt_key);
                 }
-            });
-
-            let latest_expns = { mem::take(&mut *self.latest_expns.lock()) };
+            }
 
-            // Same as above, this is fine as we are inserting into a order-independent hashset
+            // Same as above, but for expansions instead of syntax contexts.
             #[allow(rustc::potential_query_instability)]
-            for_all_expns_in(latest_expns.into_iter(), |expn, data, hash| {
+            let latest_expns = { mem::take(&mut *self.latest_expns.lock()) }.into_iter();
+            let all_expn_data: Vec<_> = HygieneData::with(|data| {
+                latest_expns
+                    .map(|expn| (expn, data.expn_data(expn).clone(), data.expn_hash(expn)))
+                    .collect()
+            });
+            for (expn, expn_data, expn_hash) in all_expn_data {
                 if self.serialized_expns.lock().insert(expn) {
-                    encode_expn(encoder, expn, data, hash);
+                    encode_expn(encoder, expn, &expn_data, expn_hash);
                 }
-            });
+            }
         }
         debug!("encode_hygiene: Done serializing SyntaxContextData");
     }
 }
 
-#[derive(Default)]
 /// Additional information used to assist in decoding hygiene data
-struct HygieneDecodeContextInner {
-    // Maps serialized `SyntaxContext` ids to a `SyntaxContext` in the current
-    // global `HygieneData`. When we deserialize a `SyntaxContext`, we need to create
-    // a new id in the global `HygieneData`. This map tracks the ID we end up picking,
-    // so that multiple occurrences of the same serialized id are decoded to the same
-    // `SyntaxContext`. This only stores `SyntaxContext`s which are completely decoded.
-    remapped_ctxts: Vec<Option<SyntaxContext>>,
-}
-
 #[derive(Default)]
-/// Additional information used to assist in decoding hygiene data
 pub struct HygieneDecodeContext {
-    inner: Lock<HygieneDecodeContextInner>,
+    // A cache mapping raw serialized per-crate syntax context ids to corresponding decoded
+    // `SyntaxContext`s in the current global `HygieneData`.
+    remapped_ctxts: Lock<Vec<Option<SyntaxContext>>>,
 }
 
 /// Register an expansion which has been decoded from the on-disk-cache for the local crate.
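
The encode loop now inlines what the removed `for_all_ctxts_in` and `for_all_expns_in` helpers used to do: take the pending set out of its lock with `mem::take`, snapshot whatever the encoder needs into a `Vec` during a single `HygieneData::with` borrow, and only then run the serialization loop with no borrow of the global data held; the enclosing `while` loop keeps draining rounds because encoding may queue further ids. A self-contained sketch of that snapshot-then-iterate pattern, using standard-library types as stand-ins for the rustc ones:

    use std::collections::HashSet;
    use std::mem;
    use std::sync::Mutex;

    struct RoundEncoder {
        pending: Mutex<HashSet<u32>>,
        serialized: Mutex<HashSet<u32>>,
    }

    impl RoundEncoder {
        fn encode_round(&self, lookup: impl Fn(u32) -> String, mut emit: impl FnMut(u32, &str)) {
            // Take the current round out of the lock; the guard is a temporary that
            // is dropped at the end of this statement, releasing the lock early.
            let pending = { mem::take(&mut *self.pending.lock().unwrap()) }.into_iter();

            // Snapshot everything the emit step needs up front...
            let snapshot: Vec<_> = pending.map(|id| (id, lookup(id))).collect();

            // ...so the loop below runs without holding the `pending` lock, and new
            // ids can be queued concurrently for a later round.
            for (id, data) in snapshot {
                if self.serialized.lock().unwrap().insert(id) {
                    emit(id, data.as_str());
                }
            }
        }
    }

    fn main() {
        let enc = RoundEncoder {
            pending: Mutex::new(HashSet::from([1, 2])),
            serialized: Mutex::new(HashSet::new()),
        };
        enc.encode_round(|id| format!("data-{id}"), |id, data| println!("{id} => {data}"));
    }
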
@@ -1391,10 +1380,10 @@ pub fn decode_expn_id(
 // to track which `SyntaxContext`s we have already decoded.
 // The provided closure will be invoked to deserialize a `SyntaxContextData`
 // if we haven't already seen the id of the `SyntaxContext` we are deserializing.
-pub fn decode_syntax_context<D: Decoder, F: FnOnce(&mut D, u32) -> SyntaxContextKey>(
+pub fn decode_syntax_context<D: Decoder>(
     d: &mut D,
     context: &HygieneDecodeContext,
-    decode_data: F,
+    decode_data: impl FnOnce(&mut D, u32) -> SyntaxContextKey,
 ) -> SyntaxContext {
     let raw_id: u32 = Decodable::decode(d);
     if raw_id == 0 {
@@ -1403,11 +1392,10 @@ pub fn decode_syntax_context<D: Decoder, F: FnOnce(&mut D, u32) -> SyntaxContext
         return SyntaxContext::root();
     }
 
+    // Look into the cache first.
     // Reminder: `HygieneDecodeContext` is per-crate, so there are no collisions between
     // raw ids from different crate metadatas.
-    if let Some(ctxt) = context.inner.lock().remapped_ctxts.get(raw_id as usize).copied().flatten()
-    {
-        // This has already been decoded.
+    if let Some(ctxt) = context.remapped_ctxts.lock().get(raw_id as usize).copied().flatten() {
         return ctxt;
     }
 
@@ -1417,40 +1405,16 @@ pub fn decode_syntax_context<D: Decoder, F: FnOnce(&mut D, u32) -> SyntaxContext
     let ctxt =
         HygieneData::with(|hygiene_data| hygiene_data.alloc_ctxt(parent, expn_id, transparency));
 
-    let mut inner = context.inner.lock();
+    let mut remapped_ctxts = context.remapped_ctxts.lock();
     let new_len = raw_id as usize + 1;
-    if inner.remapped_ctxts.len() < new_len {
-        inner.remapped_ctxts.resize(new_len, None);
+    if remapped_ctxts.len() < new_len {
+        remapped_ctxts.resize(new_len, None);
     }
-    inner.remapped_ctxts[raw_id as usize] = Some(ctxt);
+    remapped_ctxts[raw_id as usize] = Some(ctxt);
 
     ctxt
 }
 
-fn for_all_ctxts_in<F: FnMut(u32, SyntaxContext, &SyntaxContextKey)>(
-    ctxts: impl Iterator<Item = SyntaxContext>,
-    mut f: F,
-) {
-    let all_data: Vec<_> = HygieneData::with(|data| {
-        ctxts.map(|ctxt| (ctxt, data.syntax_context_data[ctxt.0 as usize].key())).collect()
-    });
-    for (ctxt, data) in all_data.into_iter() {
-        f(ctxt.0, ctxt, &data);
-    }
-}
-
-fn for_all_expns_in(
-    expns: impl Iterator<Item = ExpnId>,
-    mut f: impl FnMut(ExpnId, &ExpnData, ExpnHash),
-) {
-    let all_data: Vec<_> = HygieneData::with(|data| {
-        expns.map(|expn| (expn, data.expn_data(expn).clone(), data.expn_hash(expn))).collect()
-    });
-    for (expn, data, hash) in all_data.into_iter() {
-        f(expn, &data, hash);
-    }
-}
-
 impl<E: SpanEncoder> Encodable<E> for LocalExpnId {
     fn encode(&self, e: &mut E) {
         self.to_expn_id().encode(e);
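
With the `HygieneDecodeContextInner` wrapper flattened into `HygieneDecodeContext`, the decode path reads as a plain grow-on-demand cache: index the `Lock<Vec<Option<SyntaxContext>>>` by the raw serialized id, return early on a hit, otherwise decode, resize the vector so the id is in bounds, and record the result. A minimal sketch of that cache shape, independent of the hygiene types:

    use std::sync::Mutex;

    #[derive(Default)]
    struct DecodeCache {
        // Indexed by the raw serialized id; `None` marks ids not decoded yet.
        remapped: Mutex<Vec<Option<u64>>>,
    }

    impl DecodeCache {
        fn get_or_decode(&self, raw_id: u32, decode: impl FnOnce(u32) -> u64) -> u64 {
            // Fast path: this raw id has already been decoded through this context.
            if let Some(v) = self.remapped.lock().unwrap().get(raw_id as usize).copied().flatten() {
                return v;
            }

            // Slow path: decode, then grow the cache so `raw_id` is a valid index.
            let value = decode(raw_id);
            let mut remapped = self.remapped.lock().unwrap();
            let new_len = raw_id as usize + 1;
            if remapped.len() < new_len {
                remapped.resize(new_len, None);
            }
            remapped[raw_id as usize] = Some(value);
            value
        }
    }

    fn main() {
        let cache = DecodeCache::default();
        assert_eq!(cache.get_or_decode(3, |id| u64::from(id) * 10), 30);
        // The second lookup hits the cache, so the decode closure is never called.
        assert_eq!(cache.get_or_decode(3, |_| unreachable!()), 30);
    }
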
@@ -1463,10 +1427,10 @@ impl<D: SpanDecoder> Decodable<D> for LocalExpnId {
     }
 }
 
-pub fn raw_encode_syntax_context<E: Encoder>(
+pub fn raw_encode_syntax_context(
     ctxt: SyntaxContext,
     context: &HygieneEncodeContext,
-    e: &mut E,
+    e: &mut impl Encoder,
 ) {
     if !context.serialized_ctxts.lock().contains(&ctxt) {
         context.latest_ctxts.lock().insert(ctxt);