@@ -451,7 +451,46 @@ impl RwLock {
 
     #[inline]
     pub unsafe fn downgrade(&self) {
-        todo!()
+        // Atomically set to read-locked with a single reader, without any waiting threads.
+        if let Err(mut state) = self.state.compare_exchange(
+            without_provenance_mut(LOCKED),
+            without_provenance_mut(LOCKED | SINGLE),
+            Release,
+            Relaxed,
+        ) {
+            // Attempt to grab the queue lock.
+            loop {
+                let next = state.map_addr(|addr| addr | QUEUE_LOCKED);
+                match self.state.compare_exchange(state, next, AcqRel, Relaxed) {
+                    Err(new_state) => state = new_state,
+                    Ok(new_state) => {
+                        assert_eq!(
+                            new_state.mask(!MASK).addr(),
+                            LOCKED | QUEUED | QUEUE_LOCKED,
+                            "{:p}",
+                            new_state
+                        );
+                        state = new_state;
+                        break;
+                    }
+                }
+            }
+
+            assert_eq!(state.mask(!MASK).addr(), LOCKED | QUEUED | QUEUE_LOCKED);
+
+            // SAFETY: We have the queue lock so all safety contracts are fulfilled.
+            let tail = unsafe { add_backlinks_and_find_tail(to_node(state)).as_ref() };
+
+            // Increment the reader count from 0 to 1.
+            assert_eq!(
+                tail.next.0.fetch_byte_add(SINGLE, AcqRel).addr(),
+                0,
+                "Reader count was not zero while we had the write lock"
+            );
+
+            // Release the queue lock.
+            self.state.fetch_byte_sub(QUEUE_LOCKED, Release);
+        }
     }
 
     /// # Safety
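The hunk above is easier to follow if the core idea is separated from the queue bookkeeping: downgrading means replacing the write-locked state with a "read-locked, one reader" state in a single atomic step. Below is a minimal sketch of that idea on an invented `SimpleRwLock` using plain integer flags; the constant names only mirror those in the patch, their values are illustrative, and the slow path that walks the wait queue (`add_backlinks_and_find_tail`, `tail.next`) is deliberately not modeled.

```rust
use std::sync::atomic::{AtomicUsize, Ordering::{Acquire, Relaxed, Release}};

// Illustrative flag layout, mirroring the names used in the patch.
const LOCKED: usize = 1;      // the lock is held (by a writer or by readers)
const SINGLE: usize = 1 << 4; // one reader's worth of count, kept in the upper bits

struct SimpleRwLock {
    state: AtomicUsize,
}

impl SimpleRwLock {
    const fn new() -> Self {
        Self { state: AtomicUsize::new(0) }
    }

    fn write_lock(&self) -> bool {
        self.state.compare_exchange(0, LOCKED, Acquire, Relaxed).is_ok()
    }

    /// Downgrade fast path: turn "write-locked" into "read-locked with one reader"
    /// with a single CAS, so no other writer can slip in between the two phases.
    fn downgrade(&self) {
        self.state
            .compare_exchange(LOCKED, LOCKED | SINGLE, Release, Relaxed)
            .expect("caller must hold the write lock; queued waiters are not modeled here");
    }

    fn read_unlock(&self) {
        // The last reader removes both its count and the LOCKED bit.
        self.state.fetch_sub(LOCKED | SINGLE, Release);
    }
}

fn main() {
    let lock = SimpleRwLock::new();
    assert!(lock.write_lock());
    lock.downgrade();   // now read-locked with a single reader
    lock.read_unlock(); // fully unlocked again
    assert_eq!(lock.state.load(Relaxed), 0);
}
```

Because the transition is one compare-exchange, there is never a moment at which the lock is unlocked, so no competing writer can acquire it between the write phase and the read phase; the slow path in the patch preserves the same guarantee when other threads are queued, by moving the single reader count into the tail node instead.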
@@ -545,6 +584,7 @@ impl RwLock {
         loop {
             let prev = unsafe { current.as_ref().prev.get() };
             unsafe {
+                // There must be threads waiting.
                 Node::complete(current);
             }
             match prev {
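For completeness, a hedged usage-level sketch of what this backend change enables for callers. The guard API shown here (the `rwlock_downgrade` feature gate and `RwLockWriteGuard::downgrade`) is assumed from the surrounding work and is not part of this diff, so treat the names as illustrative.

```rust
// Assumed unstable feature gate; requires a nightly toolchain if it exists under this name.
#![feature(rwlock_downgrade)]

use std::sync::{RwLock, RwLockWriteGuard};

fn main() {
    let lock = RwLock::new(0);

    let mut write_guard = lock.write().unwrap();
    *write_guard += 1;

    // Atomically convert the write guard into a read guard: the lock is never
    // released in between, so no other writer can modify the value here.
    let read_guard = RwLockWriteGuard::downgrade(write_guard);
    assert_eq!(*read_guard, 1);
}
```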