@@ -21,7 +21,7 @@ use std::ptr;
 use std::borrow::Cow;
 
 use rustc::ty::{self, Instance, ParamEnv, query::TyCtxtAt};
-use rustc::ty::layout::{self, Align, TargetDataLayout, Size, HasDataLayout};
+use rustc::ty::layout::{Align, TargetDataLayout, Size, HasDataLayout};
 pub use rustc::mir::interpret::{truncate, write_target_uint, read_target_uint};
 use rustc_data_structures::fx::{FxHashSet, FxHashMap};
 
@@ -30,7 +30,7 @@ use syntax::ast::Mutability;
 use super::{
     Pointer, AllocId, Allocation, ConstValue, GlobalId,
     EvalResult, Scalar, EvalErrorKind, AllocType, PointerArithmetic,
-    Machine, MemoryAccess, AllocMap, MayLeak, ScalarMaybeUndef, ErrorHandled,
+    Machine, MemoryAccess, AllocMap, MayLeak, ScalarMaybeUndef, AllocationExtra, ErrorHandled,
 };
 
 #[derive(Debug, PartialEq, Eq, Copy, Clone, Hash)]
@@ -297,6 +297,17 @@ impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
             })
         }
     }
+
+    /// Convenience forwarding method for `Allocation::check_bounds`.
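+    /// The `access` flag is forwarded into any out-of-bounds error, recording
+    /// whether this was an actual memory access or just an inbounds check.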
+    #[inline(always)]
+    pub fn check_bounds(
+        &self,
+        ptr: Pointer<M::PointerTag>,
+        size: Size,
+        access: bool
+    ) -> EvalResult<'tcx> {
+        self.get(ptr.alloc_id)?.check_bounds(self, ptr, size, access)
+    }
 }
 
 /// Allocation accessors
@@ -587,6 +598,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
 impl<'a, 'mir, 'tcx, M> Memory<'a, 'mir, 'tcx, M>
 where
     M: Machine<'a, 'mir, 'tcx, PointerTag=(), AllocExtra=()>,
+    M::AllocExtra: AllocationExtra<()>,
     M::MemoryMap: AllocMap<AllocId, (MemoryKind<M::MemoryKinds>, Allocation)>,
 {
     /// mark an allocation as static and initialized, either mutable or not
@@ -626,3 +638,254 @@ where
         Ok(())
     }
 }
+
+/// Reading and writing
+impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
+    pub fn copy(
+        &mut self,
+        src: Scalar<M::PointerTag>,
+        src_align: Align,
+        dest: Scalar<M::PointerTag>,
+        dest_align: Align,
+        size: Size,
+        nonoverlapping: bool,
+    ) -> EvalResult<'tcx> {
+        self.copy_repeatedly(src, src_align, dest, dest_align, size, 1, nonoverlapping)
+    }
+
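+    /// Copies `size` bytes from `src` into `length` consecutive `size`-sized
+    /// blocks starting at `dest`; `length == 1` is the plain `copy` case.
+    /// E.g. a `size` of 4 and a `length` of 3 fills 12 bytes at `dest` with
+    /// three copies of the 4 bytes at `src`.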
+    pub fn copy_repeatedly(
+        &mut self,
+        src: Scalar<M::PointerTag>,
+        src_align: Align,
+        dest: Scalar<M::PointerTag>,
+        dest_align: Align,
+        size: Size,
+        length: u64,
+        nonoverlapping: bool,
+    ) -> EvalResult<'tcx> {
+        if size.bytes() == 0 {
+            // Nothing to do for ZST, other than checking alignment and non-NULLness.
+            self.check_align(src, src_align)?;
+            self.check_align(dest, dest_align)?;
+            return Ok(());
+        }
+        let src = src.to_ptr()?;
+        let dest = dest.to_ptr()?;
+
+        // First copy the relocations to a temporary buffer, because
+        // `get_bytes_mut` will clear the relocations, which is correct,
+        // since we don't want to keep any relocations at the target.
+        // (`get_bytes_with_undef_and_ptr` below checks that there are no
+        // relocations overlapping the edges; those would not be handled correctly.)
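+        // For repetition `i`, the copied block lands at `dest.offset + i * size`,
+        // so each relocation is shifted by `dest.offset - src.offset + i * size`.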
+        let relocations = {
+            let relocations = self.relocations(src, size)?;
+            let mut new_relocations = Vec::with_capacity(relocations.len() * (length as usize));
+            for i in 0..length {
+                new_relocations.extend(
+                    relocations
+                        .iter()
+                        .map(|&(offset, reloc)| {
+                            (offset + dest.offset - src.offset + (i * size),
+                             reloc)
+                        })
+                );
+            }
+
+            new_relocations
+        };
+
+        let tcx = self.tcx.tcx;
+
+        // This also checks alignment, and relocation edges on the src.
+        let src_bytes = self
+            .get(src.alloc_id)?
+            .get_bytes_with_undef_and_ptr(tcx, src, size, src_align)?
+            .as_ptr();
+        let dest_bytes = self
+            .get_mut(dest.alloc_id)?
+            .get_bytes_mut(tcx, dest, size * length, dest_align)?
+            .as_mut_ptr();
+
+        // SAFE: The above indexing would have panicked if there weren't at least `size` bytes
+        // behind `src` and `dest`. Also, we use the overlapping-safe `ptr::copy` if `src` and
+        // `dest` could possibly overlap.
+        // The pointers above remain valid even if the `HashMap` table is moved around because they
+        // point into the `Vec` storing the bytes.
+        unsafe {
+            assert_eq!(size.bytes() as usize as u64, size.bytes());
+            if src.alloc_id == dest.alloc_id {
+                if nonoverlapping {
+                    if (src.offset <= dest.offset && src.offset + size > dest.offset) ||
+                       (dest.offset <= src.offset && dest.offset + size > src.offset)
+                    {
+                        return err!(Intrinsic(
+                            "copy_nonoverlapping called on overlapping ranges".to_string(),
+                        ));
+                    }
+                }
+
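+                // Same allocation: use the overlap-tolerant `ptr::copy` for each repetition.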
+                for i in 0..length {
+                    ptr::copy(src_bytes,
+                              dest_bytes.offset((size.bytes() * i) as isize),
+                              size.bytes() as usize);
+                }
+            } else {
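+                // Distinct allocations can never overlap, so `copy_nonoverlapping` is fine.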
+                for i in 0..length {
+                    ptr::copy_nonoverlapping(src_bytes,
+                                             dest_bytes.offset((size.bytes() * i) as isize),
+                                             size.bytes() as usize);
+                }
+            }
+        }
+
+        // copy definedness to the destination
+        self.copy_undef_mask(src, dest, size, length)?;
+        // copy the relocations to the destination
+        self.get_mut(dest.alloc_id)?.relocations.insert_presorted(relocations);
+
+        Ok(())
+    }
+
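+    /// Reads bytes from `ptr` up to (but excluding) the first 0 byte; fails if
+    /// the allocation ends before a 0 byte is found.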
+    pub fn read_c_str(&self, ptr: Pointer<M::PointerTag>) -> EvalResult<'tcx, &[u8]> {
+        self.get(ptr.alloc_id)?.read_c_str(self, ptr)
+    }
+
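+    /// Checks that `size` bytes at `ptr` are defined integer bytes; with
+    /// `allow_ptr_and_undef` set, undefined bytes and pointer bytes pass too.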
+    pub fn check_bytes(
+        &self,
+        ptr: Scalar<M::PointerTag>,
+        size: Size,
+        allow_ptr_and_undef: bool,
+    ) -> EvalResult<'tcx> {
+        // Empty accesses don't need to be valid pointers, but they should still be non-NULL
+        let align = Align::from_bytes(1, 1).unwrap();
+        if size.bytes() == 0 {
+            self.check_align(ptr, align)?;
+            return Ok(());
+        }
+        let ptr = ptr.to_ptr()?;
+        self.get(ptr.alloc_id)?.check_bytes(self, ptr, size, allow_ptr_and_undef)
+    }
+
+    pub fn read_bytes(&self, ptr: Scalar<M::PointerTag>, size: Size) -> EvalResult<'tcx, &[u8]> {
+        // Empty accesses don't need to be valid pointers, but they should still be non-NULL
+        let align = Align::from_bytes(1, 1).unwrap();
+        if size.bytes() == 0 {
+            self.check_align(ptr, align)?;
+            return Ok(&[]);
+        }
+        let ptr = ptr.to_ptr()?;
+        self.get(ptr.alloc_id)?.get_bytes(self, ptr, size, align)
+    }
+
+    pub fn write_bytes(&mut self, ptr: Scalar<M::PointerTag>, src: &[u8]) -> EvalResult<'tcx> {
+        // Empty accesses don't need to be valid pointers, but they should still be non-NULL
+        let align = Align::from_bytes(1, 1).unwrap();
+        if src.is_empty() {
+            self.check_align(ptr, align)?;
+            return Ok(());
+        }
+        let ptr = ptr.to_ptr()?;
+        let tcx = self.tcx.tcx;
+        self.get_mut(ptr.alloc_id)?.write_bytes(tcx, ptr, src)
+    }
+
+    pub fn write_repeat(
+        &mut self,
+        ptr: Scalar<M::PointerTag>,
+        val: u8,
+        count: Size
+    ) -> EvalResult<'tcx> {
+        // Empty accesses don't need to be valid pointers, but they should still be non-NULL
+        let align = Align::from_bytes(1, 1).unwrap();
+        if count.bytes() == 0 {
+            self.check_align(ptr, align)?;
+            return Ok(());
+        }
+        let ptr = ptr.to_ptr()?;
+        let tcx = self.tcx.tcx;
+        self.get_mut(ptr.alloc_id)?.write_repeat(tcx, ptr, val, count)
+    }
+
+    /// Read a *non-ZST* scalar
+    pub fn read_scalar(
+        &self,
+        ptr: Pointer<M::PointerTag>,
+        ptr_align: Align,
+        size: Size
+    ) -> EvalResult<'tcx, ScalarMaybeUndef<M::PointerTag>> {
+        self.get(ptr.alloc_id)?.read_scalar(self, ptr, ptr_align, size)
+    }
+
+    pub fn read_ptr_sized(
+        &self,
+        ptr: Pointer<M::PointerTag>,
+        ptr_align: Align
+    ) -> EvalResult<'tcx, ScalarMaybeUndef<M::PointerTag>> {
+        self.read_scalar(ptr, ptr_align, self.pointer_size())
+    }
+
+    /// Write a *non-ZST* scalar
+    pub fn write_scalar(
+        &mut self,
+        ptr: Pointer<M::PointerTag>,
+        ptr_align: Align,
+        val: ScalarMaybeUndef<M::PointerTag>,
+        type_size: Size,
+    ) -> EvalResult<'tcx> {
+        let tcx = self.tcx.tcx;
+        self.get_mut(ptr.alloc_id)?.write_scalar(tcx, ptr, ptr_align, val, type_size)
+    }
+
+    pub fn write_ptr_sized(
+        &mut self,
+        ptr: Pointer<M::PointerTag>,
+        ptr_align: Align,
+        val: ScalarMaybeUndef<M::PointerTag>
+    ) -> EvalResult<'tcx> {
+        let ptr_size = self.pointer_size();
+        self.write_scalar(ptr, ptr_align, val, ptr_size)
+    }
+}
+
+/// Relocations
+impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
+    /// Return all relocations overlapping with the given ptr-offset pair.
+    fn relocations(
+        &self,
+        ptr: Pointer<M::PointerTag>,
+        size: Size,
+    ) -> EvalResult<'tcx, &[(Size, (M::PointerTag, AllocId))]> {
+        self.get(ptr.alloc_id)?.relocations(self, ptr, size)
+    }
+}
+
+/// Undefined bytes
+impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
+    // FIXME: Add a fast version for the common, nonoverlapping case
+    fn copy_undef_mask(
+        &mut self,
+        src: Pointer<M::PointerTag>,
+        dest: Pointer<M::PointerTag>,
+        size: Size,
+        repeat: u64,
+    ) -> EvalResult<'tcx> {
+        // The bits have to be saved locally before writing to dest in case src and dest overlap.
+        assert_eq!(size.bytes() as usize as u64, size.bytes());
+
+        let undef_mask = self.get(src.alloc_id)?.undef_mask.clone();
+        let dest_allocation = self.get_mut(dest.alloc_id)?;
+
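+        // For each source byte, mirror its defined-ness into the matching byte
+        // of every one of the `repeat` destination blocks.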
+        for i in 0..size.bytes() {
+            let defined = undef_mask.get(src.offset + Size::from_bytes(i));
+
+            for j in 0..repeat {
+                dest_allocation.undef_mask.set(
+                    dest.offset + Size::from_bytes(i + (size.bytes() * j)),
+                    defined
+                );
+            }
+        }
+
+        Ok(())
+    }
+}