@@ -196,9 +196,9 @@ TEST(SanitizerCommon, DenseSizeClassMap) {
 }
 
 template <class Allocator>
-void TestSizeClassAllocator() {
+void TestSizeClassAllocator(uptr premapped_heap = 0) {
   Allocator *a = new Allocator;
-  a->Init(kReleaseToOSIntervalNever);
+  a->Init(kReleaseToOSIntervalNever, premapped_heap);
   typename Allocator::AllocatorCache cache;
   memset(&cache, 0, sizeof(cache));
   cache.Init(0);
@@ -265,6 +265,25 @@ void TestSizeClassAllocator() {
 }
 
 #if SANITIZER_CAN_USE_ALLOCATOR64
+
+// Maps 2 * kAllocatorSize bytes on construction so that a kAllocatorSize-aligned
+// heap address is available, and unmaps the region on destruction.
+class ScopedPremappedHeap {
+ public:
+  ScopedPremappedHeap() {
+    BasePtr = MmapNoReserveOrDie(2 * kAllocatorSize, "preallocated heap");
+    AlignedAddr = RoundUpTo(reinterpret_cast<uptr>(BasePtr), kAllocatorSize);
+  }
+
+  ~ScopedPremappedHeap() { UnmapOrDie(BasePtr, 2 * kAllocatorSize); }
+
+  uptr Addr() { return AlignedAddr; }
+
+ private:
+  void *BasePtr;
+  uptr AlignedAddr;
+};
+
 // These tests can fail on Windows if memory is somewhat full and lit happens
 // to run them all at the same time. FIXME: Make them not flaky and reenable.
 #if !SANITIZER_WINDOWS
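
Outside the diff itself: a minimal standalone sketch of the map-twice-and-round-up trick that ScopedPremappedHeap above relies on. It uses plain Linux-flavored mmap/munmap instead of the sanitizer's MmapNoReserveOrDie/UnmapOrDie wrappers, and MapAlignedRegion/kSize are illustrative names rather than sanitizer APIs. Mapping 2 * size bytes guarantees that at least one size-aligned block of size bytes lies inside the mapping.

#include <sys/mman.h>
#include <cstdint>
#include <cstdio>
#include <cstdlib>

// Returns an address aligned to `alignment` (a power of two) inside a fresh
// anonymous mapping of 2 * size bytes; *base_out receives the raw mapping
// base so the caller can later munmap the whole over-sized region.
static void *MapAlignedRegion(size_t size, size_t alignment, void **base_out) {
  void *base = mmap(nullptr, 2 * size, PROT_READ | PROT_WRITE,
                    MAP_PRIVATE | MAP_ANONYMOUS | MAP_NORESERVE, -1, 0);
  if (base == MAP_FAILED) {
    perror("mmap");
    abort();
  }
  uintptr_t aligned =
      (reinterpret_cast<uintptr_t>(base) + alignment - 1) & ~(alignment - 1);
  *base_out = base;
  return reinterpret_cast<void *>(aligned);
}

int main() {
  constexpr size_t kSize = 1 << 20;  // stand-in for kAllocatorSize
  void *base;
  void *heap = MapAlignedRegion(kSize, kSize, &base);
  std::printf("aligned heap at %p (mapping base %p)\n", heap, base);
  munmap(base, 2 * kSize);  // release the entire over-sized mapping
  return 0;
}
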
@@ -276,6 +295,11 @@ TEST(SanitizerCommon, SizeClassAllocator64Dynamic) {
   TestSizeClassAllocator<Allocator64Dynamic>();
 }
 
+TEST(SanitizerCommon, SizeClassAllocator64DynamicPremapped) {
+  ScopedPremappedHeap h;
+  TestSizeClassAllocator<Allocator64Dynamic>(h.Addr());
+}
+
 #if !SANITIZER_ANDROID
 // FIXME(kostyak): find values so that those work on Android as well.
 TEST(SanitizerCommon, SizeClassAllocator64Compact) {
@@ -320,9 +344,9 @@ TEST(SanitizerCommon, SizeClassAllocator32SeparateBatches) {
 }
 
 template <class Allocator>
-void SizeClassAllocatorMetadataStress() {
+void SizeClassAllocatorMetadataStress(uptr premapped_heap = 0) {
   Allocator *a = new Allocator;
-  a->Init(kReleaseToOSIntervalNever);
+  a->Init(kReleaseToOSIntervalNever, premapped_heap);
   typename Allocator::AllocatorCache cache;
   memset(&cache, 0, sizeof(cache));
   cache.Init(0);
@@ -361,6 +385,11 @@ TEST(SanitizerCommon, SizeClassAllocator64DynamicMetadataStress) {
   SizeClassAllocatorMetadataStress<Allocator64Dynamic>();
 }
 
+TEST(SanitizerCommon, SizeClassAllocator64DynamicPremappedMetadataStress) {
+  ScopedPremappedHeap h;
+  SizeClassAllocatorMetadataStress<Allocator64Dynamic>(h.Addr());
+}
+
 #if !SANITIZER_ANDROID
 TEST(SanitizerCommon, SizeClassAllocator64CompactMetadataStress) {
   SizeClassAllocatorMetadataStress<Allocator64Compact>();
@@ -374,9 +403,10 @@ TEST(SanitizerCommon, SizeClassAllocator32CompactMetadataStress) {
 }
 
 template <class Allocator>
-void SizeClassAllocatorGetBlockBeginStress(u64 TotalSize) {
+void SizeClassAllocatorGetBlockBeginStress(u64 TotalSize,
+                                           uptr premapped_heap = 0) {
   Allocator *a = new Allocator;
-  a->Init(kReleaseToOSIntervalNever);
+  a->Init(kReleaseToOSIntervalNever, premapped_heap);
   typename Allocator::AllocatorCache cache;
   memset(&cache, 0, sizeof(cache));
   cache.Init(0);
@@ -408,6 +438,11 @@ TEST(SanitizerCommon, SizeClassAllocator64DynamicGetBlockBegin) {
   SizeClassAllocatorGetBlockBeginStress<Allocator64Dynamic>(
       1ULL << (SANITIZER_ANDROID ? 31 : 33));
 }
+TEST(SanitizerCommon, SizeClassAllocator64DynamicPremappedGetBlockBegin) {
+  ScopedPremappedHeap h;
+  SizeClassAllocatorGetBlockBeginStress<Allocator64Dynamic>(
+      1ULL << (SANITIZER_ANDROID ? 31 : 33), h.Addr());
+}
 #if !SANITIZER_ANDROID
 TEST(SanitizerCommon, SizeClassAllocator64CompactGetBlockBegin) {
   SizeClassAllocatorGetBlockBeginStress<Allocator64Compact>(1ULL << 33);
@@ -624,10 +659,10 @@ TEST(SanitizerCommon, LargeMmapAllocator) {
 }
 
 template <class PrimaryAllocator>
-void TestCombinedAllocator() {
+void TestCombinedAllocator(uptr premapped_heap = 0) {
   typedef CombinedAllocator<PrimaryAllocator> Allocator;
   Allocator *a = new Allocator;
-  a->Init(kReleaseToOSIntervalNever);
+  a->Init(kReleaseToOSIntervalNever, premapped_heap);
   std::mt19937 r;
 
   typename Allocator::AllocatorCache cache;
@@ -698,6 +733,11 @@ TEST(SanitizerCommon, CombinedAllocator64Dynamic) {
   TestCombinedAllocator<Allocator64Dynamic>();
 }
 
+TEST(SanitizerCommon, CombinedAllocator64DynamicPremapped) {
+  ScopedPremappedHeap h;
+  TestCombinedAllocator<Allocator64Dynamic>(h.Addr());
+}
+
 #if !SANITIZER_ANDROID
 TEST(SanitizerCommon, CombinedAllocator64Compact) {
   TestCombinedAllocator<Allocator64Compact>();
@@ -714,12 +754,12 @@ TEST(SanitizerCommon, SKIP_ON_SOLARIS_SPARCV9(CombinedAllocator32Compact)) {
 }
 
 template <class Allocator>
-void TestSizeClassAllocatorLocalCache() {
+void TestSizeClassAllocatorLocalCache(uptr premapped_heap = 0) {
   using AllocatorCache = typename Allocator::AllocatorCache;
   AllocatorCache cache;
   Allocator *a = new Allocator();
 
-  a->Init(kReleaseToOSIntervalNever);
+  a->Init(kReleaseToOSIntervalNever, premapped_heap);
   memset(&cache, 0, sizeof(cache));
   cache.Init(0);
 
@@ -759,6 +799,11 @@ TEST(SanitizerCommon, SizeClassAllocator64DynamicLocalCache) {
   TestSizeClassAllocatorLocalCache<Allocator64Dynamic>();
 }
 
+TEST(SanitizerCommon, SizeClassAllocator64DynamicPremappedLocalCache) {
+  ScopedPremappedHeap h;
+  TestSizeClassAllocatorLocalCache<Allocator64Dynamic>(h.Addr());
+}
+
 #if !SANITIZER_ANDROID
 TEST(SanitizerCommon, SizeClassAllocator64CompactLocalCache) {
   TestSizeClassAllocatorLocalCache<Allocator64Compact>();
@@ -891,9 +936,9 @@ void IterationTestCallback(uptr chunk, void *arg) {
 }
 
 template <class Allocator>
-void TestSizeClassAllocatorIteration() {
+void TestSizeClassAllocatorIteration(uptr premapped_heap = 0) {
   Allocator *a = new Allocator;
-  a->Init(kReleaseToOSIntervalNever);
+  a->Init(kReleaseToOSIntervalNever, premapped_heap);
   typename Allocator::AllocatorCache cache;
   memset(&cache, 0, sizeof(cache));
   cache.Init(0);
@@ -942,6 +987,10 @@ TEST(SanitizerCommon, SizeClassAllocator64Iteration) {
 TEST(SanitizerCommon, SizeClassAllocator64DynamicIteration) {
   TestSizeClassAllocatorIteration<Allocator64Dynamic>();
 }
+TEST(SanitizerCommon, SizeClassAllocator64DynamicPremappedIteration) {
+  ScopedPremappedHeap h;
+  TestSizeClassAllocatorIteration<Allocator64Dynamic>(h.Addr());
+}
 #endif
 #endif
 
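An aside, not part of the commit: taking the new second Init parameter as these tests exercise it (0 keeps the existing self-mapping behavior, while a non-zero kAllocatorSize-aligned address makes the dynamic primary allocator use that pre-reserved region), an embedder-side caller would presumably wire it up the same way the Premapped test variants do. The snippet below is a hypothetical fragment assuming the Allocator64Dynamic typedef, kReleaseToOSIntervalNever constant, and ScopedPremappedHeap helper from the test file.

// Hypothetical usage sketch, mirroring the Premapped tests above.
ScopedPremappedHeap heap;                         // reserves 2 * kAllocatorSize bytes
Allocator64Dynamic *a = new Allocator64Dynamic;
a->Init(kReleaseToOSIntervalNever, heap.Addr());  // primary heap placed at heap.Addr()
// ... allocate and deallocate through an Allocator64Dynamic::AllocatorCache,
// as the tests above do; the region stays mapped until `heap` is destroyed.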