@@ -196,9 +196,9 @@ TEST(SanitizerCommon, DenseSizeClassMap) {
 }
 
 template <class Allocator>
-void TestSizeClassAllocator() {
+void TestSizeClassAllocator(uptr premapped_heap = 0) {
   Allocator *a = new Allocator;
-  a->Init(kReleaseToOSIntervalNever);
+  a->Init(kReleaseToOSIntervalNever, premapped_heap);
   typename Allocator::AllocatorCache cache;
   memset(&cache, 0, sizeof(cache));
   cache.Init(0);
@@ -265,6 +265,25 @@ void TestSizeClassAllocator() {
 }
 
 #if SANITIZER_CAN_USE_ALLOCATOR64
+
+// Allocates kAllocatorSize aligned bytes on construction and frees it on
+// destruction.
+class ScopedPremappedHeap {
+ public:
+  ScopedPremappedHeap() {
+    BasePtr = MmapNoReserveOrDie(2 * kAllocatorSize, "preallocated heap");
+    AlignedAddr = RoundUpTo(reinterpret_cast<uptr>(BasePtr), kAllocatorSize);
+  }
+
+  ~ScopedPremappedHeap() { UnmapOrDie(BasePtr, 2 * kAllocatorSize); }
+
+  uptr Addr() { return AlignedAddr; }
+
+ private:
+  void *BasePtr;
+  uptr AlignedAddr;
+};
+
 // These tests can fail on Windows if memory is somewhat full and lit happens
 // to run them all at the same time. FIXME: Make them not flaky and reenable.
 #if !SANITIZER_WINDOWS
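
Note on the helper above: mapping 2 * kAllocatorSize bytes guarantees that a kAllocatorSize-aligned range of kAllocatorSize bytes fits inside the mapping, because the distance from BasePtr up to the next multiple of kAllocatorSize is strictly less than kAllocatorSize. A minimal standalone sketch of the same over-map-and-align trick, using POSIX mmap as a stand-in for the sanitizer's MmapNoReserveOrDie (all names and sizes here are illustrative, not part of the patch):

#include <cstdint>
#include <cstdio>
#include <sys/mman.h>

// Stand-in for the sanitizer's RoundUpTo(); align must be a power of two.
static uintptr_t RoundUpTo(uintptr_t x, uintptr_t align) {
  return (x + align - 1) & ~(align - 1);
}

int main() {
  const size_t kSize = 1ULL << 20;  // stand-in for kAllocatorSize
  // Over-map: twice the needed size, so an aligned sub-range must exist.
  void *base = mmap(nullptr, 2 * kSize, PROT_READ | PROT_WRITE,
                    MAP_PRIVATE | MAP_ANONYMOUS | MAP_NORESERVE, -1, 0);
  if (base == MAP_FAILED) return 1;
  uintptr_t aligned = RoundUpTo(reinterpret_cast<uintptr_t>(base), kSize);
  // [aligned, aligned + kSize) lies entirely inside [base, base + 2 * kSize).
  printf("base=%p aligned=0x%zx\n", base, (size_t)aligned);
  munmap(base, 2 * kSize);  // release the whole mapping
  return 0;
}
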
@@ -276,6 +295,11 @@ TEST(SanitizerCommon, SizeClassAllocator64Dynamic) {
   TestSizeClassAllocator<Allocator64Dynamic>();
 }
 
+TEST(SanitizerCommon, SizeClassAllocator64DynamicPremapped) {
+  ScopedPremappedHeap h;
+  TestSizeClassAllocator<Allocator64Dynamic>(h.Addr());
+}
+
 #if !SANITIZER_ANDROID
 // FIXME(kostyak): find values so that those work on Android as well.
 TEST(SanitizerCommon, SizeClassAllocator64Compact) {
@@ -320,9 +344,9 @@ TEST(SanitizerCommon, SizeClassAllocator32SeparateBatches) {
 }
 
 template <class Allocator>
-void SizeClassAllocatorMetadataStress() {
+void SizeClassAllocatorMetadataStress(uptr premapped_heap = 0) {
   Allocator *a = new Allocator;
-  a->Init(kReleaseToOSIntervalNever);
+  a->Init(kReleaseToOSIntervalNever, premapped_heap);
   typename Allocator::AllocatorCache cache;
   memset(&cache, 0, sizeof(cache));
   cache.Init(0);
@@ -361,6 +385,11 @@ TEST(SanitizerCommon, SizeClassAllocator64DynamicMetadataStress) {
   SizeClassAllocatorMetadataStress<Allocator64Dynamic>();
 }
 
+TEST(SanitizerCommon, SizeClassAllocator64DynamicPremappedMetadataStress) {
+  ScopedPremappedHeap h;
+  SizeClassAllocatorMetadataStress<Allocator64Dynamic>(h.Addr());
+}
+
 #if !SANITIZER_ANDROID
 TEST(SanitizerCommon, SizeClassAllocator64CompactMetadataStress) {
   SizeClassAllocatorMetadataStress<Allocator64Compact>();
@@ -374,9 +403,10 @@ TEST(SanitizerCommon, SizeClassAllocator32CompactMetadataStress) {
 }
 
 template <class Allocator>
-void SizeClassAllocatorGetBlockBeginStress(u64 TotalSize) {
+void SizeClassAllocatorGetBlockBeginStress(u64 TotalSize,
+                                           uptr premapped_heap = 0) {
   Allocator *a = new Allocator;
-  a->Init(kReleaseToOSIntervalNever);
+  a->Init(kReleaseToOSIntervalNever, premapped_heap);
   typename Allocator::AllocatorCache cache;
   memset(&cache, 0, sizeof(cache));
   cache.Init(0);
@@ -408,6 +438,11 @@ TEST(SanitizerCommon, SizeClassAllocator64DynamicGetBlockBegin) {
   SizeClassAllocatorGetBlockBeginStress<Allocator64Dynamic>(
       1ULL << (SANITIZER_ANDROID ? 31 : 33));
 }
+TEST(SanitizerCommon, SizeClassAllocator64DynamicPremappedGetBlockBegin) {
+  ScopedPremappedHeap h;
+  SizeClassAllocatorGetBlockBeginStress<Allocator64Dynamic>(
+      1ULL << (SANITIZER_ANDROID ? 31 : 33), h.Addr());
+}
 #if !SANITIZER_ANDROID
 TEST(SanitizerCommon, SizeClassAllocator64CompactGetBlockBegin) {
   SizeClassAllocatorGetBlockBeginStress<Allocator64Compact>(1ULL << 33);
@@ -624,10 +659,10 @@ TEST(SanitizerCommon, LargeMmapAllocator) {
 }
 
 template <class PrimaryAllocator>
-void TestCombinedAllocator() {
+void TestCombinedAllocator(uptr premapped_heap = 0) {
   typedef CombinedAllocator<PrimaryAllocator> Allocator;
   Allocator *a = new Allocator;
-  a->Init(kReleaseToOSIntervalNever);
+  a->Init(kReleaseToOSIntervalNever, premapped_heap);
   std::mt19937 r;
 
   typename Allocator::AllocatorCache cache;
@@ -698,6 +733,11 @@ TEST(SanitizerCommon, CombinedAllocator64Dynamic) {
   TestCombinedAllocator<Allocator64Dynamic>();
 }
 
+TEST(SanitizerCommon, CombinedAllocator64DynamicPremapped) {
+  ScopedPremappedHeap h;
+  TestCombinedAllocator<Allocator64Dynamic>(h.Addr());
+}
+
 #if !SANITIZER_ANDROID
 TEST(SanitizerCommon, CombinedAllocator64Compact) {
   TestCombinedAllocator<Allocator64Compact>();
@@ -714,12 +754,12 @@ TEST(SanitizerCommon, SKIP_ON_SOLARIS_SPARCV9(CombinedAllocator32Compact)) {
 }
 
 template <class Allocator>
-void TestSizeClassAllocatorLocalCache() {
+void TestSizeClassAllocatorLocalCache(uptr premapped_heap = 0) {
   using AllocatorCache = typename Allocator::AllocatorCache;
   AllocatorCache cache;
   Allocator *a = new Allocator();
 
-  a->Init(kReleaseToOSIntervalNever);
+  a->Init(kReleaseToOSIntervalNever, premapped_heap);
   memset(&cache, 0, sizeof(cache));
   cache.Init(0);
 
@@ -759,6 +799,11 @@ TEST(SanitizerCommon, SizeClassAllocator64DynamicLocalCache) {
   TestSizeClassAllocatorLocalCache<Allocator64Dynamic>();
 }
 
+TEST(SanitizerCommon, SizeClassAllocator64DynamicPremappedLocalCache) {
+  ScopedPremappedHeap h;
+  TestSizeClassAllocatorLocalCache<Allocator64Dynamic>(h.Addr());
+}
+
 #if !SANITIZER_ANDROID
 TEST(SanitizerCommon, SizeClassAllocator64CompactLocalCache) {
   TestSizeClassAllocatorLocalCache<Allocator64Compact>();
@@ -891,9 +936,9 @@ void IterationTestCallback(uptr chunk, void *arg) {
 }
 
 template <class Allocator>
-void TestSizeClassAllocatorIteration() {
+void TestSizeClassAllocatorIteration(uptr premapped_heap = 0) {
   Allocator *a = new Allocator;
-  a->Init(kReleaseToOSIntervalNever);
+  a->Init(kReleaseToOSIntervalNever, premapped_heap);
   typename Allocator::AllocatorCache cache;
   memset(&cache, 0, sizeof(cache));
   cache.Init(0);
@@ -942,6 +987,10 @@ TEST(SanitizerCommon, SizeClassAllocator64Iteration) {
 TEST(SanitizerCommon, SizeClassAllocator64DynamicIteration) {
   TestSizeClassAllocatorIteration<Allocator64Dynamic>();
 }
+TEST(SanitizerCommon, SizeClassAllocator64DynamicPremappedIteration) {
+  ScopedPremappedHeap h;
+  TestSizeClassAllocatorIteration<Allocator64Dynamic>(h.Addr());
+}
 #endif
 #endif
 
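
Design note: each helper takes premapped_heap as a defaulted parameter (uptr premapped_heap = 0), so every pre-existing call site compiles unchanged and 0 keeps the old behavior of letting Init map its own region; the new Premapped tests opt in by passing h.Addr(). A hedged sketch of how one driver could exercise both modes through the patched two-argument Init (the driver names and structure are illustrative, not part of the patch):

// Hypothetical driver; assumes the Init(release_interval, heap_address)
// signature introduced by this diff, where 0 means "map dynamically".
template <class Allocator>
static void RunOnce(uptr heap) {
  Allocator *a = new Allocator;
  a->Init(kReleaseToOSIntervalNever, heap);
  // ... exercise the allocator as the tests above do ...
  a->TestOnlyUnmap();  // the teardown the existing tests use
  delete a;
}

template <class Allocator>
void RunBothModes() {
  RunOnce<Allocator>(0);          // dynamic: Init maps its own region
  ScopedPremappedHeap h;          // premapped: adopt the caller's mapping
  RunOnce<Allocator>(h.Addr());
}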