@@ -304,7 +304,7 @@ static int CollectStaticTlsRanges(struct dl_phdr_info *info, size_t size,
304
304
return 0 ;
305
305
}
306
306
307
- static void GetStaticTlsRange (uptr *addr, uptr *size) {
307
+ static void GetStaticTlsRange (uptr *addr, uptr *size, uptr *align ) {
308
308
InternalMmapVector<TlsRange> ranges;
309
309
dl_iterate_phdr (CollectStaticTlsRanges, &ranges);
310
310
uptr len = ranges.size ();
@@ -318,17 +318,19 @@ static void GetStaticTlsRange(uptr *addr, uptr *size) {
318
318
// This may happen with musl if no module uses PT_TLS.
319
319
*addr = 0 ;
320
320
*size = 0 ;
321
+ *align = 1 ;
321
322
return ;
322
323
}
323
324
// Find the maximum consecutive ranges. We consider two modules consecutive if
324
325
// the gap is smaller than the alignment. The dynamic loader places static TLS
325
326
// blocks this way not to waste space.
326
327
uptr l = one;
328
+ *align = ranges[l].align ;
327
329
while (l != 0 && ranges[l].begin < ranges[l - 1 ].end + ranges[l - 1 ].align )
328
- --l;
330
+ *align = Max (*align, ranges[ --l]. align ) ;
329
331
uptr r = one + 1 ;
330
332
while (r != len && ranges[r].begin < ranges[r - 1 ].end + ranges[r - 1 ].align )
331
- ++r ;
333
+ *align = Max (*align, ranges[r++]. align ) ;
332
334
*addr = ranges[l].begin ;
333
335
*size = ranges[r - 1 ].end - ranges[l].begin ;
334
336
}
@@ -406,21 +408,31 @@ static void GetTls(uptr *addr, uptr *size) {
406
408
*size = 0 ;
407
409
}
408
410
#elif SANITIZER_LINUX
409
- GetStaticTlsRange (addr, size);
411
+ uptr align;
412
+ GetStaticTlsRange (addr, size, &align);
410
413
#if defined(__x86_64__) || defined(__i386__) || defined(__s390__)
414
+ if (SANITIZER_GLIBC) {
415
+ #if defined(__s390__)
416
+ align = Max<uptr>(align, 16 );
417
+ #else
418
+ align = Max<uptr>(align, 64 );
419
+ #endif
420
+ }
421
+ const uptr tp = RoundUpTo (*addr + *size, align);
422
+
411
423
// lsan requires the range to additionally cover the static TLS surplus
412
424
// (elf/dl-tls.c defines 1664). Otherwise there may be false positives for
413
425
// allocations only referenced by tls in dynamically loaded modules.
414
- if (SANITIZER_GLIBC) {
415
- *addr -= 1664 ;
416
- *size += 1664 ;
417
- }
426
+ if (SANITIZER_GLIBC)
427
+ *size += 1664 ;
428
+
418
429
// Extend the range to include the thread control block. On glibc, lsan needs
419
430
// the range to include pthread::{specific_1stblock,specific} so that
420
431
// allocations only referenced by pthread_setspecific can be scanned. This may
421
432
// underestimate by at most TLS_TCB_ALIGN-1 bytes but it should be fine
422
433
// because the number of bytes after pthread::specific is larger.
423
- *size += ThreadDescriptorSize ();
434
+ *addr = tp - RoundUpTo (*size, align);
435
+ *size = tp - *addr + ThreadDescriptorSize ();
424
436
#else
425
437
if (SANITIZER_GLIBC)
426
438
*size += 1664 ;
0 commit comments