commit 8f7e09f4dbdb5c815a18b8285fbc5d5d7bc17d86
Author: Szabolcs Nagy <szabolcs.nagy@arm.com>
Date:   Thu Feb 11 11:29:23 2021 +0000

    x86_64: Avoid lazy relocation of tlsdesc [BZ #27137]

    Lazy tlsdesc relocation is racy because the static tls optimization and
    tlsdesc management operations are done without holding the dlopen lock.

    This is similar to the commit b7cf203b5c17dd6d9878537d41e0c7cc3d270a67
    for aarch64, but it fixes a different race: bug 27137.

    Another issue is that ld auditing ignores DT_BIND_NOW and thus tries to
    relocate tlsdesc lazily, but that does not work in a BIND_NOW module
    due to missing DT_TLSDESC_PLT. Unconditionally relocating tlsdesc at
    load time fixes this bug 27721 too.

diff --git a/sysdeps/x86_64/dl-machine.h b/sysdeps/x86_64/dl-machine.h
index e308b662d245cc63..ef5740ba281c7282 100644
--- a/sysdeps/x86_64/dl-machine.h
+++ b/sysdeps/x86_64/dl-machine.h
@@ -563,12 +563,21 @@ elf_machine_lazy_rel (struct link_map *map,
     }
   else if (__glibc_likely (r_type == R_X86_64_TLSDESC))
     {
-      struct tlsdesc volatile * __attribute__((__unused__)) td =
-	(struct tlsdesc volatile *)reloc_addr;
+      const Elf_Symndx symndx = ELFW (R_SYM) (reloc->r_info);
+      const ElfW (Sym) *symtab = (const void *)D_PTR (map, l_info[DT_SYMTAB]);
+      const ElfW (Sym) *sym = &symtab[symndx];
+      const struct r_found_version *version = NULL;
 
-      td->arg = (void*)reloc;
-      td->entry = (void*)(D_PTR (map, l_info[ADDRIDX (DT_TLSDESC_PLT)])
-			  + map->l_addr);
+      if (map->l_info[VERSYMIDX (DT_VERSYM)] != NULL)
+	{
+	  const ElfW (Half) *vernum =
+	    (const void *)D_PTR (map, l_info[VERSYMIDX (DT_VERSYM)]);
+	  version = &map->l_versions[vernum[symndx] & 0x7fff];
+	}
+
+      /* Always initialize TLS descriptors completely at load time, in
+	 case static TLS is allocated for it that requires locking.  */
+      elf_machine_rela (map, reloc, sym, version, reloc_addr, skip_ifunc);
     }
   else if (__glibc_unlikely (r_type == R_X86_64_IRELATIVE))
     {