commit 8f7e09f4dbdb5c815a18b8285fbc5d5d7bc17d86
Author: Szabolcs Nagy <szabolcs.nagy@arm.com>
Date:   Thu Feb 11 11:29:23 2021 +0000

    x86_64: Avoid lazy relocation of tlsdesc [BZ #27137]

    Lazy tlsdesc relocation is racy because the static tls optimization and
    tlsdesc management operations are done without holding the dlopen lock.

    This is similar to the commit b7cf203b5c17dd6d9878537d41e0c7cc3d270a67
    for aarch64, but it fixes a different race: bug 27137.

    Another issue is that ld auditing ignores DT_BIND_NOW and thus tries to
    relocate tlsdesc lazily, but that does not work in a BIND_NOW module
    due to missing DT_TLSDESC_PLT. Unconditionally relocating tlsdesc at
    load time fixes this bug 27721 too.
diff --git a/sysdeps/x86_64/dl-machine.h b/sysdeps/x86_64/dl-machine.h
index e308b662d245cc63..ef5740ba281c7282 100644
--- a/sysdeps/x86_64/dl-machine.h
+++ b/sysdeps/x86_64/dl-machine.h
@@ -563,12 +563,21 @@ elf_machine_lazy_rel (struct link_map *map,
     }
   else if (__glibc_likely (r_type == R_X86_64_TLSDESC))
     {
-      struct tlsdesc volatile * __attribute__((__unused__)) td =
-	(struct tlsdesc volatile *)reloc_addr;
+      const Elf_Symndx symndx = ELFW (R_SYM) (reloc->r_info);
+      const ElfW (Sym) *symtab = (const void *)D_PTR (map, l_info[DT_SYMTAB]);
+      const ElfW (Sym) *sym = &symtab[symndx];
+      const struct r_found_version *version = NULL;
 
-      td->arg = (void*)reloc;
-      td->entry = (void*)(D_PTR (map, l_info[ADDRIDX (DT_TLSDESC_PLT)])
-			  + map->l_addr);
+      if (map->l_info[VERSYMIDX (DT_VERSYM)] != NULL)
+	{
+	  const ElfW (Half) *vernum =
+	    (const void *)D_PTR (map, l_info[VERSYMIDX (DT_VERSYM)]);
+	  version = &map->l_versions[vernum[symndx] & 0x7fff];
+	}
+
+      /* Always initialize TLS descriptors completely at load time, in
+	 case static TLS is allocated for it that requires locking.  */
+      elf_machine_rela (map, reloc, sym, version, reloc_addr, skip_ifunc);
     }
   else if (__glibc_unlikely (r_type == R_X86_64_IRELATIVE))
     {