SOURCES/gcc48-rh1469697-9.patch

commit b49f8fb8a97e9af8e6ba2b65d18195099cd1bb79
Author: law <law@138bc75d-0d04-0410-961f-82ee72b054a4>
Date:   Thu Sep 21 04:30:16 2017 +0000

            * config/s390/s390.c (MIN_UNROLL_PROBES): Define.
            (allocate_stack_space): New function, partially extracted from
            s390_emit_prologue.
            (s390_emit_prologue): Track offset to most recent stack probe.
            Code to allocate space moved into allocate_stack_space.
            Dump actions when no stack is allocated.
            (s390_prologue_plus_offset): New function.
            (s390_emit_stack_probe): Likewise.

            * gcc.dg/stack-check-5.c:  Add argument for s390.
            * lib/target-supports.exp:
            (check_effective_target_supports_stack_clash_protection): Enable for
            s390/s390x targets.

    git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@253049 138bc75d-0d04-0410-961f-82ee72b054a4
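
The allocation policy that the new allocate_stack_space function implements can be sketched in plain C.  The constants, helper and example sizes below are illustrative assumptions standing in for the --param stack-clash-protection-probe-interval and --param stack-clash-protection-guard-size values; they are not code from the patch.

/* Illustrative sketch of the stack-clash allocation policy; not GCC code.  */

#include <stdio.h>

#define PROBE_INTERVAL (1L << 12)   /* assumed 4 KiB probe interval */
#define GUARD_SIZE     (1L << 12)   /* assumed 4 KiB guard region */
#define MIN_UNROLL_PROBES 3

static void
describe_allocation (long size, long last_probe_offset)
{
  /* Small frame: the whole allocation stays inside the guard region
     measured from the last known probe, so no probe is needed
     (NO_PROBE_SMALL_FRAME in the patch).  */
  if (last_probe_offset + size < GUARD_SIZE)
    {
      printf ("%ld bytes: no probe needed\n", size);
      return;
    }

  long rounded_size = size & -PROBE_INTERVAL;
  long num_probes = rounded_size / PROBE_INTERVAL;
  long residual = size - rounded_size;

  if (num_probes < MIN_UNROLL_PROBES)
    /* Few probes: allocate and probe each interval inline (PROBE_INLINE).  */
    printf ("%ld bytes: %ld inline probes, %ld bytes residual\n",
            size, num_probes, residual);
  else
    /* Many probes: allocate and probe in a runtime loop (PROBE_LOOP).  */
    printf ("%ld bytes: loop over %ld probes, %ld bytes residual\n",
            size, num_probes, residual);
}

int
main (void)
{
  /* last_probe_offset == 0 mimics the TARGET_BACKCHAIN case where the
     store to *sp is a known probe.  */
  describe_allocation (2048, 0);           /* no probe */
  describe_allocation (2 * 4096 + 512, 0); /* two inline probes + residual */
  describe_allocation (64 * 4096, 0);      /* probe loop */
  return 0;
}

With these assumed parameters a 2 KiB frame needs no probe at all, an 8.5 KiB frame is allocated with two inline probes followed by a 512 byte residual adjustment, and a 256 KiB frame is allocated by the runtime probing loop, mirroring the three dump_stack_clash_frame_info cases in the diff below.
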
diff --git a/gcc/config/s390/s390.c b/gcc/config/s390/s390.c
index 3c04781f947..45998bc7516 100644
--- a/gcc/config/s390/s390.c
+++ b/gcc/config/s390/s390.c
@@ -10350,6 +10350,184 @@ s390_emit_stack_tie (void)
   emit_insn (gen_stack_tie (mem));
 }
 
+/* Calculate TARGET = REG + OFFSET as s390_emit_prologue would do it.
+   - push too big immediates to the literal pool and annotate the refs
+   - emit frame related notes for stack pointer changes.  */
+
+static rtx
+s390_prologue_plus_offset (rtx target, rtx reg, rtx offset, bool frame_related_p)
+{
+  rtx insn;
+  rtx orig_offset = offset;
+
+  gcc_assert (REG_P (target));
+  gcc_assert (REG_P (reg));
+  gcc_assert (CONST_INT_P (offset));
+
+  if (offset == const0_rtx)                               /* lr/lgr */
+    {
+      insn = emit_move_insn (target, reg);
+    }
+  else if (DISP_IN_RANGE (INTVAL (offset)))               /* la */
+    {
+      insn = emit_move_insn (target, gen_rtx_PLUS (Pmode, reg,
+						   offset));
+    }
+  else
+    {
+      if (!satisfies_constraint_K (offset)                /* ahi/aghi */
+	  && (!TARGET_EXTIMM
+	      || (!satisfies_constraint_Op (offset)       /* alfi/algfi */
+		  && !satisfies_constraint_On (offset)))) /* slfi/slgfi */
+	offset = force_const_mem (Pmode, offset);
+
+      if (target != reg)
+	{
+	  insn = emit_move_insn (target, reg);
+	  RTX_FRAME_RELATED_P (insn) = frame_related_p ? 1 : 0;
+	}
+
+      insn = emit_insn (gen_add2_insn (target, offset));
+
+      if (!CONST_INT_P (offset))
+	{
+	  annotate_constant_pool_refs (&PATTERN (insn));
+
+	  if (frame_related_p)
+	    add_reg_note (insn, REG_FRAME_RELATED_EXPR,
+			  gen_rtx_SET (VOIDmode, target,
+				       gen_rtx_PLUS (Pmode, target,
+						     orig_offset)));
+	}
+    }
+
+  RTX_FRAME_RELATED_P (insn) = frame_related_p ? 1 : 0;
+
+  /* If this is a stack adjustment and we are generating a stack clash
+     prologue, then add a REG_STACK_CHECK note to signal that this insn
+     should be left alone.  */
+  if (flag_stack_clash_protection && target == stack_pointer_rtx)
+    add_reg_note (insn, REG_STACK_CHECK, const0_rtx);
+
+  return insn;
+}
+
+/* Emit a compare instruction with a volatile memory access as stack
+   probe.  It does not waste store tags and does not clobber any
+   registers apart from the condition code.  */
+static void
+s390_emit_stack_probe (rtx addr)
+{
+  rtx tmp = gen_rtx_MEM (Pmode, addr);
+  MEM_VOLATILE_P (tmp) = 1;
+  s390_emit_compare (EQ, gen_rtx_REG (Pmode, 0), tmp);
+  emit_insn (gen_blockage ());
+}
+
+/* Use a runtime loop if we have to emit more probes than this.  */
+#define MIN_UNROLL_PROBES 3
+
+/* Allocate SIZE bytes of stack space, using TEMP_REG as a temporary
+   if necessary.  LAST_PROBE_OFFSET contains the offset of the closest
+   probe relative to the stack pointer.
+
+   Note that SIZE is negative.
+
+   The return value is true if TEMP_REG has been clobbered.  */
+static bool
+allocate_stack_space (rtx size, HOST_WIDE_INT last_probe_offset,
+		      rtx temp_reg)
+{
+  bool temp_reg_clobbered_p = false;
+  HOST_WIDE_INT probe_interval
+    = 1 << PARAM_VALUE (PARAM_STACK_CLASH_PROTECTION_PROBE_INTERVAL);
+  HOST_WIDE_INT guard_size
+    = 1 << PARAM_VALUE (PARAM_STACK_CLASH_PROTECTION_GUARD_SIZE);
+
+  if (flag_stack_clash_protection)
+    {
+      if (last_probe_offset + -INTVAL (size) < guard_size)
+	dump_stack_clash_frame_info (NO_PROBE_SMALL_FRAME, true);
+      else
+	{
+	  rtx offset = GEN_INT (probe_interval - UNITS_PER_LONG);
+	  HOST_WIDE_INT rounded_size = -INTVAL (size) & -probe_interval;
+	  HOST_WIDE_INT num_probes = rounded_size / probe_interval;
+	  HOST_WIDE_INT residual = -INTVAL (size) - rounded_size;
+
+	  if (num_probes < MIN_UNROLL_PROBES)
+	    {
+	      /* Emit unrolled probe statements.  */
+
+	      for (unsigned int i = 0; i < num_probes; i++)
+		{
+		  s390_prologue_plus_offset (stack_pointer_rtx,
+					     stack_pointer_rtx,
+					     GEN_INT (-probe_interval), true);
+		  s390_emit_stack_probe (gen_rtx_PLUS (Pmode,
+						       stack_pointer_rtx,
+						       offset));
+		}
+	      dump_stack_clash_frame_info (PROBE_INLINE, residual != 0);
+	    }
+	  else
+	    {
+	      /* Emit a loop probing the pages.  */
+
+	      rtx loop_start_label = gen_label_rtx ();
+
+	      /* From now on temp_reg will be the CFA register.  */
+	      s390_prologue_plus_offset (temp_reg, stack_pointer_rtx,
+					 GEN_INT (-rounded_size), true);
+	      emit_label (loop_start_label);
+
+	      s390_prologue_plus_offset (stack_pointer_rtx,
+					 stack_pointer_rtx,
+					 GEN_INT (-probe_interval), false);
+	      s390_emit_stack_probe (gen_rtx_PLUS (Pmode,
+						   stack_pointer_rtx,
+						   offset));
+	      emit_cmp_and_jump_insns (stack_pointer_rtx, temp_reg,
+				       GT, NULL_RTX,
+				       Pmode, 1, loop_start_label);
+
+	      /* Without this make_edges ICEs.  */
+	      JUMP_LABEL (get_last_insn ()) = loop_start_label;
+	      LABEL_NUSES (loop_start_label) = 1;
+
+	      /* That's going to be a NOP since stack pointer and
+		 temp_reg are supposed to be the same here.  We just
+		 emit it to set the CFA reg back to r15.  */
+	      s390_prologue_plus_offset (stack_pointer_rtx, temp_reg,
+					 const0_rtx, true);
+	      temp_reg_clobbered_p = true;
+	      dump_stack_clash_frame_info (PROBE_LOOP, residual != 0);
+	    }
+
+	  /* Handle any residual allocation request.  */
+	  s390_prologue_plus_offset (stack_pointer_rtx,
+				     stack_pointer_rtx,
+				     GEN_INT (-residual), true);
+	  last_probe_offset += residual;
+	  if (last_probe_offset >= probe_interval)
+	    s390_emit_stack_probe (gen_rtx_PLUS (Pmode,
+						 stack_pointer_rtx,
+						 GEN_INT (residual
+							  - UNITS_PER_LONG)));
+
+	  return temp_reg_clobbered_p;
+	}
+    }
+
+  /* Subtract frame size from stack pointer.  */
+  s390_prologue_plus_offset (stack_pointer_rtx,
+			     stack_pointer_rtx,
+			     size, true);
+
+  return temp_reg_clobbered_p;
+}
+
+
 /* Expand the prologue into a bunch of separate insns.  */
 
 void
@@ -10391,6 +10569,19 @@ s390_emit_prologue (void)
   else
     temp_reg = gen_rtx_REG (Pmode, 1);
 
+  /* When probing for stack-clash mitigation, we have to track the distance
+     between the stack pointer and the closest known reference.
+
+     Most of the time we have to make a worst case assumption.  The
+     only exception is when TARGET_BACKCHAIN is active, in which case
+     we know *sp (offset 0) was written.  */
+  HOST_WIDE_INT probe_interval
+    = 1 << PARAM_VALUE (PARAM_STACK_CLASH_PROTECTION_PROBE_INTERVAL);
+  HOST_WIDE_INT last_probe_offset
+    = (TARGET_BACKCHAIN
+       ? (TARGET_PACKED_STACK ? STACK_POINTER_OFFSET - UNITS_PER_LONG : 0)
+       : probe_interval - (STACK_BOUNDARY / UNITS_PER_WORD));
+
   /* Save call saved gprs.  */
   if (cfun_frame_layout.first_save_gpr != -1)
     {
@@ -10400,6 +10591,14 @@ s390_emit_prologue (void)
 					  - cfun_frame_layout.first_save_gpr_slot),
 			cfun_frame_layout.first_save_gpr,
 			cfun_frame_layout.last_save_gpr);
+
+      /* This is not 100% correct.  If we have more than one register saved,
+	 then LAST_PROBE_OFFSET can move even closer to sp.  */
+      last_probe_offset
+	= (cfun_frame_layout.gprs_offset +
+	   UNITS_PER_LONG * (cfun_frame_layout.first_save_gpr
+			     - cfun_frame_layout.first_save_gpr_slot));
+
       emit_insn (insn);
     }
 
@@ -10416,6 +10615,8 @@ s390_emit_prologue (void)
       if (cfun_fpr_bit_p (i))
 	{
 	  save_fpr (stack_pointer_rtx, offset, i + 16);
+	  if (offset < last_probe_offset)
+	    last_probe_offset = offset;
 	  offset += 8;
 	}
       else if (!TARGET_PACKED_STACK)
@@ -10429,6 +10630,8 @@ s390_emit_prologue (void)
       if (cfun_fpr_bit_p (i))
 	{
 	  insn = save_fpr (stack_pointer_rtx, offset, i + 16);
+	  if (offset < last_probe_offset)
+	    last_probe_offset = offset;
 	  offset += 8;
 
 	  /* If f4 and f6 are call clobbered they are saved due to stdargs and
@@ -10451,6 +10654,8 @@ s390_emit_prologue (void)
 	if (cfun_fpr_bit_p (i))
 	  {
 	    insn = save_fpr (stack_pointer_rtx, offset, i + 16);
+	    if (offset < last_probe_offset)
+	      last_probe_offset = offset;
 
 	    RTX_FRAME_RELATED_P (insn) = 1;
 	    offset -= 8;
@@ -10470,10 +10675,11 @@ s390_emit_prologue (void)
   if (cfun_frame_layout.frame_size > 0)
     {
       rtx frame_off = GEN_INT (-cfun_frame_layout.frame_size);
-      rtx real_frame_off;
+      rtx stack_pointer_backup_loc;
+      bool temp_reg_clobbered_p;
 
       if (s390_stack_size)
-  	{
+	{
 	  HOST_WIDE_INT stack_guard;
 
 	  if (s390_stack_guard)
@@ -10538,35 +10744,36 @@ s390_emit_prologue (void)
       if (s390_warn_dynamicstack_p && cfun->calls_alloca)
 	warning (0, "%qs uses dynamic stack allocation", current_function_name ());
 
-      /* Save incoming stack pointer into temp reg.  */
-      if (TARGET_BACKCHAIN || next_fpr)
-	insn = emit_insn (gen_move_insn (temp_reg, stack_pointer_rtx));
+      /* Save the location where we could back up the incoming stack
+	 pointer.  */
+      stack_pointer_backup_loc = get_last_insn ();
 
-      /* Subtract frame size from stack pointer.  */
+      temp_reg_clobbered_p = allocate_stack_space (frame_off, last_probe_offset,
+						   temp_reg);
 
-      if (DISP_IN_RANGE (INTVAL (frame_off)))
-	{
-	  insn = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
-			      gen_rtx_PLUS (Pmode, stack_pointer_rtx,
-					    frame_off));
-	  insn = emit_insn (insn);
-	}
-      else
+      if (TARGET_BACKCHAIN || next_fpr)
 	{
-	  if (!CONST_OK_FOR_K (INTVAL (frame_off)))
-	    frame_off = force_const_mem (Pmode, frame_off);
-
-          insn = emit_insn (gen_add2_insn (stack_pointer_rtx, frame_off));
-	  annotate_constant_pool_refs (&PATTERN (insn));
+	  if (temp_reg_clobbered_p)
+	    {
+	      /* allocate_stack_space had to make use of temp_reg and
+		 we need it to hold a backup of the incoming stack
+		 pointer.  Calculate back that value from the current
+		 stack pointer.  */
+	      s390_prologue_plus_offset (temp_reg, stack_pointer_rtx,
+					 GEN_INT (cfun_frame_layout.frame_size),
+					 false);
+	    }
+	  else
+	    {
+	      /* allocate_stack_space did not actually require
+		 temp_reg.  Insert the stack pointer backup insn
+		 before the stack pointer decrement code - knowing now
+		 that the value will survive.  */
+	      emit_insn_after (gen_move_insn (temp_reg, stack_pointer_rtx),
+			       stack_pointer_backup_loc);
+	    }
 	}
 
-      RTX_FRAME_RELATED_P (insn) = 1;
-      real_frame_off = GEN_INT (-cfun_frame_layout.frame_size);
-      add_reg_note (insn, REG_FRAME_RELATED_EXPR,
-		    gen_rtx_SET (VOIDmode, stack_pointer_rtx,
-				 gen_rtx_PLUS (Pmode, stack_pointer_rtx,
-					       real_frame_off)));
-
       /* Set backchain.  */
 
       if (TARGET_BACKCHAIN)
@@ -10590,6 +10797,8 @@ s390_emit_prologue (void)
 	  emit_clobber (addr);
 	}
     }
+  else if (flag_stack_clash_protection)
+    dump_stack_clash_frame_info (NO_PROBE_NO_FRAME, false);
 
   /* Save fprs 8 - 15 (64 bit ABI).  */
 
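
For a sense of the numbers in the hunks above, assuming the default 4 KiB probe interval and a 64-bit target where STACK_BOUNDARY / UNITS_PER_WORD is 8: a prologue without TARGET_BACKCHAIN has to assume the closest guaranteed stack reference is almost a full interval away, so last_probe_offset starts at 4096 - 8 = 4088.  With TARGET_BACKCHAIN the backchain store at *sp (offset 0) is a known reference, so the offset starts at 0 (or at STACK_POINTER_OFFSET - UNITS_PER_LONG for packed stacks).  Each GPR or FPR save below the current value then pulls last_probe_offset closer to the stack pointer before allocate_stack_space decides whether any probes are needed at all.
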
diff --git a/gcc/testsuite/gcc.dg/stack-check-5.c b/gcc/testsuite/gcc.dg/stack-check-5.c
index 2171d9b6c23..3178f5d8ce5 100644
--- a/gcc/testsuite/gcc.dg/stack-check-5.c
+++ b/gcc/testsuite/gcc.dg/stack-check-5.c
@@ -3,6 +3,10 @@
 /* { dg-require-effective-target supports_stack_clash_protection } */
 
 
+/* Otherwise the S/390 back-end might save the stack pointer in f2 ()
+   into an FPR.  */
+/* { dg-additional-options "-msoft-float" { target { s390x-*-* } } } */
+
 extern void foo (char *);
 extern void bar (void);
 
diff --git a/gcc/testsuite/lib/target-supports.exp b/gcc/testsuite/lib/target-supports.exp
index 2c669a9822f..f24c5c6e0ac 100644
--- a/gcc/testsuite/lib/target-supports.exp
+++ b/gcc/testsuite/lib/target-supports.exp
@@ -5422,12 +5422,12 @@ proc check_effective_target_supports_stack_clash_protection { } {
 
    # Temporary until the target bits are fully ACK'd.
 #  if { [istarget aarch*-*-*]
-#       || [istarget s390*-*-*]
 #       || [istarget powerpc*-*-*] || [istarget rs6000*-*-*] } {
 #	return 1
 #  }
 
-    if { [istarget x86_64-*-*] || [istarget i?86-*-*] } {
+    if { [istarget x86_64-*-*] || [istarget i?86-*-*] 
+	  || [istarget s390*-*-*] } {
 	return 1
     }
   return 0
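
As a usage note, the kind of function these prologue probes protect looks roughly like the following; this is an assumed example, not part of the patch or of gcc.dg/stack-check-5.c.  Building it with -fstack-clash-protection on an s390x target should produce either inline probes or the probing loop in the prologue, which can be checked in the generated assembly.

/* Assumed example: a frame far larger than one probe interval.  */

extern void consume (char *);

void
big_frame (void)
{
  char buf[256 * 1024];   /* much larger than the 4 KiB probe interval */
  consume (buf);
}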