commit b49f8fb8a97e9af8e6ba2b65d18195099cd1bb79
Author: law <law@138bc75d-0d04-0410-961f-82ee72b054a4>
Date:   Thu Sep 21 04:30:16 2017 +0000

        * config/s390/s390.c (MIN_UNROLL_PROBES): Define.
        (allocate_stack_space): New function, partially extracted from
        s390_emit_prologue.
        (s390_emit_prologue): Track offset to most recent stack probe.
        Code to allocate space moved into allocate_stack_space.
        Dump actions when no stack is allocated.
        (s390_prologue_plus_offset): New function.
        (s390_emit_stack_probe): Likewise.

        * gcc.dg/stack-check-5.c: Add argument for s390.
        * lib/target-supports.exp:
        (check_effective_target_supports_stack_clash_protection): Enable for
        s390/s390x targets.

    git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@253049 138bc75d-0d04-0410-961f-82ee72b054a4

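Before the diff itself, a brief orientation may help: the new allocate_stack_space chooses between skipping probes entirely (the whole allocation stays within the guard area), emitting a few unrolled probes, or emitting a probe loop once more than MIN_UNROLL_PROBES probes would be needed. The standalone C sketch below mirrors that decision with printf in place of RTL emission; the probe-interval and guard-size values are hypothetical stand-ins for the stack-clash-protection --param settings, not s390 defaults, and the code is an illustration rather than the patch's own logic.

#include <stdio.h>

/* Illustration only: same branch structure as the new allocate_stack_space,
   with hypothetical sizes and printf instead of insn emission.  */

#define PROBE_INTERVAL    4096  /* assumed probe interval */
#define GUARD_SIZE        4096  /* assumed guard size */
#define MIN_UNROLL_PROBES 3     /* as defined by the patch */

static void
plan_allocation (long frame_size, long last_probe_offset)
{
  /* Everything still lands inside the guard area: no probes needed.  */
  if (last_probe_offset + frame_size < GUARD_SIZE)
    {
      printf ("%ld bytes: no probes (small frame)\n", frame_size);
      return;
    }

  long rounded_size = frame_size & -PROBE_INTERVAL;
  long num_probes = rounded_size / PROBE_INTERVAL;
  long residual = frame_size - rounded_size;

  if (num_probes < MIN_UNROLL_PROBES)
    printf ("%ld bytes: %ld unrolled probe(s), residual %ld\n",
            frame_size, num_probes, residual);
  else
    printf ("%ld bytes: probe loop over %ld intervals, residual %ld\n",
            frame_size, num_probes, residual);

  /* Mirror the patch: the residual allocation gets its own probe when it
     leaves the last known probe a full interval away from the new sp.  */
  last_probe_offset += residual;
  if (last_probe_offset >= PROBE_INTERVAL)
    printf ("  plus one probe within the residual area\n");
}

int
main (void)
{
  plan_allocation (2048, 0);      /* e.g. a write at *sp is already known */
  plan_allocation (8192, 4088);   /* worst-case caller: unrolled probes */
  plan_allocation (65536, 4088);  /* large frame: runtime probe loop */
  return 0;
}

Switching to a loop once MIN_UNROLL_PROBES would be exceeded keeps the prologue size bounded for large frames while still touching every probe interval.
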
diff --git a/gcc/config/s390/s390.c b/gcc/config/s390/s390.c
index 3c04781f947..45998bc7516 100644
--- a/gcc/config/s390/s390.c
+++ b/gcc/config/s390/s390.c
@@ -10350,6 +10350,184 @@ s390_emit_stack_tie (void)
   emit_insn (gen_stack_tie (mem));
 }

+/* Calculate TARGET = REG + OFFSET as s390_emit_prologue would do it.
+   - push too big immediates to the literal pool and annotate the refs
+   - emit frame related notes for stack pointer changes.  */
+
+static rtx
+s390_prologue_plus_offset (rtx target, rtx reg, rtx offset, bool frame_related_p)
+{
+  rtx insn;
+  rtx orig_offset = offset;
+
+  gcc_assert (REG_P (target));
+  gcc_assert (REG_P (reg));
+  gcc_assert (CONST_INT_P (offset));
+
+  if (offset == const0_rtx)                 /* lr/lgr */
+    {
+      insn = emit_move_insn (target, reg);
+    }
+  else if (DISP_IN_RANGE (INTVAL (offset))) /* la */
+    {
+      insn = emit_move_insn (target, gen_rtx_PLUS (Pmode, reg,
+                                                   offset));
+    }
+  else
+    {
+      if (!satisfies_constraint_K (offset)                /* ahi/aghi */
+          && (!TARGET_EXTIMM
+              || (!satisfies_constraint_Op (offset)       /* alfi/algfi */
+                  && !satisfies_constraint_On (offset)))) /* slfi/slgfi */
+        offset = force_const_mem (Pmode, offset);
+
+      if (target != reg)
+        {
+          insn = emit_move_insn (target, reg);
+          RTX_FRAME_RELATED_P (insn) = frame_related_p ? 1 : 0;
+        }
+
+      insn = emit_insn (gen_add2_insn (target, offset));
+
+      if (!CONST_INT_P (offset))
+        {
+          annotate_constant_pool_refs (&PATTERN (insn));
+
+          if (frame_related_p)
+            add_reg_note (insn, REG_FRAME_RELATED_EXPR,
+                          gen_rtx_SET (VOIDmode, target,
+                                       gen_rtx_PLUS (Pmode, target,
+                                                     orig_offset)));
+        }
+    }
+
+  RTX_FRAME_RELATED_P (insn) = frame_related_p ? 1 : 0;
+
+  /* If this is a stack adjustment and we are generating a stack clash
+     prologue, then add a REG_STACK_CHECK note to signal that this insn
+     should be left alone.  */
+  if (flag_stack_clash_protection && target == stack_pointer_rtx)
+    add_reg_note (insn, REG_STACK_CHECK, const0_rtx);
+
+  return insn;
+}
+
+/* Emit a compare instruction with a volatile memory access as stack
+   probe.  It does not waste store tags and does not clobber any
+   registers apart from the condition code.  */
+static void
+s390_emit_stack_probe (rtx addr)
+{
+  rtx tmp = gen_rtx_MEM (Pmode, addr);
+  MEM_VOLATILE_P (tmp) = 1;
+  s390_emit_compare (EQ, gen_rtx_REG (Pmode, 0), tmp);
+  emit_insn (gen_blockage ());
+}
+
+/* Use a runtime loop if we have to emit more probes than this.  */
+#define MIN_UNROLL_PROBES 3
+
+/* Allocate SIZE bytes of stack space, using TEMP_REG as a temporary
+   if necessary.  LAST_PROBE_OFFSET contains the offset of the closest
+   probe relative to the stack pointer.
+
+   Note that SIZE is negative.
+
+   The return value is true if TEMP_REG has been clobbered.  */
+static bool
+allocate_stack_space (rtx size, HOST_WIDE_INT last_probe_offset,
+                      rtx temp_reg)
+{
+  bool temp_reg_clobbered_p = false;
+  HOST_WIDE_INT probe_interval
+    = 1 << PARAM_VALUE (PARAM_STACK_CLASH_PROTECTION_PROBE_INTERVAL);
+  HOST_WIDE_INT guard_size
+    = 1 << PARAM_VALUE (PARAM_STACK_CLASH_PROTECTION_GUARD_SIZE);
+
+  if (flag_stack_clash_protection)
+    {
+      if (last_probe_offset + -INTVAL (size) < guard_size)
+        dump_stack_clash_frame_info (NO_PROBE_SMALL_FRAME, true);
+      else
+        {
+          rtx offset = GEN_INT (probe_interval - UNITS_PER_LONG);
+          HOST_WIDE_INT rounded_size = -INTVAL (size) & -probe_interval;
+          HOST_WIDE_INT num_probes = rounded_size / probe_interval;
+          HOST_WIDE_INT residual = -INTVAL (size) - rounded_size;
+
+          if (num_probes < MIN_UNROLL_PROBES)
+            {
+              /* Emit unrolled probe statements.  */
+
+              for (unsigned int i = 0; i < num_probes; i++)
+                {
+                  s390_prologue_plus_offset (stack_pointer_rtx,
+                                             stack_pointer_rtx,
+                                             GEN_INT (-probe_interval), true);
+                  s390_emit_stack_probe (gen_rtx_PLUS (Pmode,
+                                                       stack_pointer_rtx,
+                                                       offset));
+                }
+              dump_stack_clash_frame_info (PROBE_INLINE, residual != 0);
+            }
+          else
+            {
+              /* Emit a loop probing the pages.  */
+
+              rtx loop_start_label = gen_label_rtx ();
+
+              /* From now on temp_reg will be the CFA register.  */
+              s390_prologue_plus_offset (temp_reg, stack_pointer_rtx,
+                                         GEN_INT (-rounded_size), true);
+              emit_label (loop_start_label);
+
+              s390_prologue_plus_offset (stack_pointer_rtx,
+                                         stack_pointer_rtx,
+                                         GEN_INT (-probe_interval), false);
+              s390_emit_stack_probe (gen_rtx_PLUS (Pmode,
+                                                   stack_pointer_rtx,
+                                                   offset));
+              emit_cmp_and_jump_insns (stack_pointer_rtx, temp_reg,
+                                       GT, NULL_RTX,
+                                       Pmode, 1, loop_start_label);
+
+              /* Without this make_edges ICEes.  */
+              JUMP_LABEL (get_last_insn ()) = loop_start_label;
+              LABEL_NUSES (loop_start_label) = 1;
+
+              /* That's going to be a NOP since stack pointer and
+                 temp_reg are supposed to be the same here.  We just
+                 emit it to set the CFA reg back to r15.  */
+              s390_prologue_plus_offset (stack_pointer_rtx, temp_reg,
+                                         const0_rtx, true);
+              temp_reg_clobbered_p = true;
+              dump_stack_clash_frame_info (PROBE_LOOP, residual != 0);
+            }
+
+          /* Handle any residual allocation request.  */
+          s390_prologue_plus_offset (stack_pointer_rtx,
+                                     stack_pointer_rtx,
+                                     GEN_INT (-residual), true);
+          last_probe_offset += residual;
+          if (last_probe_offset >= probe_interval)
+            s390_emit_stack_probe (gen_rtx_PLUS (Pmode,
+                                                 stack_pointer_rtx,
+                                                 GEN_INT (residual
+                                                          - UNITS_PER_LONG)));
+
+          return temp_reg_clobbered_p;
+        }
+    }
+
+  /* Subtract frame size from stack pointer.  */
+  s390_prologue_plus_offset (stack_pointer_rtx,
+                             stack_pointer_rtx,
+                             size, true);
+
+  return temp_reg_clobbered_p;
+}
+
+
 /* Expand the prologue into a bunch of separate insns.  */

 void
@@ -10391,6 +10569,19 @@ s390_emit_prologue (void)
   else
     temp_reg = gen_rtx_REG (Pmode, 1);

+  /* When probing for stack-clash mitigation, we have to track the distance
+     between the stack pointer and closest known reference.
+
+     Most of the time we have to make a worst cast assumption.  The
+     only exception is when TARGET_BACKCHAIN is active, in which case
+     we know *sp (offset 0) was written.  */
+  HOST_WIDE_INT probe_interval
+    = 1 << PARAM_VALUE (PARAM_STACK_CLASH_PROTECTION_PROBE_INTERVAL);
+  HOST_WIDE_INT last_probe_offset
+    = (TARGET_BACKCHAIN
+       ? (TARGET_PACKED_STACK ? STACK_POINTER_OFFSET - UNITS_PER_LONG : 0)
+       : probe_interval - (STACK_BOUNDARY / UNITS_PER_WORD));
+
   /* Save call saved gprs.  */
   if (cfun_frame_layout.first_save_gpr != -1)
     {
@@ -10400,6 +10591,14 @@ s390_emit_prologue (void)
                                           - cfun_frame_layout.first_save_gpr_slot),
                         cfun_frame_layout.first_save_gpr,
                         cfun_frame_layout.last_save_gpr);
+
+      /* This is not 100% correct.  If we have more than one register saved,
+         then LAST_PROBE_OFFSET can move even closer to sp.  */
+      last_probe_offset
+        = (cfun_frame_layout.gprs_offset +
+           UNITS_PER_LONG * (cfun_frame_layout.first_save_gpr
+                             - cfun_frame_layout.first_save_gpr_slot));
+
       emit_insn (insn);
     }

@@ -10416,6 +10615,8 @@ s390_emit_prologue (void)
         if (cfun_fpr_bit_p (i))
           {
             save_fpr (stack_pointer_rtx, offset, i + 16);
+            if (offset < last_probe_offset)
+              last_probe_offset = offset;
             offset += 8;
           }
         else if (!TARGET_PACKED_STACK)
@@ -10429,6 +10630,8 @@ s390_emit_prologue (void)
         if (cfun_fpr_bit_p (i))
           {
             insn = save_fpr (stack_pointer_rtx, offset, i + 16);
+            if (offset < last_probe_offset)
+              last_probe_offset = offset;
             offset += 8;

             /* If f4 and f6 are call clobbered they are saved due to stdargs and
@@ -10451,6 +10654,8 @@ s390_emit_prologue (void)
         if (cfun_fpr_bit_p (i))
           {
             insn = save_fpr (stack_pointer_rtx, offset, i + 16);
+            if (offset < last_probe_offset)
+              last_probe_offset = offset;

             RTX_FRAME_RELATED_P (insn) = 1;
             offset -= 8;
@@ -10470,10 +10675,11 @@ s390_emit_prologue (void)
   if (cfun_frame_layout.frame_size > 0)
     {
       rtx frame_off = GEN_INT (-cfun_frame_layout.frame_size);
-      rtx real_frame_off;
+      rtx stack_pointer_backup_loc;
+      bool temp_reg_clobbered_p;

       if (s390_stack_size)
-        {
+        {
           HOST_WIDE_INT stack_guard;

           if (s390_stack_guard)
@@ -10538,35 +10744,36 @@ s390_emit_prologue (void)
       if (s390_warn_dynamicstack_p && cfun->calls_alloca)
         warning (0, "%qs uses dynamic stack allocation", current_function_name ());

-      /* Save incoming stack pointer into temp reg.  */
-      if (TARGET_BACKCHAIN || next_fpr)
-        insn = emit_insn (gen_move_insn (temp_reg, stack_pointer_rtx));
+      /* Save the location where we could backup the incoming stack
+         pointer.  */
+      stack_pointer_backup_loc = get_last_insn ();

-      /* Subtract frame size from stack pointer.  */
+      temp_reg_clobbered_p = allocate_stack_space (frame_off, last_probe_offset,
+                                                   temp_reg);

-      if (DISP_IN_RANGE (INTVAL (frame_off)))
-        {
-          insn = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
-                              gen_rtx_PLUS (Pmode, stack_pointer_rtx,
-                                            frame_off));
-          insn = emit_insn (insn);
-        }
-      else
+      if (TARGET_BACKCHAIN || next_fpr)
         {
-          if (!CONST_OK_FOR_K (INTVAL (frame_off)))
-            frame_off = force_const_mem (Pmode, frame_off);
-
-          insn = emit_insn (gen_add2_insn (stack_pointer_rtx, frame_off));
-          annotate_constant_pool_refs (&PATTERN (insn));
+          if (temp_reg_clobbered_p)
+            {
+              /* allocate_stack_space had to make use of temp_reg and
+                 we need it to hold a backup of the incoming stack
+                 pointer.  Calculate back that value from the current
+                 stack pointer.  */
+              s390_prologue_plus_offset (temp_reg, stack_pointer_rtx,
+                                         GEN_INT (cfun_frame_layout.frame_size),
+                                         false);
+            }
+          else
+            {
+              /* allocate_stack_space didn't actually required
+                 temp_reg.  Insert the stack pointer backup insn
+                 before the stack pointer decrement code - knowing now
+                 that the value will survive.  */
+              emit_insn_after (gen_move_insn (temp_reg, stack_pointer_rtx),
+                               stack_pointer_backup_loc);
+            }
         }

-      RTX_FRAME_RELATED_P (insn) = 1;
-      real_frame_off = GEN_INT (-cfun_frame_layout.frame_size);
-      add_reg_note (insn, REG_FRAME_RELATED_EXPR,
-                    gen_rtx_SET (VOIDmode, stack_pointer_rtx,
-                                 gen_rtx_PLUS (Pmode, stack_pointer_rtx,
-                                               real_frame_off)));
-
       /* Set backchain.  */

       if (TARGET_BACKCHAIN)
@@ -10590,6 +10797,8 @@ s390_emit_prologue (void)
           emit_clobber (addr);
         }
     }
+  else if (flag_stack_clash_protection)
+    dump_stack_clash_frame_info (NO_PROBE_NO_FRAME, false);

   /* Save fprs 8 - 15 (64 bit ABI).  */

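The s390.c changes above thread a last_probe_offset value through the prologue: it starts from a worst-case guess (or from the backchain slot when TARGET_BACKCHAIN guarantees *sp was stored) and is updated as register saves land closer to the stack pointer. A standalone C sketch of that bookkeeping, using invented offsets rather than real s390 frame-layout values, and simplified relative to the patch:

#include <stdio.h>

/* Illustration of the last_probe_offset bookkeeping; the constants and
   save-slot offsets below are invented, not s390 ABI values.  */

#define PROBE_INTERVAL 4096   /* assumed probe interval */
#define WORD_BYTES        8   /* stand-in for UNITS_PER_LONG */

int
main (void)
{
  int have_backchain = 0;    /* hypothetical: no backchain store emitted */

  /* Worst case: the caller's most recent implicit probe may sit almost a
     full probe interval above the incoming stack pointer.  A backchain
     store would tell us offset 0 was just written.  */
  long last_probe_offset
    = have_backchain ? 0 : PROBE_INTERVAL - WORD_BYTES;

  /* Every register save is itself a write into the frame, so it counts
     as a probe; remember the smallest offset seen.  */
  long save_offsets[] = { 48, 80, 88 };    /* invented GPR/FPR save slots */
  for (unsigned int i = 0; i < sizeof save_offsets / sizeof *save_offsets; i++)
    if (save_offsets[i] < last_probe_offset)
      last_probe_offset = save_offsets[i];

  printf ("closest known probe: %ld bytes above sp\n", last_probe_offset);
  /* This is the value handed to allocate_stack_space, which decides
     whether the frame allocation needs explicit probes at all.  */
  return 0;
}
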
diff --git a/gcc/testsuite/gcc.dg/stack-check-5.c b/gcc/testsuite/gcc.dg/stack-check-5.c
index 2171d9b6c23..3178f5d8ce5 100644
--- a/gcc/testsuite/gcc.dg/stack-check-5.c
+++ b/gcc/testsuite/gcc.dg/stack-check-5.c
@@ -3,6 +3,10 @@
 /* { dg-require-effective-target supports_stack_clash_protection } */


+/* Otherwise the S/390 back-end might save the stack pointer in f2 ()
+   into an FPR.  */
+/* { dg-additional-options "-msoft-float" { target { s390x-*-* } } } */
+
 extern void foo (char *);
 extern void bar (void);

diff --git a/gcc/testsuite/lib/target-supports.exp b/gcc/testsuite/lib/target-supports.exp
index 2c669a9822f..f24c5c6e0ac 100644
--- a/gcc/testsuite/lib/target-supports.exp
+++ b/gcc/testsuite/lib/target-supports.exp
@@ -5422,12 +5422,12 @@ proc check_effective_target_supports_stack_clash_protection { } {

     # Temporary until the target bits are fully ACK'd.
 #    if { [istarget aarch*-*-*]
-#       || [istarget s390*-*-*]
 #       || [istarget powerpc*-*-*] || [istarget rs6000*-*-*] } {
 #       return 1
 #    }

-    if { [istarget x86_64-*-*] || [istarget i?86-*-*] } {
+    if { [istarget x86_64-*-*] || [istarget i?86-*-*]
+         || [istarget s390*-*-*] } {
         return 1
     }
     return 0