author     rgrimes <rgrimes@FreeBSD.org>  1995-05-30 05:05:38 +0000
committer  rgrimes <rgrimes@FreeBSD.org>  1995-05-30 05:05:38 +0000
commit     2ad6f3dee6bc54535003f4ab26bbc587b7efa38c (patch)
tree       a3959baf797787918878bec6d58d6a0fb743ad0a  /gnu/usr.bin/cc/cc_int/reorg.c
parent     4f960dd75f181e9fa2edc44324f15f0ab5c64b37 (diff)
Remove trailing whitespace.
Diffstat (limited to 'gnu/usr.bin/cc/cc_int/reorg.c')
-rw-r--r--  gnu/usr.bin/cc/cc_int/reorg.c  88
1 file changed, 44 insertions, 44 deletions
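Every hunk below pairs a removed line with an otherwise identical added one; the two differ only in the blanks and tabs before the newline, which the rendered page cannot show. As a rough illustration of the transformation (the commit records only the result, not the tool actually used), a filter along the following lines would produce exactly this kind of matched insertion/deletion diff. All names are invented for the sketch, and it assumes each input line fits in the buffer.

#include <stdio.h>
#include <string.h>

/*
 * Illustrative filter: copy stdin to stdout, dropping trailing
 * blanks and tabs from every line.  Not the committer's tool,
 * just a model of the change this diff records.
 */
int
main(void)
{
	char buf[4096];		/* assumes no source line is longer */

	while (fgets(buf, sizeof buf, stdin) != NULL) {
		size_t len = strcspn(buf, "\n");	/* length sans newline */
		int had_newline = buf[len] == '\n';

		/* Trim trailing spaces and tabs. */
		while (len > 0 && (buf[len - 1] == ' ' || buf[len - 1] == '\t'))
			len--;
		buf[len] = '\0';

		fputs(buf, stdout);
		if (had_newline)
			putchar('\n');
	}
	return 0;
}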
diff --git a/gnu/usr.bin/cc/cc_int/reorg.c b/gnu/usr.bin/cc/cc_int/reorg.c
index c90c055..d977404 100644
--- a/gnu/usr.bin/cc/cc_int/reorg.c
+++ b/gnu/usr.bin/cc/cc_int/reorg.c
@@ -54,7 +54,7 @@ the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
is taken.
The HP-PA always has a branch delay slot. For unconditional branches
- its effects can be annulled when the branch is taken. The effects
+ its effects can be annulled when the branch is taken. The effects
of the delay slot in a conditional branch can be nullified for forward
taken branches, or for untaken backward branches. This means
we can hoist insns from the fall-through path for forward branches or
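The hoisting rule quoted in that comment (the hunk cuts the sentence short; the complement, stealing insns from the branch target for backward branches, follows in the source) can be modeled as a small decision function. This is an illustrative sketch with invented names, not code from reorg.c:

/*
 * Which path may donate an insn into the annulled delay slot of a
 * HP-PA conditional branch.  A forward branch nullifies the slot
 * when taken, so a fall-through insn is safe either way; a backward
 * branch nullifies it when not taken, so a target insn is safe.
 */
enum donor { DONOR_FALLTHROUGH, DONOR_TARGET };

static enum donor
hppa_delay_donor(int branch_is_forward)
{
	return branch_is_forward ? DONOR_FALLTHROUGH : DONOR_TARGET;
}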
@@ -335,7 +335,7 @@ mark_referenced_resources (x, res, include_delayed_effects)
We can not just fall through here since then we would be confused
by the ASM_INPUT rtx inside ASM_OPERANDS, which do not indicate
traditional asms unlike their normal usage. */
-
+
for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
mark_referenced_resources (ASM_OPERANDS_INPUT (x, i), res, 0);
return;
@@ -576,7 +576,7 @@ mark_set_resources (x, res, in_dest, include_delayed_effects)
case CLOBBER:
mark_set_resources (XEXP (x, 0), res, 1, 0);
return;
-
+
case SEQUENCE:
for (i = 0; i < XVECLEN (x, 0); i++)
if (! (INSN_ANNULLED_BRANCH_P (XVECEXP (x, 0, 0))
@@ -773,7 +773,7 @@ find_end_label ()
|| GET_CODE (PATTERN (insn)) == CLOBBER)))
insn = PREV_INSN (insn);
- /* When a target threads its epilogue we might already have a
+ /* When a target threads its epilogue we might already have a
suitable return insn. If so put a label before it for the
end_of_function_label. */
if (GET_CODE (insn) == BARRIER
@@ -852,7 +852,7 @@ emit_delay_sequence (insn, list, length, avail)
rtx delay_insn = copy_rtx (insn);
/* If INSN is followed by a BARRIER, delete the BARRIER since it will only
- confuse further processing. Update LAST in case it was the last insn.
+ confuse further processing. Update LAST in case it was the last insn.
We will put the BARRIER back in later. */
if (NEXT_INSN (insn) && GET_CODE (NEXT_INSN (insn)) == BARRIER)
{
@@ -914,7 +914,7 @@ emit_delay_sequence (insn, list, length, avail)
if (NEXT_INSN (seq_insn) && GET_CODE (NEXT_INSN (seq_insn)) == INSN
&& GET_CODE (PATTERN (NEXT_INSN (seq_insn))) == SEQUENCE)
PREV_INSN (XVECEXP (PATTERN (NEXT_INSN (seq_insn)), 0, 0)) = seq_insn;
-
+
/* If there used to be a BARRIER, put it back. */
if (had_barrier)
emit_barrier_after (seq_insn);
@@ -940,7 +940,7 @@ add_to_delay_list (insn, delay_list)
if (delay_list == 0)
{
struct target_info *tinfo;
-
+
for (tinfo = target_hash_table[INSN_UID (insn) % TARGET_HASH_PRIME];
tinfo; tinfo = tinfo->next)
if (tinfo->uid == INSN_UID (insn))
@@ -957,7 +957,7 @@ add_to_delay_list (insn, delay_list)
XEXP (delay_list, 1) = add_to_delay_list (insn, XEXP (delay_list, 1));
return delay_list;
-}
+}
/* Delete INSN from the the delay slot of the insn that it is in. This may
produce an insn without anything in its delay slots. */
@@ -1215,15 +1215,15 @@ get_jump_flags (insn, label)
&& INSN_UID (insn) <= max_uid
&& label != 0
&& INSN_UID (label) <= max_uid)
- flags
+ flags
= (uid_to_ruid[INSN_UID (label)] > uid_to_ruid[INSN_UID (insn)])
? ATTR_FLAG_forward : ATTR_FLAG_backward;
/* No valid direction information. */
else
flags = 0;
-
+
/* If insn is a conditional branch call mostly_true_jump to get
- determine the branch prediction.
+ determine the branch prediction.
Non conditional branches are predicted as very likely taken. */
if (GET_CODE (insn) == JUMP_INSN
@@ -1281,7 +1281,7 @@ rare_destination (insn)
case CODE_LABEL:
return 0;
case BARRIER:
- /* A BARRIER can either be after a JUMP_INSN or a CALL_INSN. We
+ /* A BARRIER can either be after a JUMP_INSN or a CALL_INSN. We
don't scan past JUMP_INSNs, so any barrier we find here must
have been after a CALL_INSN and hence mean the call doesn't
return. */
@@ -1333,7 +1333,7 @@ mostly_true_jump (jump_insn, condition)
{
/* If this is the test of a loop, it is very likely true. We scan
backwards from the target label. If we find a NOTE_INSN_LOOP_BEG
- before the next real insn, we assume the branch is to the top of
+ before the next real insn, we assume the branch is to the top of
the loop. */
for (insn = PREV_INSN (target_label);
insn && GET_CODE (insn) == NOTE;
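A toy model of the loop-top heuristic this hunk touches, using simplified insn records in place of GCC's rtx chain (all names invented for illustration):

/* Minimal stand-ins for the insn chain and note markers. */
struct toy_insn {
	struct toy_insn *prev;
	int is_note;		/* non-executable marker */
	int is_loop_begin;	/* models NOTE_INSN_LOOP_BEG */
};

/*
 * Walk backward from the branch target, skipping only notes.  A
 * loop-begin note before any real insn means the jump goes to the
 * top of a loop, so mostly_true_jump predicts it very likely taken.
 */
static int
targets_loop_top(const struct toy_insn *target_label)
{
	const struct toy_insn *insn;

	for (insn = target_label->prev;
	     insn != NULL && insn->is_note;
	     insn = insn->prev)
		if (insn->is_loop_begin)
			return 1;
	return 0;
}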
@@ -1369,7 +1369,7 @@ mostly_true_jump (jump_insn, condition)
return 2;
}
- /* If we couldn't figure out what this jump was, assume it won't be
+ /* If we couldn't figure out what this jump was, assume it won't be
taken. This should be rare. */
if (condition == 0)
return 0;
@@ -1419,7 +1419,7 @@ get_branch_condition (insn, target)
{
rtx pat = PATTERN (insn);
rtx src;
-
+
if (condjump_in_parallel_p (insn))
pat = XVECEXP (pat, 0, 0);
@@ -1665,13 +1665,13 @@ steal_delay_list_from_target (insn, condition, seq, delay_list,
return delay_list;
}
-/* Similar to steal_delay_list_from_target except that SEQ is on the
+/* Similar to steal_delay_list_from_target except that SEQ is on the
fallthrough path of INSN. Here we only do something if the delay insn
of SEQ is an unconditional branch. In that case we steal its delay slot
for INSN since unconditional branches are much easier to fill. */
static rtx
-steal_delay_list_from_fallthrough (insn, condition, seq,
+steal_delay_list_from_fallthrough (insn, condition, seq,
delay_list, sets, needed, other_needed,
slots_to_fill, pslots_filled, pannul_p)
rtx insn, condition;
@@ -1956,17 +1956,17 @@ redundant_insn_p (insn, target, delay_list)
return 0;
/* Stop for an INSN or JUMP_INSN with delayed effects and its delay
- slots because it is difficult to track its resource needs
+ slots because it is difficult to track its resource needs
correctly. */
#ifdef INSN_SETS_ARE_DELAYED
if (INSN_SETS_ARE_DELAYED (XVECEXP (pat, 0, 0)))
- return 0;
+ return 0;
#endif
#ifdef INSN_REFERENCES_ARE_DELAYED
if (INSN_REFERENCES_ARE_DELAYED (XVECEXP (pat, 0, 0)))
- return 0;
+ return 0;
#endif
/* See if any of the insns in the delay slot match, updating
@@ -2058,12 +2058,12 @@ redundant_insn_p (insn, target, delay_list)
#ifdef INSN_SETS_ARE_DELAYED
if (INSN_SETS_ARE_DELAYED (XVECEXP (pat, 0, 0)))
- return 0;
+ return 0;
#endif
#ifdef INSN_REFERENCES_ARE_DELAYED
if (INSN_REFERENCES_ARE_DELAYED (XVECEXP (pat, 0, 0)))
- return 0;
+ return 0;
#endif
/* See if any of the insns in the delay slot match, updating
@@ -2092,7 +2092,7 @@ redundant_insn_p (insn, target, delay_list)
}
- /* If the insn requiring the delay slot conflicts with INSN, we
+ /* If the insn requiring the delay slot conflicts with INSN, we
must stop. */
if (insn_sets_resource_p (XVECEXP (pat, 0, 0), &needed, 1))
return 0;
@@ -2210,7 +2210,7 @@ update_block (insn, where)
{
int b;
- /* Ignore if this was in a delay slot and it came from the target of
+ /* Ignore if this was in a delay slot and it came from the target of
a branch. */
if (INSN_FROM_TARGET_P (insn))
return;
@@ -2466,7 +2466,7 @@ mark_target_live_regs (target, res)
}
else
{
- /* Allocate a place to put our results and chain it into the
+ /* Allocate a place to put our results and chain it into the
hash table. */
tinfo = (struct target_info *) oballoc (sizeof (struct target_info));
tinfo->uid = INSN_UID (target);
@@ -2593,7 +2593,7 @@ mark_target_live_regs (target, res)
= (first_regno
+ HARD_REGNO_NREGS (first_regno,
GET_MODE (XEXP (link, 0))));
-
+
for (i = first_regno; i < last_regno; i++)
SET_HARD_REG_BIT (pending_dead_regs, i);
}
@@ -2612,7 +2612,7 @@ mark_target_live_regs (target, res)
= (first_regno
+ HARD_REGNO_NREGS (first_regno,
GET_MODE (XEXP (link, 0))));
-
+
for (i = first_regno; i < last_regno; i++)
CLEAR_HARD_REG_BIT (current_live_regs, i);
}
@@ -2795,7 +2795,7 @@ fill_simple_delay_slots (first, non_jumps_p)
|| (GET_CODE (insn) == JUMP_INSN && non_jumps_p)
|| (GET_CODE (insn) != JUMP_INSN && ! non_jumps_p))
continue;
-
+
if (GET_CODE (insn) == JUMP_INSN)
flags = get_jump_flags (insn, JUMP_LABEL (insn));
else
@@ -2819,7 +2819,7 @@ fill_simple_delay_slots (first, non_jumps_p)
insn must exist when it is subsequently scanned.
This is tried on each insn with delay slots as some machines
- have insns which perform calls, but are not represented as
+ have insns which perform calls, but are not represented as
CALL_INSNs. */
slots_filled = 0;
@@ -2854,7 +2854,7 @@ fill_simple_delay_slots (first, non_jumps_p)
forward in execution sequence), it must not need or set any resources
that were set by later insns and must not set any resources that
are needed for those insns.
-
+
The delay slot insn itself sets resources unless it is a call
(in which case the called routine, not the insn itself, is doing
the setting). */
@@ -2878,7 +2878,7 @@ fill_simple_delay_slots (first, non_jumps_p)
if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
continue;
- /* Check for resource conflict first, to avoid unnecessary
+ /* Check for resource conflict first, to avoid unnecessary
splitting. */
if (! insn_references_resource_p (trial, &set, 1)
&& ! insn_sets_resource_p (trial, &set, 1)
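The three tests just above implement the rule stated in the earlier comment: a candidate moved forward into the delay slot may not read or write any resource the skipped insns set, nor write one they still need. A minimal bitmask model of that predicate (invented names; the real resource sets are structs queried through insn_references_resource_p and insn_sets_resource_p, not single words):

/* May TRIAL move forward past the skipped insns into the slot? */
static int
may_fill_forward(unsigned trial_uses, unsigned trial_sets,
    unsigned later_sets, unsigned later_needs)
{
	return (trial_uses & later_sets) == 0	/* doesn't read what they set */
	    && (trial_sets & later_sets) == 0	/* doesn't clobber what they set */
	    && (trial_sets & later_needs) == 0;	/* doesn't clobber their inputs */
}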
@@ -2921,7 +2921,7 @@ fill_simple_delay_slots (first, non_jumps_p)
#if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)
if (slots_filled != slots_to_fill
&& delay_list == 0
- && GET_CODE (insn) == JUMP_INSN
+ && GET_CODE (insn) == JUMP_INSN
&& (condjump_p (insn) || condjump_in_parallel_p (insn)))
{
delay_list = optimize_skip (insn);
@@ -2966,7 +2966,7 @@ fill_simple_delay_slots (first, non_jumps_p)
mark_referenced_resources (insn, &needed, 1);
maybe_never = 1;
}
- else
+ else
{
mark_set_resources (insn, &set, 0, 1);
mark_referenced_resources (insn, &needed, 1);
@@ -2977,7 +2977,7 @@ fill_simple_delay_slots (first, non_jumps_p)
target = JUMP_LABEL (insn);
target_uses = LABEL_NUSES (target) - 1;
}
-
+
}
for (trial = next_nonnote_insn (insn); trial; trial = next_trial)
@@ -3110,7 +3110,7 @@ fill_simple_delay_slots (first, non_jumps_p)
else
new_label = find_end_label ();
- delay_list
+ delay_list
= add_to_delay_list (copy_rtx (next_trial), delay_list);
slots_filled++;
reorg_redirect_jump (trial, new_label);
@@ -3135,7 +3135,7 @@ fill_simple_delay_slots (first, non_jumps_p)
#ifdef DELAY_SLOTS_FOR_EPILOGUE
/* See if the epilogue needs any delay slots. Try to fill them if so.
- The only thing we can do is scan backwards from the end of the
+ The only thing we can do is scan backwards from the end of the
function. If we did this in a previous pass, it is incorrect to do it
again. */
if (current_function_epilogue_delay_list)
@@ -3783,7 +3783,7 @@ relax_delay_slots (first)
continue;
}
}
-
+
/* If this is an unconditional jump and the previous insn is a
conditional jump, try reversing the condition of the previous
insn and swapping our targets. The next pass might be able to
@@ -3851,7 +3851,7 @@ relax_delay_slots (first)
if (trial == 0 && target_label != 0)
trial = find_end_label ();
- if (trial != target_label
+ if (trial != target_label
&& redirect_with_delay_slots_safe_p (delay_insn, trial, insn))
{
reorg_redirect_jump (delay_insn, trial);
@@ -3887,7 +3887,7 @@ relax_delay_slots (first)
if (target_label == 0)
target_label = find_end_label ();
- if (redirect_with_delay_slots_safe_p (delay_insn, target_label,
+ if (redirect_with_delay_slots_safe_p (delay_insn, target_label,
insn))
{
reorg_redirect_jump (delay_insn, target_label);
@@ -4032,7 +4032,7 @@ make_return_insns (first)
real_return_label = get_label_before (insn);
break;
}
-
+
/* Show an extra usage of REAL_RETURN_LABEL so it won't go away if it
was equal to END_OF_FUNCTION_LABEL. */
LABEL_NUSES (real_return_label)++;
@@ -4154,7 +4154,7 @@ dbr_schedule (first, file)
flag_no_peephole = old_flag_no_peephole;
#endif
- /* If the current function has no insns other than the prologue and
+ /* If the current function has no insns other than the prologue and
epilogue, then do not try to fill any delay slots. */
if (n_basic_blocks == 0)
return;
@@ -4173,7 +4173,7 @@ dbr_schedule (first, file)
uid_to_ruid = (int *) alloca ((max_uid + 1) * sizeof (int *));
for (i = 0, insn = first; insn; i++, insn = NEXT_INSN (insn))
uid_to_ruid[INSN_UID (insn)] = i;
-
+
/* Initialize the list of insns that need filling. */
if (unfilled_firstobj == 0)
{
@@ -4193,12 +4193,12 @@ dbr_schedule (first, file)
&& (GET_CODE (PATTERN (insn)) == ADDR_VEC
|| GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
continue;
-
+
if (num_delay_slots (insn) > 0)
obstack_ptr_grow (&unfilled_slots_obstack, insn);
/* Ensure all jumps go to the last of a set of consecutive labels. */
- if (GET_CODE (insn) == JUMP_INSN
+ if (GET_CODE (insn) == JUMP_INSN
&& (condjump_p (insn) || condjump_in_parallel_p (insn))
&& JUMP_LABEL (insn) != 0
&& ((target = prev_label (next_active_insn (JUMP_LABEL (insn))))