alias.c (mems_in_disjoint_alias_sets_p, [...]): Use gcc_assert and gcc_unreachable instead of abort.

	* alias.c (mems_in_disjoint_alias_sets_p, record_alias_subset,
	record_set, rtx_equal_for_memref_p, init_alias_analysis): Use
	gcc_assert and gcc_unreachable instead of abort.
	* alloc-pool.c (abort, fancy_abort): Remove.
	(create_alloc_pool, free_alloc_pool, pool_alloc, pool_free): Use
	gcc_assert or gcc_unreachable.
	* attribs.c (init_attributes, decl_attributes): Likewise.
	* bb-reorder.c (FREE, find_traces_1_round, copy_bb,
	mark_bb_for_unlikely_executed_section, add_labels_and_missing_jumps,
	fix_crossing_conditional_branches,
	fix_crossing_unconditional_branches): Likewise.
	* bitmap.c (bitmap_first_set_bit, bitmap_last_set_bit,
	bitmap_operation): Likewise.
	* bt-load.c (insn_sets_btr_p, augment_live_range, move_btr_def):
	Likewise.
	* builtins.c (c_readstr, expand_builtin_longjmp, apply_args_size,
	apply_result_size, expand_builtin_apply, expand_builtin_mathfn,
	expand_builtin_mathfn_2, expand_builtin_mathfn_3,
	builtin_memcpy_read_str, expand_movstr, expand_builtin_stpcpy,
	expand_builtin_memcmp, expand_builtin_args_info,
	std_gimplify_va_arg_expr, expand_builtin_unop, expand_builtin_fputs,
	expand_builtin_profile_func, expand_builtin_fork_or_exec,
	fold_builtin_bitop, fold_builtin_classify, fold_builtin_fputs):
	Likewise.

From-SVN: r86797
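The conversion follows two idioms: a negated "if (...) abort ();" guard becomes a positive gcc_assert (...), and an abort () sitting in a switch default or other unreachable path becomes gcc_unreachable ().  The sketch below only illustrates that mapping under simplified assumptions; it is not GCC's actual macro definitions (the real ones live in system.h and route through fancy_abort), and the helper functions here are invented for the example.

/* Illustrative only: simplified stand-ins for GCC's checking macros.
   fancy_abort is a hand-rolled stand-in for GCC's internal-error
   reporter.  */
#include <stdio.h>
#include <stdlib.h>

static void
fancy_abort (const char *file, int line, const char *function)
{
  fprintf (stderr, "internal error in %s, at %s:%d\n", function, file, line);
  abort ();
}

#define gcc_assert(EXPR) \
  ((void) ((EXPR) ? 0 : (fancy_abort (__FILE__, __LINE__, __FUNCTION__), 0)))
#define gcc_unreachable() fancy_abort (__FILE__, __LINE__, __FUNCTION__)

/* Idiom 1: "if (!pool) abort ();" becomes a positive assertion.  */
static void
check_pool (void *pool)
{
  gcc_assert (pool);
}

/* Idiom 2: "default: abort ();" becomes gcc_unreachable ().  */
static int
classify (int code)
{
  switch (code)
    {
    case 0:
      return 1;
    default:
      gcc_unreachable ();
    }
}

int
main (void)
{
  int dummy = 0;
  check_pool (&dummy);
  return classify (0) - 1;
}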
This commit is contained in:
parent 0de4325e0b
commit 298e6adcf2

gcc/alias.c | 19
gcc/alias.c
@@ -229,7 +229,6 @@ get_alias_set_entry (HOST_WIDE_INT alias_set)
 static inline int
 mems_in_disjoint_alias_sets_p (rtx mem1, rtx mem2)
 {
-#ifdef ENABLE_CHECKING
 /* Perform a basic sanity check.  Namely, that there are no alias sets
    if we're not using strict aliasing.  This helps to catch bugs
    whereby someone uses PUT_CODE, but doesn't clear MEM_ALIAS_SET, or
@@ -237,10 +236,8 @@ mems_in_disjoint_alias_sets_p (rtx mem1, rtx mem2)
    gen_rtx_MEM, and the MEM_ALIAS_SET is not cleared.  If we begin to
    use alias sets to indicate that spilled registers cannot alias each
    other, we might need to remove this check.  */
-  if (! flag_strict_aliasing
-      && (MEM_ALIAS_SET (mem1) != 0 || MEM_ALIAS_SET (mem2) != 0))
-    abort ();
-#endif
+  gcc_assert (flag_strict_aliasing
+              || (!MEM_ALIAS_SET (mem1) && !MEM_ALIAS_SET (mem2)));
 
   return ! alias_sets_conflict_p (MEM_ALIAS_SET (mem1), MEM_ALIAS_SET (mem2));
 }
@@ -624,8 +621,7 @@ record_alias_subset (HOST_WIDE_INT superset, HOST_WIDE_INT subset)
   if (superset == subset)
     return;
 
-  if (superset == 0)
-    abort ();
+  gcc_assert (superset);
 
   superset_entry = get_alias_set_entry (superset);
   if (superset_entry == 0)
@@ -930,8 +926,7 @@ record_set (rtx dest, rtx set, void *data ATTRIBUTE_UNUSED)
 
   regno = REGNO (dest);
 
-  if (regno >= VARRAY_SIZE (reg_base_value))
-    abort ();
+  gcc_assert (regno < VARRAY_SIZE (reg_base_value));
 
   /* If this spans multiple hard registers, then we must indicate that every
      register has an unusable value.  */
@@ -1307,7 +1302,7 @@ rtx_equal_for_memref_p (rtx x, rtx y)
          contain anything but integers and other rtx's,
          except for within LABEL_REFs and SYMBOL_REFs.  */
       default:
-        abort ();
+        gcc_unreachable ();
       }
     }
   return 1;
@@ -2899,8 +2894,8 @@ init_alias_analysis (void)
     }
 
   /* Now propagate values from new_reg_base_value to reg_base_value.  */
-  if (maxreg != (unsigned int) max_reg_num())
-    abort ();
+  gcc_assert (maxreg == (unsigned int) max_reg_num());
 
   for (ui = 0; ui < maxreg; ui++)
     {
       if (new_reg_base_value[ui]
gcc/alloc-pool.c
@@ -25,16 +25,6 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA
 #include "alloc-pool.h"
 #include "hashtab.h"
 
-/* Redefine abort to report an internal error w/o coredump, and
-   reporting the location of the error in the source file.  This logic
-   is duplicated in rtl.h and tree.h because every file that needs the
-   special abort includes one or both.  toplev.h gets too few files,
-   system.h gets too many.  */
-
-extern void fancy_abort (const char *, int, const char *)
-     ATTRIBUTE_NORETURN;
-#define abort() fancy_abort (__FILE__, __LINE__, __FUNCTION__)
-
 #define align_eight(x) (((x+7) >> 3) << 3)
 
 /* The internal allocation object.  */
@@ -135,8 +125,7 @@ create_alloc_pool (const char *name, size_t size, size_t num)
   struct alloc_pool_descriptor *desc;
 #endif
 
-  if (!name)
-    abort ();
+  gcc_assert (name);
 
   /* Make size large enough to store the list header.  */
   if (size < sizeof (alloc_pool_list))
@@ -151,8 +140,7 @@ create_alloc_pool (const char *name, size_t size, size_t num)
 #endif
 
   /* Um, we can't really allocate 0 elements per block.  */
-  if (num == 0)
-    abort ();
+  gcc_assert (num);
 
   /* Find the size of the pool structure, and the name.  */
   pool_size = sizeof (struct alloc_pool_def);
@@ -201,10 +189,7 @@ free_alloc_pool (alloc_pool pool)
   struct alloc_pool_descriptor *desc = alloc_pool_descriptor (pool->name);
 #endif
 
-#ifdef ENABLE_CHECKING
-  if (!pool)
-    abort ();
-#endif
+  gcc_assert (pool);
 
   /* Free each block allocated to the pool.  */
   for (block = pool->block_list; block != NULL; block = next_block)
@@ -234,10 +219,7 @@ pool_alloc (alloc_pool pool)
   desc->allocated+=pool->elt_size;
 #endif
 
-#ifdef ENABLE_CHECKING
-  if (!pool)
-    abort ();
-#endif
+  gcc_assert (pool);
 
   /* If there are no more free elements, make some more!.  */
   if (!pool->free_list)
@@ -296,22 +278,19 @@ pool_free (alloc_pool pool, void *ptr)
 {
   alloc_pool_list header;
 
-#ifdef ENABLE_CHECKING
-  if (!ptr)
-    abort ();
+  gcc_assert (ptr);
 
+#ifdef ENABLE_CHECKING
   memset (ptr, 0xaf, pool->elt_size - offsetof (allocation_object, u.data));
 
   /* Check whether the PTR was allocated from POOL.  */
-  if (pool->id != ALLOCATION_OBJECT_PTR_FROM_USER_PTR (ptr)->id)
-    abort ();
+  gcc_assert (pool->id == ALLOCATION_OBJECT_PTR_FROM_USER_PTR (ptr)->id);
 
   /* Mark the element to be free.  */
   ALLOCATION_OBJECT_PTR_FROM_USER_PTR (ptr)->id = 0;
 #else
   /* Check if we free more than we allocated, which is Bad (TM).  */
-  if (pool->elts_free + 1 > pool->elts_allocated)
-    abort ();
+  gcc_assert (pool->elts_free < pool->elts_allocated);
 #endif
 
   header = (alloc_pool_list) ptr;
gcc/attribs.c
@@ -77,25 +77,25 @@ init_attributes (void)
 	/* The name must not begin and end with __.  */
 	const char *name = attribute_tables[i][j].name;
 	int len = strlen (name);
-	if (name[0] == '_' && name[1] == '_'
-	    && name[len - 1] == '_' && name[len - 2] == '_')
-	  abort ();
+
+	gcc_assert (!(name[0] == '_' && name[1] == '_'
+		      && name[len - 1] == '_' && name[len - 2] == '_'));
+
 	/* The minimum and maximum lengths must be consistent.  */
-	if (attribute_tables[i][j].min_length < 0)
-	  abort ();
-	if (attribute_tables[i][j].max_length != -1
-	    && (attribute_tables[i][j].max_length
-		< attribute_tables[i][j].min_length))
-	  abort ();
+	gcc_assert (attribute_tables[i][j].min_length >= 0);
+
+	gcc_assert (attribute_tables[i][j].max_length == -1
+		    || (attribute_tables[i][j].max_length
+			>= attribute_tables[i][j].min_length));
+
 	/* An attribute cannot require both a DECL and a TYPE.  */
-	if (attribute_tables[i][j].decl_required
-	    && attribute_tables[i][j].type_required)
-	  abort ();
+	gcc_assert (!attribute_tables[i][j].decl_required
+		    || !attribute_tables[i][j].type_required);
+
 	/* If an attribute requires a function type, in particular
 	   it requires a type.  */
-	if (attribute_tables[i][j].function_type_required
-	    && !attribute_tables[i][j].type_required)
-	  abort ();
+	gcc_assert (!attribute_tables[i][j].function_type_required
+		    || attribute_tables[i][j].type_required);
       }
     }
 
@@ -105,9 +105,8 @@ init_attributes (void)
       int j, k;
       for (j = 0; attribute_tables[i][j].name != NULL; j++)
 	for (k = j + 1; attribute_tables[i][k].name != NULL; k++)
-	  if (!strcmp (attribute_tables[i][j].name,
-		       attribute_tables[i][k].name))
-	    abort ();
+	  gcc_assert (strcmp (attribute_tables[i][j].name,
+			      attribute_tables[i][k].name));
     }
   /* Check that no name occurs in more than one table.  */
   for (i = 0; i < ARRAY_SIZE (attribute_tables); i++)
@@ -117,9 +116,8 @@ init_attributes (void)
       for (j = i + 1; j < ARRAY_SIZE (attribute_tables); j++)
 	for (k = 0; attribute_tables[i][k].name != NULL; k++)
 	  for (l = 0; attribute_tables[j][l].name != NULL; l++)
-	    if (!strcmp (attribute_tables[i][k].name,
-			 attribute_tables[j][l].name))
-	      abort ();
+	    gcc_assert (strcmp (attribute_tables[i][k].name,
+				attribute_tables[j][l].name));
     }
 #endif
 
@@ -327,10 +325,11 @@ decl_attributes (tree *node, tree attributes, int flags)
 	  fn_ptr_tmp = build_pointer_type (fn_ptr_tmp);
 	  if (DECL_P (*node))
 	    TREE_TYPE (*node) = fn_ptr_tmp;
-	  else if (TREE_CODE (*node) == POINTER_TYPE)
-	    *node = fn_ptr_tmp;
 	  else
-	    abort ();
+	    {
+	      gcc_assert (TREE_CODE (*node) == POINTER_TYPE);
+	      *node = fn_ptr_tmp;
+	    }
 	}
     }
 
gcc/bb-reorder.c
@@ -137,8 +137,7 @@ static bbro_basic_block_data *bbd;
 #define GET_ARRAY_SIZE(X) ((((X) / 4) + 1) * 5)
 
 /* Free the memory and set the pointer to NULL.  */
-#define FREE(P) \
-        do { if (P) { free (P); P = 0; } else { abort (); } } while (0)
+#define FREE(P) (gcc_assert (P), free (P), P = 0)
 
 /* Structure for holding information about a trace.  */
 struct trace
@@ -501,10 +500,7 @@ find_traces_1_round (int branch_th, int exec_th, gcov_type count_th,
       /* Select the successor that will be placed after BB.  */
       for (e = bb->succ; e; e = e->succ_next)
 	{
-#ifdef ENABLE_CHECKING
-	  if (e->flags & EDGE_FAKE)
-	    abort ();
-#endif
+	  gcc_assert (!(e->flags & EDGE_FAKE));
 
 	  if (e->dest == EXIT_BLOCK_PTR)
 	    continue;
@@ -760,10 +756,9 @@ copy_bb (basic_block old_bb, edge e, basic_block bb, int trace)
   new_bb = duplicate_block (old_bb, e);
   BB_COPY_PARTITION (new_bb, old_bb);
 
-  if (e->dest != new_bb)
-    abort ();
-  if (e->dest->rbi->visited)
-    abort ();
+  gcc_assert (e->dest == new_bb);
+  gcc_assert (!e->dest->rbi->visited);
 
   if (dump_file)
     fprintf (dump_file,
	     "Duplicated bb %d (created bb %d)\n",
@@ -1345,9 +1340,7 @@ mark_bb_for_unlikely_executed_section (basic_block bb)
 
   /* If basic block does not contain a NOTE_INSN_BASIC_BLOCK, there is
      a major problem.  */
-
-  if (!insert_insn)
-    abort ();
+  gcc_assert (insert_insn);
 
   /* Insert note and assign basic block number to it.  */
 
@@ -1391,28 +1384,19 @@ add_labels_and_missing_jumps (edge *crossing_edges, int n_crossing_edges)
 	    /* bb just falls through.  */
 	    {
-	      /* make sure there's only one successor */
-	      if (src->succ && (src->succ->succ_next == NULL))
-		{
-		  /* Find label in dest block.  */
-		  label = block_label (dest);
-
-		  new_jump = emit_jump_insn_after (gen_jump (label),
-						   BB_END (src));
-		  barrier = emit_barrier_after (new_jump);
-		  JUMP_LABEL (new_jump) = label;
-		  LABEL_NUSES (label) += 1;
-		  src->rbi->footer = unlink_insn_chain (barrier,
-							barrier);
-		  /* Mark edge as non-fallthru.  */
-		  crossing_edges[i]->flags &= ~EDGE_FALLTHRU;
-		}
-	      else
-		{
-		  /* Basic block has two successors, but
-		     doesn't end in a jump; something is wrong
-		     here!  */
-		  abort();
-		}
+	      gcc_assert (src->succ && !src->succ->succ_next);
+
+	      /* Find label in dest block.  */
+	      label = block_label (dest);
+
+	      new_jump = emit_jump_insn_after (gen_jump (label),
+					       BB_END (src));
+	      barrier = emit_barrier_after (new_jump);
+	      JUMP_LABEL (new_jump) = label;
+	      LABEL_NUSES (label) += 1;
+	      src->rbi->footer = unlink_insn_chain (barrier, barrier);
+	      /* Mark edge as non-fallthru.  */
+	      crossing_edges[i]->flags &= ~EDGE_FALLTHRU;
 	    } /* end: 'if (GET_CODE ... ' */
 	  } /* end: 'if (src && src->index...' */
 	} /* end: 'if (dest && dest->index...' */
@@ -1722,12 +1706,13 @@ fix_crossing_conditional_branches (void)
 							 (old_label),
 							 BB_END (new_bb));
 		}
-	      else if (HAVE_return
-		       && GET_CODE (old_label) == RETURN)
-		new_jump = emit_jump_insn_after (gen_return (),
-						 BB_END (new_bb));
 	      else
-		abort ();
+		{
+		  gcc_assert (HAVE_return
+			      && GET_CODE (old_label) == RETURN);
+		  new_jump = emit_jump_insn_after (gen_return (),
+						   BB_END (new_bb));
+		}
 
 	      barrier = emit_barrier_after (new_jump);
 	      JUMP_LABEL (new_jump) = old_label;
@@ -1794,13 +1779,12 @@ fix_crossing_unconditional_branches (void)
 	{
 	  rtx label2, table;
 
-	  if (any_condjump_p (last_insn))
-	    abort ();
+	  gcc_assert (!any_condjump_p (last_insn));
 
 	  /* Make sure the jump is not already an indirect or table jump.  */
 
-	  else if (!computed_jump_p (last_insn)
-		   && !tablejump_p (last_insn, &label2, &table))
+	  if (!computed_jump_p (last_insn)
+	      && !tablejump_p (last_insn, &label2, &table))
 	    {
 	      /* We have found a "crossing" unconditional branch.  Now
 		 we must convert it to an indirect jump.  First create
gcc/bitmap.c
@@ -415,7 +415,7 @@ bitmap_first_set_bit (bitmap a)
   for (word_num = 0; word_num < BITMAP_ELEMENT_WORDS; ++word_num)
     if ((word = ptr->bits[word_num]) != 0)
       goto word_found;
-  abort ();
+  gcc_unreachable ();
  word_found:
 #endif
 
@@ -472,7 +472,7 @@ bitmap_last_set_bit (bitmap a)
   for (word_num = BITMAP_ELEMENT_WORDS; word_num-- > 0; )
     if ((word = ptr->bits[word_num]) != 0)
       goto word_found;
-  abort ();
+  gcc_unreachable ();
  word_found:
 #endif
 
@@ -608,7 +608,7 @@ bitmap_operation (bitmap to, bitmap from1, bitmap from2,
   switch (operation)
     {
     default:
-      abort ();
+      gcc_unreachable ();
 
     case BITMAP_AND:
       DOIT (&);
gcc/bt-load.c
@@ -237,8 +237,8 @@ insn_sets_btr_p (rtx insn, int check_const, int *regno)
       if (REG_P (dest)
	  && TEST_HARD_REG_BIT (all_btrs, REGNO (dest)))
	{
-	  if (btr_referenced_p (src, NULL))
-	    abort();
+	  gcc_assert (!btr_referenced_p (src, NULL));
+
	  if (!check_const || CONSTANT_P (src))
	    {
	      if (regno)
@@ -875,11 +875,13 @@ augment_live_range (bitmap live_range, HARD_REG_SET *btrs_live_in_range,
 
   if (dominated_by_p (CDI_DOMINATORS, new_bb, head_bb))
     *tos++ = new_bb;
-  else if (dominated_by_p (CDI_DOMINATORS, head_bb, new_bb))
+  else
     {
       edge e;
       int new_block = new_bb->index;
 
+      gcc_assert (dominated_by_p (CDI_DOMINATORS, head_bb, new_bb));
+
       bitmap_set_bit (live_range, new_block);
       if (flag_btr_bb_exclusive)
	IOR_HARD_REG_SET (*btrs_live_in_range, btrs_live[new_block]);
@@ -900,8 +902,6 @@ augment_live_range (bitmap live_range, HARD_REG_SET *btrs_live_in_range,
	  for (e = head_bb->pred; e; e = e->pred_next)
	    *tos++ = e->src;
	}
-      else
-	abort();
 
   while (tos != worklist)
     {
@@ -1146,8 +1146,8 @@ move_btr_def (basic_block new_def_bb, int btr, btr_def def, bitmap live_range,
     {
       insp = BB_END (b);
       for (insp = BB_END (b); ! INSN_P (insp); insp = PREV_INSN (insp))
-	if (insp == BB_HEAD (b))
-	  abort ();
+	gcc_assert (insp != BB_HEAD (b));
+
       if (JUMP_P (insp) || can_throw_internal (insp))
	insp = PREV_INSN (insp);
     }
gcc/builtins.c
@@ -376,8 +376,8 @@ c_readstr (const char *str, enum machine_mode mode)
   HOST_WIDE_INT ch;
   unsigned int i, j;
 
-  if (GET_MODE_CLASS (mode) != MODE_INT)
-    abort ();
+  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
+
   c[0] = 0;
   c[1] = 0;
   ch = 1;
@@ -390,8 +390,8 @@ c_readstr (const char *str, enum machine_mode mode)
	  && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
       j *= BITS_PER_UNIT;
-      if (j > 2 * HOST_BITS_PER_WIDE_INT)
-	abort ();
+      gcc_assert (j <= 2 * HOST_BITS_PER_WIDE_INT);
+
       if (ch)
	ch = (unsigned char) str[i];
       c[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
@@ -700,8 +700,7 @@ expand_builtin_longjmp (rtx buf_addr, rtx value)
      a second argument of 1, because that is what builtin_setjmp will
      return.  This also makes EH slightly more efficient, since we are no
      longer copying around a value that we don't care about.  */
-  if (value != const1_rtx)
-    abort ();
+  gcc_assert (value == const1_rtx);
 
   current_function_calls_longjmp = 1;
 
@@ -758,8 +757,8 @@ expand_builtin_longjmp (rtx buf_addr, rtx value)
      internal exception handling use only.  */
   for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
     {
-      if (insn == last)
-	abort ();
+      gcc_assert (insn != last);
+
       if (JUMP_P (insn))
	{
	  REG_NOTES (insn) = alloc_EXPR_LIST (REG_NON_LOCAL_GOTO, const0_rtx,
@@ -1048,8 +1047,7 @@ apply_args_size (void)
       {
	mode = reg_raw_mode[regno];
 
-	if (mode == VOIDmode)
-	  abort ();
+	gcc_assert (mode != VOIDmode);
 
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
@@ -1087,8 +1085,7 @@ apply_result_size (void)
       {
	mode = reg_raw_mode[regno];
 
-	if (mode == VOIDmode)
-	  abort ();
+	gcc_assert (mode != VOIDmode);
 
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
@@ -1360,8 +1357,8 @@ expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	  if ((mode = apply_result_mode[regno]) != VOIDmode)
	    {
-	      if (valreg)
-		abort (); /* HAVE_untyped_call required.  */
+	      gcc_assert (!valreg); /* HAVE_untyped_call required.  */
+
	      valreg = gen_rtx_REG (mode, regno);
	    }
 
@@ -1373,7 +1370,7 @@ expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
     }
   else
 #endif
-    abort ();
+    gcc_unreachable ();
 
   /* Find the CALL insn we just emitted, and attach the register usage
      information.  */
@@ -1742,7 +1739,7 @@ expand_builtin_mathfn (tree exp, rtx target, rtx subtarget)
     case BUILT_IN_NEARBYINTL:
       builtin_optab = nearbyint_optab; break;
     default:
-      abort ();
+      gcc_unreachable ();
     }
 
   /* Make a suitable register to place result in.  */
@@ -1882,7 +1879,7 @@ expand_builtin_mathfn_2 (tree exp, rtx target, rtx subtarget)
     case BUILT_IN_DREML:
       builtin_optab = drem_optab; break;
     default:
-      abort ();
+      gcc_unreachable ();
     }
 
   /* Make a suitable register to place result in.  */
@@ -1982,7 +1979,7 @@ expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
     case BUILT_IN_COSL:
       builtin_optab = sincos_optab; break;
     default:
-      abort ();
+      gcc_unreachable ();
     }
 
   /* Make a suitable register to place result in.  */
@@ -2005,7 +2002,7 @@ expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
	case BUILT_IN_COSL:
	  builtin_optab = cos_optab; break;
	default:
-	  abort();
+	  gcc_unreachable ();
	}
     }
 
@@ -2032,23 +2029,24 @@ expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
      Set TARGET to wherever the result comes back.  */
   if (builtin_optab == sincos_optab)
     {
+      int result;
+
       switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_SIN:
	case BUILT_IN_SINF:
	case BUILT_IN_SINL:
-	  if (!expand_twoval_unop (builtin_optab, op0, 0, target, 0))
-	    abort();
+	  result = expand_twoval_unop (builtin_optab, op0, 0, target, 0);
	  break;
	case BUILT_IN_COS:
	case BUILT_IN_COSF:
	case BUILT_IN_COSL:
-	  if (!expand_twoval_unop (builtin_optab, op0, target, 0, 0))
-	    abort();
+	  result = expand_twoval_unop (builtin_optab, op0, target, 0, 0);
	  break;
	default:
-	  abort();
+	  gcc_unreachable ();
	}
+      gcc_assert (result);
     }
   else
     {
@@ -2665,10 +2663,9 @@ builtin_memcpy_read_str (void *data, HOST_WIDE_INT offset,
 {
   const char *str = (const char *) data;
 
-  if (offset < 0
-      || ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
-	  > strlen (str) + 1))
-    abort ();  /* Attempt to read past the end of constant string.  */
+  gcc_assert (offset >= 0
+	      && ((unsigned HOST_WIDE_INT) offset + GET_MODE_SIZE (mode)
		  <= strlen (str) + 1));
 
   return c_readstr (str + offset, mode);
 }
@@ -3015,8 +3012,7 @@ expand_movstr (tree dest, tree src, rtx target, int endp)
 
   insn = data->genfun (end, dest_mem, src_mem);
 
-  if (insn == 0)
-    abort ();
+  gcc_assert (insn);
 
   emit_insn (insn);
 
@@ -3138,8 +3134,7 @@ expand_builtin_stpcpy (tree arglist, rtx target, enum machine_mode mode)
	  ret = emit_move_insn (target,
				plus_constant (ret,
					       INTVAL (len_rtx)));
-	  if (! ret)
-	    abort ();
+	  gcc_assert (ret);
 
	  return target;
	}
@@ -3532,7 +3527,7 @@ expand_builtin_memcmp (tree exp ATTRIBUTE_UNUSED, tree arglist, rtx target,
			       GEN_INT (MIN (arg1_align, arg2_align)));
       else
 #endif
-	abort ();
+	gcc_unreachable ();
 
       if (insn)
	emit_insn (insn);
@@ -4125,8 +4120,7 @@ expand_builtin_args_info (tree arglist)
   int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
   int *word_ptr = (int *) &current_function_args_info;
 
-  if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
-    abort ();
+  gcc_assert (sizeof (CUMULATIVE_ARGS) % sizeof (int) == 0);
 
   if (arglist != 0)
     {
@@ -4296,7 +4290,7 @@ std_gimplify_va_arg_expr (tree valist, tree type, tree *pre_p, tree *post_p)
   /* All of the alignment and movement below is for args-grow-up machines.
      As of 2004, there are only 3 ARGS_GROW_DOWNWARD targets, and they all
      implement their own specialized gimplify_va_arg_expr routines.  */
-  abort ();
+  gcc_unreachable ();
 #endif
 
   indirect = pass_by_reference (NULL, TYPE_MODE (type), type, false);
@@ -4625,9 +4619,8 @@ expand_builtin_unop (enum machine_mode target_mode, tree arglist, rtx target,
      Set TARGET to wherever the result comes back.  */
   target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
			op_optab, op0, target, 1);
-  if (target == 0)
-    abort ();
+  gcc_assert (target);
 
   return convert_to_mode (target_mode, target, 0);
 }
 
@@ -4704,7 +4697,7 @@ expand_builtin_fputs (tree arglist, rtx target, bool unlocked)
	break;
       }
     default:
-      abort ();
+      gcc_unreachable ();
     }
 
   return expand_expr (build_function_call_expr (fn, arglist),
@@ -5242,10 +5235,8 @@ expand_builtin_profile_func (bool exitp)
   rtx this, which;
 
   this = DECL_RTL (current_function_decl);
-  if (MEM_P (this))
-    this = XEXP (this, 0);
-  else
-    abort ();
+  gcc_assert (MEM_P (this));
+  this = XEXP (this, 0);
 
   if (exitp)
     which = profile_function_exit_libfunc;
@@ -5480,7 +5471,7 @@ expand_builtin_fork_or_exec (tree fn, tree arglist, rtx target, int ignore)
       break;
 
     default:
-      abort ();
+      gcc_unreachable ();
     }
 
   decl = build_decl (FUNCTION_DECL, id, TREE_TYPE (fn));
@@ -6813,7 +6804,7 @@ fold_builtin_bitop (tree exp)
	  break;
 
	default:
-	  abort();
+	  gcc_unreachable ();
	}
 
      return build_int_cst (TREE_TYPE (exp), result);
@@ -7740,7 +7731,7 @@ fold_builtin_classify (tree exp, int builtin_index)
       return fold (build2 (UNORDERED_EXPR, type, arg, arg));
 
     default:
-      abort ();
+      gcc_unreachable ();
     }
 }
 
@@ -9158,7 +9149,7 @@ fold_builtin_fputs (tree arglist, bool ignore, bool unlocked, tree len)
	break;
       }
     default:
-      abort ();
+      gcc_unreachable ();
     }
 
   /* These optimizations are only performed when the result is ignored,