cfgloop.c (get_loop_body_in_bfs_order): Avoid redundant call to bitmap_bit_p.

* cfgloop.c (get_loop_body_in_bfs_order): Avoid redundant call to
	bitmap_bit_p.
	* config/bfin/bfin.c (bfin_discover_loop): Likewise.
	* dominance.c (iterate_fix_dominators): Likewise.
	* dse.c (set_usage_bits): Likewise.
	(set_position_unneeded, record_store): Likewise.
	* gimple-fold.c (get_maxval_strlen): Likewise.
	* haifa-sched.c (fix_inter_tick, fix_recovery_deps): Likewise.
	* ipa-inline.c (update_caller_keys): Likewise.
	* ipa-split.c (verify_non_ssa_vars): Likewise.
	* ipa-type-escape.c (mark_type, close_type_seen): Likewise.
	(close_type_exposed_parameter, close_type_full_escape): Likewise.
	(close_addressof_down): Likewise.
	* ira-color.c (assign_hard_reg, push_allocno_to_stack): Likewise.
	(setup_allocno_left_conflicts_size): Likewise.
	(ira_reassign_conflict_allocnos): Likewise.
	(ira_reassign_pseudos): Likewise.
	* ira-emit.c (change_loop): Likewise.
	* loop-invariant.c (mark_regno_live, mark_regno_death): Likewise.
	* lto-streamer-out.c (write_symbol): Likewise.
	* predict.c (expr_expected_value_1): Likewise.
	* regstat.c (regstat_bb_compute_ri): Likewise.
	* sel-sched.c (create_block_for_bookkeeping): Likewise.
	(track_scheduled_insns_and_blocks, sel_sched_region_1): Likewise.
	* stmt.c (expand_case): Likewise.
	* tree-eh.c (note_eh_region_may_contain_throw): Likewise.
	* tree-into-ssa.c (prune_unused_phi_nodes): Likewise.
	* tree-loop-distribution.c (mark_nodes_having_upstream_mem_writes):
	Likewise.
	(rdg_flag_vertex, rdg_flag_loop_exits): Likewise.
	(rdg_build_components): Likewise.
	* tree-object-size.c (collect_object_sizes_for): Likewise.
	* tree-sra.c (convert_callers): Likewise.
	* tree-ssa-coalesce.c (live_track_add_partition): Likewise.
	* tree-ssa-live.c (mark_all_vars_used_1): Likewise.
	* tree-ssa-pre.c (bitmap_set_replace_value): Likewise.

From-SVN: r163378
Author: Nathan Froyd <froydnj@codesourcery.com>
Date: 2010-08-19 16:51:39 +0000
parent 99d821c01c
commit fcaa4ca433

26 changed files with 101 additions and 138 deletions
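
The whole patch is one idiom applied across the tree: GCC's bitmap_set_bit returns true exactly when the bit was not already set (and bitmap_clear_bit returns true exactly when it was set), so the preceding bitmap_bit_p probe can be folded into the update itself. A minimal before/after sketch of the set-side rewrite; `visited', `index', and `process' here are illustrative names, not from the patch:

  /* Before: probe, then set -- two lookups into the bitmap.  */
  if (!bitmap_bit_p (visited, index))
    {
      bitmap_set_bit (visited, index);
      process (index);
    }

  /* After: bitmap_set_bit returns true iff the bit was newly set,
     so a single call both tests and marks.  */
  if (bitmap_set_bit (visited, index))
    process (index);

Besides being shorter, the fused form does one lookup where the old code did two on each of these paths.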

gcc/cfgloop.c

@@ -924,22 +924,16 @@ get_loop_body_in_bfs_order (const struct loop *loop)
       edge e;
       edge_iterator ei;
 
-      if (!bitmap_bit_p (visited, bb->index))
-        {
-          /* This basic block is now visited */
-          bitmap_set_bit (visited, bb->index);
-          blocks[i++] = bb;
-        }
+      if (bitmap_set_bit (visited, bb->index))
+        /* This basic block is now visited */
+        blocks[i++] = bb;
 
       FOR_EACH_EDGE (e, ei, bb->succs)
         {
           if (flow_bb_inside_loop_p (loop, e->dest))
             {
-              if (!bitmap_bit_p (visited, e->dest->index))
-                {
-                  bitmap_set_bit (visited, e->dest->index);
-                  blocks[i++] = e->dest;
-                }
+              if (bitmap_set_bit (visited, e->dest->index))
+                blocks[i++] = e->dest;
             }
         }

gcc/config/bfin/bfin.c

@@ -4398,14 +4398,13 @@ bfin_discover_loop (loop_info loop, basic_block tail_bb, rtx tail_insn)
           break;
         }
 
-      if (bitmap_bit_p (loop->block_bitmap, bb->index))
+      if (!bitmap_set_bit (loop->block_bitmap, bb->index))
         continue;
 
       /* We've not seen this block before.  Add it to the loop's
         list and then add each successor to the work list.  */
 
       VEC_safe_push (basic_block, heap, loop->blocks, bb);
-      bitmap_set_bit (loop->block_bitmap, bb->index);
 
       if (bb != tail_bb)
         {

gcc/dominance.c

@@ -1357,10 +1357,9 @@ iterate_fix_dominators (enum cdi_direction dir, VEC (basic_block, heap) *bbs,
          dom_i = (size_t) *pointer_map_contains (map, dom);
 
          /* Do not include parallel edges to G.  */
-         if (bitmap_bit_p ((bitmap) g->vertices[dom_i].data, i))
+         if (!bitmap_set_bit ((bitmap) g->vertices[dom_i].data, i))
            continue;
 
-         bitmap_set_bit ((bitmap) g->vertices[dom_i].data, i);
          add_edge (g, dom_i, i);
        }
    }

gcc/dse.c

@@ -963,11 +963,10 @@ set_usage_bits (group_info_t group, HOST_WIDE_INT offset, HOST_WIDE_INT width)
              ai = i;
            }
 
-         if (bitmap_bit_p (store1, ai))
+         if (!bitmap_set_bit (store1, ai))
            bitmap_set_bit (store2, ai);
          else
            {
-             bitmap_set_bit (store1, ai);
              if (i < 0)
                {
                  if (group->offset_map_size_n < ai)
@@ -1232,11 +1231,8 @@ set_position_unneeded (store_info_t s_info, int pos)
 {
   if (__builtin_expect (s_info->is_large, false))
     {
-      if (!bitmap_bit_p (s_info->positions_needed.large.bmap, pos))
-        {
-          s_info->positions_needed.large.count++;
-          bitmap_set_bit (s_info->positions_needed.large.bmap, pos);
-        }
+      if (bitmap_set_bit (s_info->positions_needed.large.bmap, pos))
+        s_info->positions_needed.large.count++;
     }
   else
     s_info->positions_needed.small_bitmask
@@ -1393,10 +1389,8 @@ record_store (rtx body, bb_info_t bb_info)
       gcc_assert (GET_MODE (mem) != BLKmode);
 
-      if (bitmap_bit_p (store1, spill_alias_set))
+      if (!bitmap_set_bit (store1, spill_alias_set))
        bitmap_set_bit (store2, spill_alias_set);
-      else
-       bitmap_set_bit (store1, spill_alias_set);
 
       if (clear_alias_group->offset_map_size_p < spill_alias_set)
        clear_alias_group->offset_map_size_p = spill_alias_set;

gcc/gimple-fold.c

@@ -1004,9 +1004,8 @@ get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
        }
 
       /* If we were already here, break the infinite cycle.  */
-      if (bitmap_bit_p (visited, SSA_NAME_VERSION (arg)))
+      if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
        return true;
-      bitmap_set_bit (visited, SSA_NAME_VERSION (arg));
 
       var = arg;
       def_stmt = SSA_NAME_DEF_STMT (var);

gcc/haifa-sched.c

@@ -3613,9 +3613,8 @@ fix_inter_tick (rtx head, rtx tail)
              gcc_assert (tick >= MIN_TICK);
 
              /* Fix INSN_TICK of instruction from just scheduled block.  */
-             if (!bitmap_bit_p (&processed, INSN_LUID (head)))
+             if (bitmap_set_bit (&processed, INSN_LUID (head)))
                {
-                 bitmap_set_bit (&processed, INSN_LUID (head));
                  tick -= next_clock;
 
                  if (tick < MIN_TICK)
@@ -3635,9 +3634,8 @@ fix_inter_tick (rtx head, rtx tail)
                      /* If NEXT has its INSN_TICK calculated, fix it.
                         If not - it will be properly calculated from
                         scratch later in fix_tick_ready.  */
-                     && !bitmap_bit_p (&processed, INSN_LUID (next)))
+                     && bitmap_set_bit (&processed, INSN_LUID (next)))
                    {
-                     bitmap_set_bit (&processed, INSN_LUID (next));
                      tick -= next_clock;
 
                      if (tick < MIN_TICK)
@@ -4756,11 +4754,8 @@ fix_recovery_deps (basic_block rec)
        {
          sd_delete_dep (sd_it);
 
-         if (!bitmap_bit_p (&in_ready, INSN_LUID (consumer)))
-           {
-             ready_list = alloc_INSN_LIST (consumer, ready_list);
-             bitmap_set_bit (&in_ready, INSN_LUID (consumer));
-           }
+         if (bitmap_set_bit (&in_ready, INSN_LUID (consumer)))
+           ready_list = alloc_INSN_LIST (consumer, ready_list);
        }
      else
        {

gcc/ipa-inline.c

@@ -699,9 +699,8 @@ update_caller_keys (fibheap_t heap, struct cgraph_node *node,
   if (!node->local.inlinable
       || node->global.inlined_to)
     return;
-  if (bitmap_bit_p (updated_nodes, node->uid))
+  if (!bitmap_set_bit (updated_nodes, node->uid))
     return;
-  bitmap_set_bit (updated_nodes, node->uid);
   node->global.estimated_growth = INT_MIN;
 
   /* See if there is something to do.  */

gcc/ipa-split.c

@@ -202,12 +202,11 @@ verify_non_ssa_vars (struct split_point *current, bitmap non_ssa_vars,
 
       FOR_EACH_EDGE (e, ei, bb->preds)
        if (e->src != ENTRY_BLOCK_PTR
-           && !bitmap_bit_p (seen, e->src->index))
+           && bitmap_set_bit (seen, e->src->index))
          {
            gcc_checking_assert (!bitmap_bit_p (current->split_bbs,
                                                e->src->index));
            VEC_safe_push (basic_block, heap, worklist, e->src);
-           bitmap_set_bit (seen, e->src->index);
          }
 
       for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
        {

gcc/ipa-type-escape.c

@@ -485,18 +485,13 @@ mark_type (tree type, enum escape_t escape_status)
     }
 
   uid = TYPE_UID (type);
-  if (bitmap_bit_p (map, uid))
+  if (!bitmap_set_bit (map, uid))
     return type;
-  else
-    {
-      bitmap_set_bit (map, uid);
-      if (escape_status == FULL_ESCAPE)
-        {
-          /* Efficiency hack. When things are bad, do not mess around
-             with this type anymore.  */
-          bitmap_set_bit (global_types_exposed_parameter, uid);
-        }
-    }
+  else if (escape_status == FULL_ESCAPE)
+    /* Efficiency hack. When things are bad, do not mess around
+       with this type anymore.  */
+    bitmap_set_bit (global_types_exposed_parameter, uid);
 
   return type;
 }
@@ -1746,9 +1741,8 @@ close_type_seen (tree type)
   uid = TYPE_UID (type);
 
-  if (bitmap_bit_p (been_there_done_that, uid))
+  if (!bitmap_set_bit (been_there_done_that, uid))
     return;
-  bitmap_set_bit (been_there_done_that, uid);
 
   /* If we are doing a language with a type hierarchy, mark all of
      the superclasses.  */
@@ -1796,9 +1790,8 @@ close_type_exposed_parameter (tree type)
   uid = TYPE_UID (type);
   gcc_assert (!POINTER_TYPE_P (type));
 
-  if (bitmap_bit_p (been_there_done_that, uid))
+  if (!bitmap_set_bit (been_there_done_that, uid))
     return;
-  bitmap_set_bit (been_there_done_that, uid);
 
   /* If the field is a struct or union type, mark all of the
      subfields.  */
@@ -1851,9 +1844,8 @@ close_type_full_escape (tree type)
     return;
   uid = TYPE_UID (type);
 
-  if (bitmap_bit_p (been_there_done_that, uid))
+  if (!bitmap_set_bit (been_there_done_that, uid))
     return;
-  bitmap_set_bit (been_there_done_that, uid);
 
   subtype_map = subtype_map_for_uid (uid, false);
@@ -1929,9 +1921,8 @@ close_addressof_down (int uid)
   else
     return NULL;
 
-  if (bitmap_bit_p (been_there_done_that, uid))
+  if (!bitmap_set_bit (been_there_done_that, uid))
     return map;
-  bitmap_set_bit (been_there_done_that, uid);
 
   /* If the type escapes, get rid of the addressof map, it will not be
      needed.  */

gcc/ira-color.c

@@ -568,11 +568,9 @@ assign_hard_reg (ira_allocno_t allocno, bool retry_p)
              if (allocno_coalesced_p)
                {
-                 if (bitmap_bit_p (processed_coalesced_allocno_bitmap,
-                                   ALLOCNO_NUM (conflict_allocno)))
+                 if (!bitmap_set_bit (processed_coalesced_allocno_bitmap,
+                                      ALLOCNO_NUM (conflict_allocno)))
                    continue;
-                 bitmap_set_bit (processed_coalesced_allocno_bitmap,
-                                 ALLOCNO_NUM (conflict_allocno));
                }
              ira_allocate_and_copy_costs
@@ -977,11 +975,9 @@ push_allocno_to_stack (ira_allocno_t allocno)
            {
              conflict_obj = ALLOCNO_OBJECT (conflict_allocno,
                                             OBJECT_SUBWORD (conflict_obj));
-             if (bitmap_bit_p (processed_coalesced_allocno_bitmap,
-                               OBJECT_CONFLICT_ID (conflict_obj)))
+             if (!bitmap_set_bit (processed_coalesced_allocno_bitmap,
+                                  OBJECT_CONFLICT_ID (conflict_obj)))
                continue;
-             bitmap_set_bit (processed_coalesced_allocno_bitmap,
-                             OBJECT_CONFLICT_ID (conflict_obj));
            }
 
          if (!ALLOCNO_IN_GRAPH_P (conflict_allocno)
@@ -1552,11 +1548,9 @@ setup_allocno_left_conflicts_size (ira_allocno_t allocno)
                      == ALLOCNO_COVER_CLASS (conflict_allocno));
          if (allocno_coalesced_p)
            {
-             if (bitmap_bit_p (processed_coalesced_allocno_bitmap,
-                               ALLOCNO_NUM (conflict_allocno)))
+             if (!bitmap_set_bit (processed_coalesced_allocno_bitmap,
+                                  ALLOCNO_NUM (conflict_allocno)))
                continue;
-             bitmap_set_bit (processed_coalesced_allocno_bitmap,
-                             ALLOCNO_NUM (conflict_allocno));
            }
          if (! ALLOCNO_ASSIGNED_P (conflict_allocno))
@@ -2436,9 +2430,8 @@ ira_reassign_conflict_allocnos (int start_regno)
              ira_allocno_t conflict_a = OBJECT_ALLOCNO (conflict_obj);
              ira_assert (ira_reg_classes_intersect_p
                          [cover_class][ALLOCNO_COVER_CLASS (conflict_a)]);
-             if (bitmap_bit_p (allocnos_to_color, ALLOCNO_NUM (conflict_a)))
+             if (!bitmap_set_bit (allocnos_to_color, ALLOCNO_NUM (conflict_a)))
                continue;
-             bitmap_set_bit (allocnos_to_color, ALLOCNO_NUM (conflict_a));
              sorted_allocnos[allocnos_to_color_num++] = conflict_a;
            }
        }
@@ -3041,10 +3034,9 @@ ira_reassign_pseudos (int *spilled_pseudo_regs, int num,
              ira_allocno_t conflict_a = OBJECT_ALLOCNO (conflict_obj);
              if (ALLOCNO_HARD_REGNO (conflict_a) < 0
                  && ! ALLOCNO_DONT_REASSIGN_P (conflict_a)
-                 && ! bitmap_bit_p (temp, ALLOCNO_REGNO (conflict_a)))
+                 && bitmap_set_bit (temp, ALLOCNO_REGNO (conflict_a)))
                {
                  spilled_pseudo_regs[num++] = ALLOCNO_REGNO (conflict_a);
-                 bitmap_set_bit (temp, ALLOCNO_REGNO (conflict_a));
                  /* ?!? This seems wrong.  */
                  bitmap_set_bit (consideration_allocno_bitmap,
                                  ALLOCNO_NUM (conflict_a));

gcc/ira-emit.c

@@ -521,8 +521,7 @@ change_loop (ira_loop_tree_node_t node)
          regno = ALLOCNO_REGNO (allocno);
          if (ALLOCNO_CAP_MEMBER (allocno) != NULL)
            continue;
-         used_p = bitmap_bit_p (used_regno_bitmap, regno);
-         bitmap_set_bit (used_regno_bitmap, regno);
+         used_p = !bitmap_set_bit (used_regno_bitmap, regno);
          ALLOCNO_SOMEWHERE_RENAMED_P (allocno) = true;
          if (! used_p)
            continue;
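
change_loop is the one spot where the old state is still wanted afterwards, so the return value is captured rather than branched on. The same idiom in isolation, with the names from the hunk above:

  /* Set the bit and remember whether REGNO had been seen already:
     bitmap_set_bit returns true only when the bit was newly set.  */
  used_p = !bitmap_set_bit (used_regno_bitmap, regno);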

gcc/loop-invariant.c

@@ -1671,9 +1671,8 @@ mark_regno_live (int regno)
        loop != current_loops->tree_root;
        loop = loop_outer (loop))
     bitmap_set_bit (&LOOP_DATA (loop)->regs_live, regno);
-  if (bitmap_bit_p (&curr_regs_live, regno))
+  if (!bitmap_set_bit (&curr_regs_live, regno))
     return;
-  bitmap_set_bit (&curr_regs_live, regno);
   change_pressure (regno, true);
 }
@@ -1681,9 +1680,8 @@ mark_regno_live (int regno)
 static void
 mark_regno_death (int regno)
 {
-  if (! bitmap_bit_p (&curr_regs_live, regno))
+  if (! bitmap_clear_bit (&curr_regs_live, regno))
     return;
-  bitmap_clear_bit (&curr_regs_live, regno);
   change_pressure (regno, false);
 }
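
mark_regno_death above is the first of the clear-side rewrites (sel-sched.c, tree-ssa-live.c, and tree-ssa-pre.c below have the same shape): bitmap_clear_bit reports whether the bit was previously set, so test-then-clear also collapses to one call. A sketch with illustrative names (`b', `i', and `on_death' are not from the patch):

  /* Before: probe, then clear.  */
  if (bitmap_bit_p (b, i))
    {
      bitmap_clear_bit (b, i);
      on_death (i);
    }

  /* After: one call clears the bit and reports its old state.  */
  if (bitmap_clear_bit (b, i))
    on_death (i);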

gcc/lto-streamer-out.c

@@ -2323,10 +2323,8 @@ write_symbol (struct lto_streamer_cache_d *cache,
   gcc_assert (slot_num >= 0);
 
   /* Avoid duplicate symbols.  */
-  if (bitmap_bit_p (seen, slot_num))
+  if (!bitmap_set_bit (seen, slot_num))
     return;
-  else
-    bitmap_set_bit (seen, slot_num);
 
   if (DECL_EXTERNAL (t))
     {

gcc/predict.c

@@ -1180,9 +1180,8 @@ expr_expected_value_1 (tree type, tree op0, enum tree_code code, tree op1, bitmap visited)
       def = SSA_NAME_DEF_STMT (op0);
 
       /* If we were already here, break the infinite cycle.  */
-      if (bitmap_bit_p (visited, SSA_NAME_VERSION (op0)))
+      if (!bitmap_set_bit (visited, SSA_NAME_VERSION (op0)))
        return NULL;
-      bitmap_set_bit (visited, SSA_NAME_VERSION (op0));
       if (gimple_code (def) == GIMPLE_PHI)
        {

gcc/regstat.c

@@ -308,10 +308,9 @@ regstat_bb_compute_ri (unsigned int bb_index,
                  REG_BASIC_BLOCK (uregno) = REG_BLOCK_GLOBAL;
                }
 
-             if (!bitmap_bit_p (live, uregno))
+             if (bitmap_set_bit (live, uregno))
                {
                  /* This register is now live.  */
-                 bitmap_set_bit (live, uregno);
 
                  /* If we have seen this regno, then it has already been
                     processed correctly with the per insn increment.  If

gcc/sel-sched.c

@@ -4632,11 +4632,8 @@ create_block_for_bookkeeping (edge e1, edge e2)
            if (INSN_P (insn))
              EXPR_ORIG_BB_INDEX (INSN_EXPR (insn)) = succ->index;
 
-         if (bitmap_bit_p (code_motion_visited_blocks, new_bb->index))
-           {
-             bitmap_set_bit (code_motion_visited_blocks, succ->index);
-             bitmap_clear_bit (code_motion_visited_blocks, new_bb->index);
-           }
+         if (bitmap_clear_bit (code_motion_visited_blocks, new_bb->index))
+           bitmap_set_bit (code_motion_visited_blocks, succ->index);
 
          gcc_assert (LABEL_P (BB_HEAD (new_bb))
                      && LABEL_P (BB_HEAD (succ)));
@@ -5785,7 +5782,7 @@ track_scheduled_insns_and_blocks (rtx insn)
         we still need to count it as an originator.  */
      bitmap_set_bit (current_originators, INSN_UID (insn));
 
-     if (!bitmap_bit_p (current_copies, INSN_UID (insn)))
+     if (!bitmap_clear_bit (current_copies, INSN_UID (insn)))
        {
          /* Note that original block needs to be rescheduled, as we pulled an
             instruction out of it.  */
@@ -5794,8 +5791,6 @@ track_scheduled_insns_and_blocks (rtx insn)
          else if (INSN_UID (insn) < first_emitted_uid && !DEBUG_INSN_P (insn))
            num_insns_scheduled++;
        }
-     else
-       bitmap_clear_bit (current_copies, INSN_UID (insn));
 
   /* For instructions we must immediately remove insn from the
      stream, so subsequent update_data_sets () won't include this
@@ -7498,7 +7493,7 @@ sel_sched_region_1 (void)
                    continue;
                  }
 
-               if (bitmap_bit_p (blocks_to_reschedule, bb->index))
+               if (bitmap_clear_bit (blocks_to_reschedule, bb->index))
                  {
                    flist_tail_init (new_fences);
@@ -7507,8 +7502,6 @@ sel_sched_region_1 (void)
                    /* Mark BB as head of the new ebb.  */
                    bitmap_set_bit (forced_ebb_heads, bb->index);
 
-                   bitmap_clear_bit (blocks_to_reschedule, bb->index);
-
                    gcc_assert (fences == NULL);
                    init_fences (bb_note (bb));

gcc/stmt.c

@@ -2338,11 +2338,8 @@ expand_case (gimple stmt)
          /* If we have not seen this label yet, then increase the
             number of unique case node targets seen.  */
          lab = label_rtx (n->code_label);
-         if (!bitmap_bit_p (label_bitmap, CODE_LABEL_NUMBER (lab)))
-           {
-             bitmap_set_bit (label_bitmap, CODE_LABEL_NUMBER (lab));
-             uniq++;
-           }
+         if (bitmap_set_bit (label_bitmap, CODE_LABEL_NUMBER (lab)))
+           uniq++;
        }
 
       BITMAP_FREE (label_bitmap);

gcc/tree-eh.c

@@ -847,9 +847,8 @@ emit_eh_dispatch (gimple_seq *seq, eh_region region)
 static void
 note_eh_region_may_contain_throw (eh_region region)
 {
-  while (!bitmap_bit_p (eh_region_may_contain_throw_map, region->index))
+  while (bitmap_set_bit (eh_region_may_contain_throw_map, region->index))
     {
-      bitmap_set_bit (eh_region_may_contain_throw_map, region->index);
       region = region->outer;
       if (region == NULL)
        break;

gcc/tree-into-ssa.c

@@ -960,11 +960,10 @@ prune_unused_phi_nodes (bitmap phis, bitmap kills, bitmap uses)
        }
 
       /* If the phi node is already live, there is nothing to do.  */
-      if (bitmap_bit_p (live_phis, p))
+      if (!bitmap_set_bit (live_phis, p))
        continue;
 
-      /* Mark the phi as live, and add the new uses to the worklist.  */
-      bitmap_set_bit (live_phis, p);
+      /* Add the new uses to the worklist.  */
       def_bb = BASIC_BLOCK (p);
       FOR_EACH_EDGE (e, ei, def_bb->preds)
        {

gcc/tree-loop-distribution.c

@@ -519,11 +519,9 @@ mark_nodes_having_upstream_mem_writes (struct graph *rdg)
 
          for (i = 0; VEC_iterate (int, nodes, i, x); i++)
            {
-             if (bitmap_bit_p (seen, x))
+             if (!bitmap_set_bit (seen, x))
                continue;
 
-             bitmap_set_bit (seen, x);
-
              if (RDG_MEM_WRITE_STMT (rdg, x)
                  || predecessor_has_mem_write (rdg, &(rdg->vertices[x]))
                  /* In anti dependences the read should occur before
@@ -644,12 +642,11 @@ rdg_flag_vertex (struct graph *rdg, int v, bitmap partition, bitmap loops,
 {
   struct loop *loop;
 
-  if (bitmap_bit_p (partition, v))
+  if (!bitmap_set_bit (partition, v))
     return;
 
   loop = loop_containing_stmt (RDG_STMT (rdg, v));
   bitmap_set_bit (loops, loop->num);
-  bitmap_set_bit (partition, v);
 
   if (rdg_cannot_recompute_vertex_p (rdg, v))
     {
@@ -730,11 +727,8 @@ rdg_flag_loop_exits (struct graph *rdg, bitmap loops, bitmap partition,
                         part_has_writes);
 
       EXECUTE_IF_SET_IN_BITMAP (new_loops, 0, i, bi)
-       if (!bitmap_bit_p (loops, i))
-         {
-           bitmap_set_bit (loops, i);
-           collect_condition_stmts (get_loop (i), &conds);
-         }
+       if (bitmap_set_bit (loops, i))
+         collect_condition_stmts (get_loop (i), &conds);
 
       BITMAP_FREE (new_loops);
     }
@@ -864,14 +858,13 @@ rdg_build_components (struct graph *rdg, VEC (int, heap) *starting_vertices,
     {
       int c = rdg->vertices[v].component;
 
-      if (!bitmap_bit_p (saved_components, c))
+      if (bitmap_set_bit (saved_components, c))
        {
          rdgc x = XCNEW (struct rdg_component);
          x->num = c;
          x->vertices = all_components[c];
 
          VEC_safe_push (rdgc, heap, *components, x);
-         bitmap_set_bit (saved_components, c);
        }
     }

gcc/tree-object-size.c

@@ -889,9 +889,8 @@ collect_object_sizes_for (struct object_size_info *osi, tree var)
 
   if (osi->pass == 0)
     {
-      if (! bitmap_bit_p (osi->visited, varno))
+      if (bitmap_set_bit (osi->visited, varno))
        {
-         bitmap_set_bit (osi->visited, varno);
          object_sizes[object_size_type][varno]
            = (object_size_type & 2) ? -1 : 0;
        }

gcc/tree-sra.c

@@ -4233,11 +4233,8 @@ convert_callers (struct cgraph_node *node, tree old_decl,
     }
 
   for (cs = node->callers; cs; cs = cs->next_caller)
-    if (!bitmap_bit_p (recomputed_callers, cs->caller->uid))
-      {
-       compute_inline_parameters (cs->caller);
-       bitmap_set_bit (recomputed_callers, cs->caller->uid);
-      }
+    if (bitmap_set_bit (recomputed_callers, cs->caller->uid))
+      compute_inline_parameters (cs->caller);
 
   BITMAP_FREE (recomputed_callers);
 
   current_function_decl = old_cur_fndecl;

gcc/tree-ssa-coalesce.c

@@ -725,11 +725,8 @@ live_track_add_partition (live_track_p ptr, int partition)
   root = basevar_index (ptr->map, partition);
 
   /* If this base var wasn't live before, it is now.  Clear the element list
      since it was delayed until needed.  */
-  if (!bitmap_bit_p (ptr->live_base_var, root))
-    {
-      bitmap_set_bit (ptr->live_base_var, root);
-      bitmap_clear (ptr->live_base_partitions[root]);
-    }
+  if (bitmap_set_bit (ptr->live_base_var, root))
+    bitmap_clear (ptr->live_base_partitions[root]);
 
   bitmap_set_bit (ptr->live_base_partitions[root], partition);
 }

gcc/tree-ssa-live.c

@@ -377,11 +377,8 @@ mark_all_vars_used_1 (tree *tp, int *walk_subtrees, void *data)
      eliminated as unused.  */
   if (TREE_CODE (t) == VAR_DECL)
     {
-      if (data != NULL && bitmap_bit_p ((bitmap) data, DECL_UID (t)))
-       {
-         bitmap_clear_bit ((bitmap) data, DECL_UID (t));
-         mark_all_vars_used (&DECL_INITIAL (t), data);
-       }
+      if (data != NULL && bitmap_clear_bit ((bitmap) data, DECL_UID (t)))
+       mark_all_vars_used (&DECL_INITIAL (t), data);
       set_is_used (t);
     }
 
   /* remove_unused_scope_block_p requires information about labels

gcc/tree-ssa-pre.c

@@ -855,9 +855,8 @@ bitmap_set_replace_value (bitmap_set_t set, unsigned int lookfor,
   exprset = VEC_index (bitmap_set_t, value_expressions, lookfor);
   FOR_EACH_EXPR_ID_IN_SET (exprset, i, bi)
     {
-      if (bitmap_bit_p (&set->expressions, i))
+      if (bitmap_clear_bit (&set->expressions, i))
        {
-         bitmap_clear_bit (&set->expressions, i);
          bitmap_set_bit (&set->expressions, get_expression_id (expr));
          return;
        }