mirror of
git://gcc.gnu.org/git/gcc.git
synced 2025-03-22 10:51:19 +08:00
c-common.c, [...]: Fix comment typos.
* c-common.c, cfgexpand.c, cgraphunit.c, defaults.h, et-forest.c, expr.c, gimplify.c, global.c, gthr-lynx.h, hard-reg-set.h, modulo-sched.c, optabs.c, postreload-gcse.c, tree-data-ref.c, tree-flow.h, tree-if-conv.c, tree-inline.c, tree-sra.c, tree-ssa-loop-im.c, tree-ssa-loop-ivopts.c, tree-ssa-loop-niter.c, tree-ssa-operands.c, tree-ssa-operands.h, tree-ssa-propagate.c, tree-ssa-propagate.h, tree-ssa-threadupdate.c, value-prof.c, vec.c, vec.h: Fix comment typos. Follow spelling conventions. From-SVN: r87104
This commit is contained in:
parent
2addf92620
commit
2a7e31df70
@@ -1,3 +1,15 @@
|
||||
2004-09-05 Kazu Hirata <kazu@cs.umass.edu>
|
||||
|
||||
* c-common.c, cfgexpand.c, cgraphunit.c, defaults.h,
|
||||
et-forest.c, expr.c, gimplify.c, global.c, gthr-lynx.h,
|
||||
hard-reg-set.h, modulo-sched.c, optabs.c, postreload-gcse.c,
|
||||
tree-data-ref.c, tree-flow.h, tree-if-conv.c, tree-inline.c,
|
||||
tree-sra.c, tree-ssa-loop-im.c, tree-ssa-loop-ivopts.c,
|
||||
tree-ssa-loop-niter.c, tree-ssa-operands.c,
|
||||
tree-ssa-operands.h, tree-ssa-propagate.c,
|
||||
tree-ssa-propagate.h, tree-ssa-threadupdate.c, value-prof.c,
|
||||
vec.c, vec.h: Fix comment typos. Follow spelling conventions.
|
||||
|
||||
2004-09-05 Diego Novillo <dnovillo@redhat.com>
|
||||
|
||||
* tree-if-conv.c (gate_tree_if_conversion): Enable only if the
|
||||
|
@@ -1444,7 +1444,7 @@ check_case_value (tree value)
|
||||
type (ie. before the default conversion to int) of the switch testing
|
||||
expression.
|
||||
TYPE is the promoted type of the testing expression, and ORIG_TYPE is
|
||||
the type before promiting it. CASE_LOW_P is a pointer to the lower
|
||||
the type before promoting it. CASE_LOW_P is a pointer to the lower
|
||||
bound of the case label, and CASE_HIGH_P is the upper bound or NULL
|
||||
if the case is not a case range.
|
||||
The caller has to make sure that we are not called with NULL for
|
||||
|
@@ -606,7 +606,7 @@ defer_stack_allocation (tree var, bool toplevel)
|
||||
}
|
||||
|
||||
/* A subroutine of expand_used_vars. Expand one variable according to
|
||||
its flavour. Variables to be placed on the stack are not actually
|
||||
its flavor. Variables to be placed on the stack are not actually
|
||||
expanded yet, merely recorded. */
|
||||
|
||||
static void
|
||||
|
@@ -1261,7 +1261,7 @@ cgraph_decide_recursive_inlining (struct cgraph_node *node)
|
||||
max_depth = PARAM_VALUE (PARAM_MAX_INLINE_RECURSIVE_DEPTH);
|
||||
}
|
||||
|
||||
/* Make sure that function is small enought to be considered for inlining. */
|
||||
/* Make sure that function is small enough to be considered for inlining. */
|
||||
if (!max_depth
|
||||
|| cgraph_estimate_size_after_inlining (1, node, node) >= limit)
|
||||
return;
|
||||
|
@@ -627,7 +627,7 @@ do { fputs (integer_asm_op (POINTER_SIZE / BITS_PER_UNIT, TRUE), FILE); \
|
||||
#define FLOAT_LIB_COMPARE_RETURNS_BOOL(MODE, COMPARISON) false
|
||||
#endif
|
||||
|
||||
/* True if the targets integer-comparision fucntions return { 0, 1, 2
|
||||
/* True if the targets integer-comparision functions return { 0, 1, 2
|
||||
} to indicate { <, ==, > }. False if { -1, 0, 1 } is used
|
||||
instead. The libgcc routines are biased. */
|
||||
#ifndef TARGET_LIB_INT_CMP_BIASED
|
||||
|
@@ -137,7 +137,7 @@ et_recomp_min (struct et_occ *occ)
|
||||
}
|
||||
|
||||
#ifdef DEBUG_ET
|
||||
/* Checks whether neighbourhood of OCC seems sane. */
|
||||
/* Checks whether neighborhood of OCC seems sane. */
|
||||
|
||||
static void
|
||||
et_check_occ_sanity (struct et_occ *occ)
|
||||
|
@@ -3596,7 +3596,7 @@ expand_assignment (tree to, tree from, int want_value)
|
||||
if (bitsize >= GET_MODE_BITSIZE (GET_MODE (str_rtx)))
|
||||
break;
|
||||
|
||||
/* We can't handle fields split accross multiple entities. */
|
||||
/* We can't handle fields split across multiple entities. */
|
||||
if (bitpos1 + bitsize > GET_MODE_BITSIZE (GET_MODE (str_rtx)))
|
||||
break;
|
||||
|
||||
@@ -6120,7 +6120,7 @@ expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
|
||||
|
||||
case IMAGPART_EXPR:
|
||||
/* The imaginary part of the complex number is always second.
|
||||
The expresion is therefore always offset by the size of the
|
||||
The expression is therefore always offset by the size of the
|
||||
scalar type. */
|
||||
offset = 0;
|
||||
bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
|
||||
|
@@ -1008,7 +1008,7 @@ gimplify_decl_expr (tree *stmt_p)
|
||||
gimplify_one_sizepos (&DECL_SIZE (decl), stmt_p);
|
||||
gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), stmt_p);
|
||||
|
||||
/* All occurences of this decl in final gimplified code will be
|
||||
/* All occurrences of this decl in final gimplified code will be
|
||||
replaced by indirection. Setting DECL_VALUE_EXPR does two
|
||||
things: First, it lets the rest of the gimplifier know what
|
||||
replacement to use. Second, it lets the debug info know
|
||||
@@ -3677,7 +3677,7 @@ gimplify_expr (tree *expr_p, tree *pre_p, tree *post_p,
|
||||
|
||||
case CONST_DECL:
|
||||
/* If we require an lvalue, such as for ADDR_EXPR, retain the
|
||||
CONST_DECL node. Otherwise the decl is replacable by its
|
||||
CONST_DECL node. Otherwise the decl is replaceable by its
|
||||
value. */
|
||||
/* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
|
||||
if (fallback & fb_lvalue)
|
||||
|
@@ -2176,7 +2176,7 @@ check_earlyclobber (rtx insn)
|
||||
}
|
||||
}
|
||||
|
||||
/* The function returns true if register classes C1 and C2 inetrsect. */
|
||||
/* The function returns true if register classes C1 and C2 intersect. */
|
||||
|
||||
static bool
|
||||
regclass_intersect (enum reg_class c1, enum reg_class c2)
|
||||
|
@@ -45,7 +45,7 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA
|
||||
weak. If the multi-threaded application includes iostream.h,
|
||||
gthr-posix.h is included and pthread_create will be defined weak.
|
||||
If pthead_create is weak its defining module in libc is not
|
||||
necessarly included in the link and the symbol is resolved to zero.
|
||||
necessarily included in the link and the symbol is resolved to zero.
|
||||
Therefore the first call to it will crash.
|
||||
|
||||
Since -mthreads is a multilib switch on LynxOS we know that at this
|
||||
|
@@ -35,7 +35,7 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA
|
||||
the same format as a HARD_REG_SET. To help make sure this is true,
|
||||
we only try the widest fast integer mode (HOST_WIDEST_FAST_INT)
|
||||
instead of all the smaller types. This approach loses only if
|
||||
there are avery few registers and then only in the few cases where
|
||||
there are very few registers and then only in the few cases where
|
||||
we have an array of HARD_REG_SETs, so it needn't be as complex as
|
||||
it used to be. */
|
||||
|
||||
|
@@ -1938,7 +1938,7 @@ ps_insn_find_column (partial_schedule_ptr ps, ps_insn_ptr ps_i,
|
||||
row = SMODULO (ps_i->cycle, ps->ii);
|
||||
|
||||
/* Find the first must follow and the last must precede
|
||||
and insert the node immediatly after the must precede
|
||||
and insert the node immediately after the must precede
|
||||
but make sure that it there is no must follow after it. */
|
||||
for (next_ps_i = ps->rows[row];
|
||||
next_ps_i;
|
||||
@@ -2007,7 +2007,7 @@ ps_insn_advance_column (partial_schedule_ptr ps, ps_insn_ptr ps_i,
|
||||
if (TEST_BIT (must_follow, next_node->cuid))
|
||||
return false;
|
||||
|
||||
/* Advace PS_I over its next_in_row in the doubly linked list. */
|
||||
/* Advance PS_I over its next_in_row in the doubly linked list. */
|
||||
prev = ps_i->prev_in_row;
|
||||
next = ps_i->next_in_row;
|
||||
|
||||
|
@@ -532,7 +532,7 @@ expand_doubleword_shift_condmove (enum machine_mode op1_mode, optab binoptab,
|
||||
masked by it and shifts in the range [BITS_PER_WORD, SHIFT_MASK) will
|
||||
fill with zeros or sign bits as appropriate.
|
||||
|
||||
If SHIFT_MASK is BITS_PER_WORD - 1, this routine will synthesise
|
||||
If SHIFT_MASK is BITS_PER_WORD - 1, this routine will synthesize
|
||||
a doubleword shift whose equivalent mask is BITS_PER_WORD * 2 - 1.
|
||||
Doing this preserves semantics required by SHIFT_COUNT_TRUNCATED.
|
||||
In all other cases, shifts by values outside [0, BITS_PER_UNIT * 2)
|
||||
|
@@ -86,7 +86,7 @@ static struct
|
||||
|
||||
/* We need to keep a hash table of expressions. The table entries are of
|
||||
type 'struct expr', and for each expression there is a single linked
|
||||
list of occurences. */
|
||||
list of occurrences. */
|
||||
|
||||
/* The table itself. */
|
||||
static htab_t expr_table;
|
||||
@@ -107,7 +107,7 @@ struct expr
|
||||
static struct obstack expr_obstack;
|
||||
|
||||
/* Occurrence of an expression.
|
||||
There is at most one occurence per basic block. If a pattern appears
|
||||
There is at most one occurrence per basic block. If a pattern appears
|
||||
more than once, the last appearance is used. */
|
||||
|
||||
struct occr
|
||||
@@ -422,7 +422,7 @@ lookup_expr_in_table (rtx pat)
|
||||
}
|
||||
|
||||
|
||||
/* Dump all expressions and occurences that are currently in the
|
||||
/* Dump all expressions and occurrences that are currently in the
|
||||
expression hash table to FILE. */
|
||||
|
||||
/* This helper is called via htab_traverse. */
|
||||
|
@@ -44,7 +44,7 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA
|
||||
- polyhedron dependence
|
||||
or with the chains of recurrences based representation,
|
||||
|
||||
- to define a knowledge base for storing the data dependeces
|
||||
- to define a knowledge base for storing the data dependences
|
||||
information,
|
||||
|
||||
- to define an interface to access this data.
|
||||
|
@@ -605,7 +605,7 @@ struct tree_niter_desc
|
||||
to false, then the other fields in this structure
|
||||
should not be used; there is no guarantee that they
|
||||
will be correct. */
|
||||
tree may_be_zero; /* The booleand expression. If it evaluates to true,
|
||||
tree may_be_zero; /* The boolean expression. If it evaluates to true,
|
||||
the loop will exit in the first iteration (i.e.
|
||||
its latch will not be executed), even if the niter
|
||||
field says otherwise. */
|
||||
|
@@ -949,7 +949,7 @@ ifc_temp_var (tree type, tree exp)
|
||||
var = create_tmp_var (type, name);
|
||||
add_referenced_tmp_var (var);
|
||||
|
||||
/* Build new statement to assigne EXP to new variable. */
|
||||
/* Build new statement to assign EXP to new variable. */
|
||||
stmt = build (MODIFY_EXPR, type, var, exp);
|
||||
|
||||
/* Get SSA name for the new variable and set make new statement
|
||||
|
@@ -1922,7 +1922,7 @@ walk_type_fields (tree type, walk_tree_fn func, void *data, void *htab)
|
||||
case POINTER_TYPE:
|
||||
case REFERENCE_TYPE:
|
||||
/* We have to worry about mutually recursive pointers. These can't
|
||||
be written in C. They can in Ada. It's pathlogical, but
|
||||
be written in C. They can in Ada. It's pathological, but
|
||||
there's an ACATS test (c38102a) that checks it. Deal with this
|
||||
by checking if we're pointing to another pointer, that one
|
||||
points to another pointer, that one does too, and we have no htab.
|
||||
|
@@ -398,7 +398,7 @@ sra_elt_hash (const void *x)
|
||||
|
||||
/* Take into account everything back up the chain. Given that chain
|
||||
lengths are rarely very long, this should be acceptable. If we
|
||||
truely identify this as a performance problem, it should work to
|
||||
truly identify this as a performance problem, it should work to
|
||||
hash the pointer value "e->parent". */
|
||||
for (p = e->parent; p ; p = p->parent)
|
||||
h = (h * 65521) ^ sra_hash_tree (p->element);
|
||||
|
@@ -206,7 +206,7 @@ movement_possibility (tree stmt)
|
||||
}
|
||||
|
||||
/* Suppose that operand DEF is used inside the LOOP. Returns the outermost
|
||||
loop to that we could move the expresion using DEF if it did not have
|
||||
loop to that we could move the expression using DEF if it did not have
|
||||
other operands, i.e. the outermost loop enclosing LOOP in that the value
|
||||
of DEF is invariant. */
|
||||
|
||||
@@ -587,7 +587,7 @@ loop_commit_inserts (void)
|
||||
}
|
||||
|
||||
/* Hoist the statements in basic block BB out of the loops prescribed by
|
||||
data stored in LIM_DATA structres associated with each statement. Callback
|
||||
data stored in LIM_DATA structures associated with each statement. Callback
|
||||
for walk_dominator_tree. */
|
||||
|
||||
static void
|
||||
@@ -641,7 +641,7 @@ move_computations_stmt (struct dom_walk_data *dw_data ATTRIBUTE_UNUSED,
|
||||
}
|
||||
|
||||
/* Hoist the statements out of the loops prescribed by data stored in
|
||||
LIM_DATA structres associated with each statement.*/
|
||||
LIM_DATA structures associated with each statement.*/
|
||||
|
||||
static void
|
||||
move_computations (void)
|
||||
@@ -698,7 +698,7 @@ may_move_till (tree ref, tree *index, void *data)
|
||||
return true;
|
||||
}
|
||||
|
||||
/* Forces statements definining (invariant) SSA names in expression EXPR to be
|
||||
/* Forces statements defining (invariant) SSA names in expression EXPR to be
|
||||
moved out of the LOOP. ORIG_LOOP is the loop in that EXPR is used. */
|
||||
|
||||
static void
|
||||
@@ -1023,7 +1023,7 @@ rewrite_mem_refs (tree tmp_var, struct mem_ref *mem_refs)
|
||||
}
|
||||
|
||||
/* Records request for store motion of memory reference REF from LOOP.
|
||||
MEM_REFS is the list of occurences of the reference REF inside LOOP;
|
||||
MEM_REFS is the list of occurrences of the reference REF inside LOOP;
|
||||
these references are rewritten by a new temporary variable.
|
||||
Exits from the LOOP are stored in EXITS, there are N_EXITS of them.
|
||||
The initialization of the temporary variable is put to the preheader
|
||||
|
@@ -228,7 +228,7 @@ struct ivopts_data
|
||||
#define CONSIDER_ALL_CANDIDATES_BOUND \
|
||||
((unsigned) PARAM_VALUE (PARAM_IV_CONSIDER_ALL_CANDIDATES_BOUND))
|
||||
|
||||
/* If there are more iv occurences, we just give up (it is quite unlikely that
|
||||
/* If there are more iv occurrences, we just give up (it is quite unlikely that
|
||||
optimizing such a loop would help, and it would take ages). */
|
||||
|
||||
#define MAX_CONSIDERED_USES \
|
||||
|
@@ -190,7 +190,7 @@ number_of_iterations_cond (tree type, tree base0, tree step0,
|
||||
/* We want to take care only of <=; this is easy,
|
||||
as in cases the overflow would make the transformation unsafe the loop
|
||||
does not roll. Seemingly it would make more sense to want to take
|
||||
care of <, as NE is more simmilar to it, but the problem is that here
|
||||
care of <, as NE is more similar to it, but the problem is that here
|
||||
the transformation would be more difficult due to possibly infinite
|
||||
loops. */
|
||||
if (zero_p (step0))
|
||||
@@ -266,7 +266,7 @@ number_of_iterations_cond (tree type, tree base0, tree step0,
|
||||
obviously if the test for overflow during that transformation
|
||||
passed, we cannot overflow here. Most importantly any
|
||||
loop with sharp end condition and step 1 falls into this
|
||||
cathegory, so handling this case specially is definitely
|
||||
category, so handling this case specially is definitely
|
||||
worth the troubles. */
|
||||
may_xform = boolean_true_node;
|
||||
}
|
||||
|
@@ -51,7 +51,7 @@ Boston, MA 02111-1307, USA. */
|
||||
get_stmt_operands() in the primary entry point.
|
||||
|
||||
The operand tree is the parsed by the various get_* routines which look
|
||||
through the stmt tree for the occurence of operands which may be of
|
||||
through the stmt tree for the occurrence of operands which may be of
|
||||
interest, and calls are made to the append_* routines whenever one is
|
||||
found. There are 5 of these routines, each representing one of the
|
||||
5 types of operands. Defs, Uses, Virtual Uses, Virtual May Defs, and
|
||||
@@ -1686,9 +1686,9 @@ copy_virtual_operands (tree dst, tree src)
|
||||
|
||||
|
||||
/* Specifically for use in DOM's expression analysis. Given a store, we
|
||||
create an artifical stmt which looks like a load from the store, this can
|
||||
create an artificial stmt which looks like a load from the store, this can
|
||||
be used to eliminate redundant loads. OLD_OPS are the operands from the
|
||||
store stmt, and NEW_STMT is the new load which reperesent a load of the
|
||||
store stmt, and NEW_STMT is the new load which represents a load of the
|
||||
values stored. */
|
||||
|
||||
void
|
||||
|
@@ -188,7 +188,7 @@ extern void create_ssa_artficial_load_stmt (stmt_operands_p, tree);
|
||||
|
||||
|
||||
/* This structure is used in the operand iterator loops. It contains the
|
||||
items required to determine which operand is retreived next. During
|
||||
items required to determine which operand is retrieved next. During
|
||||
optimization, this structure is scalarized, and any unused fields are
|
||||
optimized away, resulting in little overhead. */
|
||||
|
||||
@@ -217,7 +217,7 @@ typedef struct ssa_operand_iterator_d
|
||||
#define SSA_OP_VUSE 0x04 /* VUSE operands. */
|
||||
#define SSA_OP_VMAYUSE 0x08 /* USE portion of V_MAY_DEFS. */
|
||||
#define SSA_OP_VMAYDEF 0x10 /* DEF portion of V_MAY_DEFS. */
|
||||
#define SSA_OP_VMUSTDEF 0x20 /* V_MUST_DEF defintions. */
|
||||
#define SSA_OP_VMUSTDEF 0x20 /* V_MUST_DEF definitions. */
|
||||
|
||||
/* These are commonly grouped operand flags. */
|
||||
#define SSA_OP_VIRTUAL_USES (SSA_OP_VUSE | SSA_OP_VMAYUSE)
|
||||
|
@@ -68,7 +68,7 @@
|
||||
|
||||
SSA_PROP_INTERESTING: S produces a value that can be computed
|
||||
at compile time. Its result can be propagated into the
|
||||
statements that feed from S. Furhtermore, if S is a
|
||||
statements that feed from S. Furthermore, if S is a
|
||||
conditional jump, only the edge known to be taken is added
|
||||
to the work list. Edges that are known not to execute are
|
||||
never simulated.
|
||||
|
@@ -40,7 +40,7 @@ enum ssa_prop_result {
|
||||
returned by SSA_PROP_VISIT_STMT should be added to
|
||||
INTERESTING_SSA_EDGES. If the statement being visited is a
|
||||
conditional jump, SSA_PROP_VISIT_STMT should indicate which edge
|
||||
out of the basic block should be marked exectuable. */
|
||||
out of the basic block should be marked executable. */
|
||||
SSA_PROP_INTERESTING,
|
||||
|
||||
/* The statement produces a varying (i.e., useless) value and
|
||||
|
@@ -208,7 +208,7 @@ create_block_for_threading (basic_block bb, struct redirection_data *rd)
|
||||
to update dominator tree and SSA graph after such changes.
|
||||
|
||||
The key to keeping the SSA graph update managable is to duplicate
|
||||
the side effects occuring in BB so that those side effects still
|
||||
the side effects occurring in BB so that those side effects still
|
||||
occur on the paths which bypass BB after redirecting edges.
|
||||
|
||||
We accomplish this by creating duplicates of BB and arranging for
|
||||
|
@@ -62,7 +62,7 @@ static struct value_prof_hooks *value_prof_hooks;
|
||||
-- list of counters starting from the first one. */
|
||||
|
||||
/* For speculative prefetching, the range in that we do not prefetch (because
|
||||
we assume that it will be in cache anyway). The assymetry between min and
|
||||
we assume that it will be in cache anyway). The asymmetry between min and
|
||||
max range is trying to reflect the fact that the sequential prefetching
|
||||
of the data is commonly done directly by hardware. Nevertheless, these
|
||||
values are just a guess and should of course be target-specific. */
|
||||
@@ -971,7 +971,7 @@ speculative_prefetching_transform (rtx insn)
|
||||
|
||||
/* We require that count is at least half of all; this means
|
||||
that for the transformation to fire the value must be constant
|
||||
at least 50% of time (and 75% gives the garantee of usage). */
|
||||
at least 50% of time (and 75% gives the guarantee of usage). */
|
||||
if (!rtx_equal_p (address, value) || 2 * count < all)
|
||||
return false;
|
||||
|
||||
|
@@ -49,7 +49,7 @@ vec_p_reserve (void *vec, int reserve MEM_STAT_DECL)
|
||||
/* Ensure there are at least RESERVE free slots in VEC, if RESERVE >=
|
||||
0. If RESERVE < 0, increase the current allocation exponentially.
|
||||
VEC can be NULL, in which case a new vector is created. The
|
||||
vector's trailing array is at VEC_OFFSET offset and consistes of
|
||||
vector's trailing array is at VEC_OFFSET offset and consists of
|
||||
ELT_SIZE sized elements. */
|
||||
|
||||
void *
|
||||
|
@@ -30,7 +30,7 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA
|
||||
interoperate with the GTY machinery.
|
||||
|
||||
Because of the different behaviour of objects and of pointers to
|
||||
objects, there are two flavours. One to deal with a vector of
|
||||
objects, there are two flavors. One to deal with a vector of
|
||||
pointers to objects, and one to deal with a vector of objects
|
||||
themselves. Both of these pass pointers to objects around -- in
|
||||
the former case the pointers are stored into the vector and in the
|
||||
@@ -277,7 +277,7 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA
|
||||
void VEC_T_ordered_remove (VEC(T) *v, unsigned ix); // Object
|
||||
|
||||
Remove an element from the IXth position of V. Ordering of
|
||||
remaining elements is preserverd. For pointer vectors returns the
|
||||
remaining elements is preserved. For pointer vectors returns the
|
||||
removed object. This is an O(N) operation due to a memmove. */
|
||||
|
||||
#define VEC_ordered_remove(TDEF,V,I) \
|
||||
|
Loading…
x
Reference in New Issue
Block a user