c-typeck.c: Fix comment typos.

	* c-typeck.c: Fix comment typos.
	* calls.c: Likewise.
	* cfgcleanup.c: Likewise.
	* cgraph.h: Likewise.
	* dwarfout.c: Likewise.
	* emit-rtl.c: Likewise.
	* explow.c: Likewise.
	* expr.c: Likewise.
	* flow.c: Likewise.
	* function.c: Likewise.
	* gcc.c: Likewise.
	* ggc-zone.c: Likewise.
	* ifcvt.c: Likewise.
	* local-alloc.c: Likewise.
	* predict.c: Likewise.
	* pretty-print.c: Likewise.
	* profile.c: Likewise.
	* ra-colorize.c: Likewise.
	* sched-vis.c: Likewise.
	* stor-layout.c: Likewise.

From-SVN: r74594
Kazu Hirata 2003-12-13 04:11:23 +00:00
parent 7ecd45764f
commit ba228239d2
20 changed files with 27 additions and 27 deletions

c-typeck.c

@@ -5642,7 +5642,7 @@ output_pending_init_elements (int all)
retry:
- /* Look thru the whole pending tree.
+ /* Look through the whole pending tree.
If we find an element that should be output now,
output it. Otherwise, set NEXT to the element
that comes first among those still pending. */

calls.c

@@ -290,7 +290,7 @@ prepare_call_address (rtx funexp, tree fndecl, rtx *call_fusage,
/* Get possible static chain value for nested function in C. */
static_chain_value = lookup_static_chain (fndecl);
- /* Make a valid memory address and copy constants thru pseudo-regs,
+ /* Make a valid memory address and copy constants through pseudo-regs,
but not for a constant address if -fno-function-cse. */
if (GET_CODE (funexp) != SYMBOL_REF)
/* If we are using registers for parameters, force the

cfgcleanup.c

@@ -174,7 +174,7 @@ try_simplify_condjump (basic_block cbranch_block)
end = BB_END (jump_block);
/* Deleting a block may produce unreachable code warning even when we are
- not deleting anything live. Supress it by moving all the line number
+ not deleting anything live. Suppress it by moving all the line number
notes out of the block. */
for (insn = BB_HEAD (jump_block); insn != NEXT_INSN (BB_END (jump_block));
insn = next)

cgraph.h

@@ -75,7 +75,7 @@ struct cgraph_rtl_info GTY(())
};
- /* The cgraph data strutcture.
+ /* The cgraph data structure.
Each function decl has assigned cgraph_node listing callees and callers. */
struct cgraph_node GTY((chain_next ("%h.next"), chain_prev ("%h.previous")))
@@ -122,7 +122,7 @@ struct cgraph_edge GTY(())
bool inline_call;
};
- /* The cgraph_varpool data strutcture.
+ /* The cgraph_varpool data structure.
Each static variable decl has assigned cgraph_varpool_node. */
struct cgraph_varpool_node GTY(())

dwarfout.c

@@ -2074,7 +2074,7 @@ output_mem_loc_descriptor (rtx rtl)
which contains the given subreg. */
rtl = SUBREG_REG (rtl);
- /* Drop thru. */
+ /* Drop through. */
case REG:
@@ -2157,7 +2157,7 @@ output_loc_descriptor (rtx rtl)
which contains the given subreg. */
rtl = SUBREG_REG (rtl);
- /* Drop thru. */
+ /* Drop through. */
case REG:
ASM_OUTPUT_DWARF_STACK_OP (asm_out_file, OP_REG);

emit-rtl.c

@@ -2596,7 +2596,7 @@ verify_rtx_sharing (rtx orig, rtx insn)
return;
}
- /* Go through all the RTL insn bodies and chec that there is no inexpected
+ /* Go through all the RTL insn bodies and check that there is no unexpected
sharing in between the subexpressions. */
void

explow.c

@@ -444,7 +444,7 @@ memory_address (enum machine_mode mode, rtx x)
x = convert_memory_address (Pmode, x);
- /* By passing constant addresses thru registers
+ /* By passing constant addresses through registers
we get a chance to cse them. */
if (! cse_not_expected && CONSTANT_P (x) && CONSTANT_ADDRESS_P (x))
x = force_reg (Pmode, x);

expr.c

@@ -9793,7 +9793,7 @@ do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
if (mode != Pmode)
index = convert_to_mode (Pmode, index, 1);
- /* Don't let a MEM slip thru, because then INDEX that comes
+ /* Don't let a MEM slip through, because then INDEX that comes
out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
and break_out_memory_refs will go to work on it and mess it up. */
#ifdef PIC_CASE_VECTOR_ADDRESS

flow.c

@@ -2462,7 +2462,7 @@ mark_set_regs (struct propagate_block_info *pbi, rtx x, rtx insn)
case SET:
if (GET_CODE (XEXP (x, 1)) == ASM_OPERANDS)
flags |= PROP_ASM_SCAN;
- /* Fall thru */
+ /* Fall through */
case CLOBBER:
mark_set_1 (pbi, code, SET_DEST (x), cond, insn, flags);
return;
@@ -2497,7 +2497,7 @@ mark_set_regs (struct propagate_block_info *pbi, rtx x, rtx insn)
mark_set:
if (GET_CODE (XEXP (sub, 1)) == ASM_OPERANDS)
flags |= PROP_ASM_SCAN;
- /* Fall thru */
+ /* Fall through */
case CLOBBER:
mark_clob:
mark_set_1 (pbi, code, SET_DEST (sub), cond, insn, flags);

function.c

@@ -6733,7 +6733,7 @@ expand_function_start (tree subr, int parms_have_cleanups)
tem = decl_function_context (tem);
if (tem == 0)
break;
- /* Chain thru stack frames, assuming pointer to next lexical frame
+ /* Chain through stack frames, assuming pointer to next lexical frame
is found at the place we always store it. */
#ifdef FRAME_GROWS_DOWNWARD
last_ptr = plus_constant (last_ptr,

gcc.c

@@ -2687,7 +2687,7 @@ execute (void)
{
/* verbose_only_flag should act as if the spec was
executed, so increment execution_count before
- returning. Theis prevent spurious warnings about
+ returning. This prevents spurious warnings about
unused linker input files, etc. */
execution_count++;
return 0;

ggc-zone.c

@@ -89,7 +89,7 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA
If we track inter-zone pointers, we can mark single zones at a
time.
If we have a zone where we guarantee no inter-zone pointers, we
- could mark that zone seperately.
+ could mark that zone separately.
The garbage zone should not be marked, and we should return 1 in
ggc_set_mark for any object in the garbage zone, which cuts off
marking quickly. */

ifcvt.c

@@ -2962,8 +2962,8 @@ dead_or_predicable (basic_block test_bb, basic_block merge_bb,
if (HAVE_conditional_execution)
{
/* In the conditional execution case, we have things easy. We know
- the condition is reversible. We don't have to check life info,
- becase we're going to conditionally execute the code anyway.
+ the condition is reversible. We don't have to check life info
+ because we're going to conditionally execute the code anyway.
All that's left is making sure the insns involved can actually
be predicated. */

local-alloc.c

@@ -108,7 +108,7 @@ struct qty
/* Number of words needed to hold the data in given quantity.
This depends on its machine mode. It is used for these purposes:
- 1. It is used in computing the relative importances of qtys,
+ 1. It is used in computing the relative importance of qtys,
which determines the order in which we look for regs for them.
2. It is used in rules that prevent tying several registers of
different sizes in a way that is geometrically impossible

predict.c

@@ -878,7 +878,7 @@ typedef struct block_info_def
/* To keep queue of basic blocks to process. */
basic_block next;
- /* True if block needs to be visited in prop_freqency. */
+ /* True if block needs to be visited in propagate_freq. */
int tovisit:1;
/* Number of predecessors we need to visit first. */

pretty-print.c

@@ -472,7 +472,7 @@ pp_base_last_position_in_text (const pretty_printer *pp)
}
/* Return the amount of characters PRETTY-PRINTER can accept to
- make a full line. Meaningfull only in line-wrapping mode. */
+ make a full line. Meaningful only in line-wrapping mode. */
int
pp_base_remaining_character_count_for_line (pretty_printer *pp)
{

profile.c

@@ -502,7 +502,7 @@ compute_branch_probabilities (void)
}
for (e = bb->succ; e; e = e->succ_next)
{
- /* Function may return twice in the cased the called fucntion is
+ /* Function may return twice in the cased the called function is
setjmp or calls fork, but we can't represent this by extra
edge from the entry, since extra edge from the exit is
already present. We get negative frequency from the entry

ra-colorize.c

@@ -618,7 +618,7 @@ ok (struct web *target, struct web *source)
{
/* The main webs do _not_ conflict, only some parts of both. This
means, that 4 is possibly true, so we need to check this too.
- For this we go thru all sub conflicts between T and C, and see if
+ For this we go through all sub conflicts between T and C, and see if
the target part of C already conflicts with S. When this is not
the case we disallow coalescing. */
struct sub_conflict *sl;
@@ -1214,7 +1214,7 @@ calculate_dont_begin (struct web *web, HARD_REG_SET *result)
}
}
/* The next if() only gets true, if there was no wl->sub at all, in
- which case we are only making one go thru this loop with W being
+ which case we are only making one go through this loop with W being
a whole web. */
if (!sl)
break;
@@ -1433,7 +1433,7 @@ colorize_one_web (struct web *web, int hard)
even if we spill this one here, the situation won't become better
in the next iteration. It probably will have the same conflicts,
those will have the same colors, and we would come here again, for
- all parts, in which this one gets splitted by the spill. This
+ all parts, in which this one gets split by the spill. This
can result in endless iteration spilling the same register again and
again. That's why we try to find a neighbor, which spans more
instructions that ourself, and got a color, and try to spill _that_.
@@ -1826,7 +1826,7 @@ try_recolor_web (struct web *web)
else if (web2->type == SELECT)
/* This means, that WEB2 once was a part of a coalesced
web, which got spilled in the above colorize_one_web()
- call, and whose parts then got splitted and put back
+ call, and whose parts then got split and put back
onto the SELECT stack. As the cause for that splitting
(the coloring of WEB) was worthless, we should again
coalesce the parts, as they were before. For now we

sched-vis.c

@@ -80,7 +80,7 @@ insn_print_units (rtx insn)
}
/* MAX_VISUAL_LINES is the maximum number of lines in visualization table
- of a basic block. If more lines are needed, table is splitted to two.
+ of a basic block. If more lines are needed, table is split to two.
n_visual_lines is the number of lines printed so far for a block.
visual_tbl contains the block visualization info.
vis_no_unit holds insns in a cycle that are not mapped to any unit. */

stor-layout.c

@@ -453,7 +453,7 @@ layout_decl (tree decl, unsigned int known_align)
else if (DECL_PACKED (decl) && DECL_USER_ALIGN (decl))
/* Don't touch DECL_ALIGN. For other packed fields, go ahead and
round up; we'll reduce it again below. We want packing to
- supercede USER_ALIGN inherited from the type, but defer to
+ supersede USER_ALIGN inherited from the type, but defer to
alignment explicitly specified on the field decl. */;
else
do_type_align (type, decl);