[multiple changes]

2003-02-22  Richard Henderson  <rth@redhat.com>

	* i386.c, i386.h (TUNEMASK): Rename from CPUMASK.

2003-02-22  Kelley Cook <kelley@dwhoops.info>

	* i386.h, i386.c, i386.md (ix86_tune): Rename from ix86_cpu.
	(ix86_tune_string): Rename from ix86_cpu_string.

From-SVN: r63314
Author: Richard Henderson <rth@redhat.com>
Date:   2003-02-22 22:10:02 -08:00
Parent: 9d913bbf3f
Commit: 9e5555268a
4 changed files with 103 additions and 94 deletions
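
The patch itself is a mechanical rename: the variable that names the processor we schedule and tune for becomes ix86_tune / ix86_tune_string, and the per-tuning-target bitmask CPUMASK becomes TUNEMASK, so the identifiers match the -mtune= option rather than the old -mcpu= spelling of "cpu".  For readers unfamiliar with the idiom the renamed macros implement, here is a minimal C sketch; the processor subset and the flag value are illustrative stand-ins, not GCC's real tables:

    /* Illustrative sketch only: the processor list and the flag value are
       made up; GCC's real tables cover many more CPUs and tuning flags.  */
    enum processor_type { PROCESSOR_I386, PROCESSOR_I486, PROCESSOR_PENTIUM,
                          PROCESSOR_K6, PROCESSOR_max };

    enum processor_type ix86_tune;        /* scheduling target, was ix86_cpu */

    #define TUNEMASK (1 << ix86_tune)     /* was CPUMASK (1 << ix86_cpu) */

    /* Each tuning flag is a bitmask with one bit per processor_type...  */
    const int x86_use_leave = (1 << PROCESSOR_I386) | (1 << PROCESSOR_K6);

    /* ...so a feature test is a single AND against the current tuning target.  */
    #define TARGET_USE_LEAVE (x86_use_leave & TUNEMASK)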


@@ -1,3 +1,12 @@
+2003-02-22  Richard Henderson  <rth@redhat.com>
+
+	* i386.c, i386.h (TUNEMASK): Rename from CPUMASK.
+
+2003-02-22  Kelley Cook <kelley@dwhoops.info>
+
+	* i386.h, i386.c, i386.md (ix86_tune): Rename from ix86_cpu.
+	(ix86_tune_string): Rename from ix86_cpu_string.
+
 2003-02-22  Kelley Cook <kelleycook@comcast.net>
 
 	* config/i386/i386.c: Replace "mcpu" with "mtune".


@@ -754,12 +754,12 @@ enum tls_dialect ix86_tls_dialect = TLS_DIALECT_GNU;
 enum fpmath_unit ix86_fpmath;
 /* Which cpu are we scheduling for.  */
-enum processor_type ix86_cpu;
+enum processor_type ix86_tune;
 /* Which instruction set architecture to use.  */
 enum processor_type ix86_arch;
 /* Strings to hold which cpu and instruction set architecture to use.  */
-const char *ix86_cpu_string;		/* for -mtune=<xxx> */
+const char *ix86_tune_string;		/* for -mtune=<xxx> */
 const char *ix86_arch_string;		/* for -march=<xxx> */
 const char *ix86_fpmath_string;		/* for -mfpmath=<xxx> */
@@ -1148,10 +1148,10 @@ override_options ()
   SUBTARGET_OVERRIDE_OPTIONS;
 #endif
-  if (!ix86_cpu_string && ix86_arch_string)
-    ix86_cpu_string = ix86_arch_string;
-  if (!ix86_cpu_string)
-    ix86_cpu_string = cpu_names [TARGET_CPU_DEFAULT];
+  if (!ix86_tune_string && ix86_arch_string)
+    ix86_tune_string = ix86_arch_string;
+  if (!ix86_tune_string)
+    ix86_tune_string = cpu_names [TARGET_CPU_DEFAULT];
   if (!ix86_arch_string)
     ix86_arch_string = TARGET_64BIT ? "k8" : "i386";
@@ -1201,7 +1201,7 @@ override_options ()
       {
 	ix86_arch = processor_alias_table[i].processor;
 	/* Default cpu tuning to the architecture.  */
-	ix86_cpu = ix86_arch;
+	ix86_tune = ix86_arch;
 	if (processor_alias_table[i].flags & PTA_MMX
 	    && !(target_flags_explicit & MASK_MMX))
 	  target_flags |= MASK_MMX;
@@ -1228,9 +1228,9 @@ override_options ()
     error ("bad value (%s) for -march= switch", ix86_arch_string);
   for (i = 0; i < pta_size; i++)
-    if (! strcmp (ix86_cpu_string, processor_alias_table[i].name))
+    if (! strcmp (ix86_tune_string, processor_alias_table[i].name))
       {
-	ix86_cpu = processor_alias_table[i].processor;
+	ix86_tune = processor_alias_table[i].processor;
 	if (TARGET_64BIT && !(processor_alias_table[i].flags & PTA_64BIT))
 	  error ("CPU you selected does not support x86-64 instruction set");
 	break;
@@ -1238,14 +1238,14 @@ override_options ()
       if (processor_alias_table[i].flags & PTA_PREFETCH_SSE)
 	x86_prefetch_sse = true;
   if (i == pta_size)
-    error ("bad value (%s) for -mtune= switch", ix86_cpu_string);
+    error ("bad value (%s) for -mtune= switch", ix86_tune_string);
   if (optimize_size)
     ix86_cost = &size_cost;
   else
-    ix86_cost = processor_target_table[ix86_cpu].cost;
-  target_flags |= processor_target_table[ix86_cpu].target_enable;
-  target_flags &= ~processor_target_table[ix86_cpu].target_disable;
+    ix86_cost = processor_target_table[ix86_tune].cost;
+  target_flags |= processor_target_table[ix86_tune].target_enable;
+  target_flags &= ~processor_target_table[ix86_tune].target_disable;
   /* Arrange to set up i386_stack_locals for all functions.  */
   init_machine_status = ix86_init_machine_status;
@@ -1308,17 +1308,17 @@ override_options ()
   /* Default align_* from the processor table.  */
   if (align_loops == 0)
     {
-      align_loops = processor_target_table[ix86_cpu].align_loop;
-      align_loops_max_skip = processor_target_table[ix86_cpu].align_loop_max_skip;
+      align_loops = processor_target_table[ix86_tune].align_loop;
+      align_loops_max_skip = processor_target_table[ix86_tune].align_loop_max_skip;
     }
   if (align_jumps == 0)
     {
-      align_jumps = processor_target_table[ix86_cpu].align_jump;
-      align_jumps_max_skip = processor_target_table[ix86_cpu].align_jump_max_skip;
+      align_jumps = processor_target_table[ix86_tune].align_jump;
+      align_jumps_max_skip = processor_target_table[ix86_tune].align_jump_max_skip;
     }
   if (align_functions == 0)
     {
-      align_functions = processor_target_table[ix86_cpu].align_func;
+      align_functions = processor_target_table[ix86_tune].align_func;
     }
   /* Validate -mpreferred-stack-boundary= value, or provide default.
@@ -1339,7 +1339,7 @@ override_options ()
     }
   /* Validate -mbranch-cost= value, or provide default.  */
-  ix86_branch_cost = processor_target_table[ix86_cpu].cost->branch_cost;
+  ix86_branch_cost = processor_target_table[ix86_tune].cost->branch_cost;
   if (ix86_branch_cost_string)
     {
       i = atoi (ix86_branch_cost_string);
@@ -1438,7 +1438,7 @@ override_options ()
 	if (x86_3dnow_a & (1 << ix86_arch))
 	  target_flags |= MASK_3DNOW_A;
     }
-  if ((x86_accumulate_outgoing_args & CPUMASK)
+  if ((x86_accumulate_outgoing_args & TUNEMASK)
       && !(target_flags_explicit & MASK_ACCUMULATE_OUTGOING_ARGS)
       && !optimize_size)
     target_flags |= MASK_ACCUMULATE_OUTGOING_ARGS;
@@ -4004,7 +4004,7 @@ promotable_binary_operator (op, mode)
     case MULT:
       /* Modern CPUs have same latency for HImode and SImode multiply,
	 but 386 and 486 do HImode multiply faster.  */
-      return ix86_cpu > PROCESSOR_I486;
+      return ix86_tune > PROCESSOR_I486;
     case PLUS:
     case AND:
     case IOR:
@@ -4264,7 +4264,7 @@ standard_80387_constant_p (x)
   /* For XFmode constants, try to find a special 80387 instruction on
      those CPUs that benefit from them.  */
   if (GET_MODE (x) == XFmode
-      && x86_ext_80387_constants & CPUMASK)
+      && x86_ext_80387_constants & TUNEMASK)
     {
       REAL_VALUE_TYPE r;
       int i;
@@ -5468,7 +5468,7 @@ ix86_decompose_address (addr, out)
   /* Special case: on K6, [%esi] makes the instruction vector decoded.
      Avoid this by transforming to [%esi+0].  */
-  if (ix86_cpu == PROCESSOR_K6 && !optimize_size
+  if (ix86_tune == PROCESSOR_K6 && !optimize_size
       && base && !index && !disp
       && REG_P (base)
       && REGNO_REG_CLASS (REGNO (base)) == SIREG)
@@ -11910,7 +11910,7 @@ ix86_attr_length_address_default (insn)
 static int
 ix86_issue_rate ()
 {
-  switch (ix86_cpu)
+  switch (ix86_tune)
     {
     case PROCESSOR_PENTIUM:
     case PROCESSOR_K6:
@@ -12037,7 +12037,7 @@ ix86_adjust_cost (insn, link, dep_insn, cost)
   insn_type = get_attr_type (insn);
   dep_insn_type = get_attr_type (dep_insn);
-  switch (ix86_cpu)
+  switch (ix86_tune)
     {
     case PROCESSOR_PENTIUM:
       /* Address Generation Interlock adds a cycle of latency.  */
@@ -12328,7 +12328,7 @@ ix86_sched_reorder (dump, sched_verbose, ready, n_readyp, clock_var)
       goto out;
     }
-  switch (ix86_cpu)
+  switch (ix86_tune)
     {
     default:
       break;
@@ -12353,7 +12353,7 @@ ix86_variable_issue (dump, sched_verbose, insn, can_issue_more)
      int can_issue_more;
 {
   int i;
-  switch (ix86_cpu)
+  switch (ix86_tune)
     {
     default:
       return can_issue_more - 1;
@@ -12420,7 +12420,7 @@ ia32_use_dfa_pipeline_interface ()
 static int
 ia32_multipass_dfa_lookahead ()
 {
-  if (ix86_cpu == PROCESSOR_PENTIUM)
+  if (ix86_tune == PROCESSOR_PENTIUM)
     return 2;
   else
     return 0;


@@ -201,17 +201,17 @@ extern int target_flags;
 #endif
 #endif
-#define TARGET_386 (ix86_cpu == PROCESSOR_I386)
-#define TARGET_486 (ix86_cpu == PROCESSOR_I486)
-#define TARGET_PENTIUM (ix86_cpu == PROCESSOR_PENTIUM)
-#define TARGET_PENTIUMPRO (ix86_cpu == PROCESSOR_PENTIUMPRO)
-#define TARGET_K6 (ix86_cpu == PROCESSOR_K6)
-#define TARGET_ATHLON (ix86_cpu == PROCESSOR_ATHLON)
-#define TARGET_PENTIUM4 (ix86_cpu == PROCESSOR_PENTIUM4)
-#define TARGET_K8 (ix86_cpu == PROCESSOR_K8)
+#define TARGET_386 (ix86_tune == PROCESSOR_I386)
+#define TARGET_486 (ix86_tune == PROCESSOR_I486)
+#define TARGET_PENTIUM (ix86_tune == PROCESSOR_PENTIUM)
+#define TARGET_PENTIUMPRO (ix86_tune == PROCESSOR_PENTIUMPRO)
+#define TARGET_K6 (ix86_tune == PROCESSOR_K6)
+#define TARGET_ATHLON (ix86_tune == PROCESSOR_ATHLON)
+#define TARGET_PENTIUM4 (ix86_tune == PROCESSOR_PENTIUM4)
+#define TARGET_K8 (ix86_tune == PROCESSOR_K8)
 #define TARGET_ATHLON_K8 (TARGET_K8 || TARGET_ATHLON)
-#define CPUMASK (1 << ix86_cpu)
+#define TUNEMASK (1 << ix86_tune)
 extern const int x86_use_leave, x86_push_memory, x86_zero_extend_with_and;
 extern const int x86_use_bit_test, x86_cmove, x86_deep_branch;
 extern const int x86_branch_hints, x86_unroll_strlen;
@@ -233,57 +233,57 @@ extern const int x86_use_ffreep, x86_sse_partial_regs_for_cvtsd2ss;
 extern const int x86_inter_unit_moves;
 extern int x86_prefetch_sse;
-#define TARGET_USE_LEAVE (x86_use_leave & CPUMASK)
-#define TARGET_PUSH_MEMORY (x86_push_memory & CPUMASK)
-#define TARGET_ZERO_EXTEND_WITH_AND (x86_zero_extend_with_and & CPUMASK)
-#define TARGET_USE_BIT_TEST (x86_use_bit_test & CPUMASK)
-#define TARGET_UNROLL_STRLEN (x86_unroll_strlen & CPUMASK)
+#define TARGET_USE_LEAVE (x86_use_leave & TUNEMASK)
+#define TARGET_PUSH_MEMORY (x86_push_memory & TUNEMASK)
+#define TARGET_ZERO_EXTEND_WITH_AND (x86_zero_extend_with_and & TUNEMASK)
+#define TARGET_USE_BIT_TEST (x86_use_bit_test & TUNEMASK)
+#define TARGET_UNROLL_STRLEN (x86_unroll_strlen & TUNEMASK)
 /* For sane SSE instruction set generation we need fcomi instruction.  It is
    safe to enable all CMOVE instructions.  */
 #define TARGET_CMOVE ((x86_cmove & (1 << ix86_arch)) || TARGET_SSE)
-#define TARGET_DEEP_BRANCH_PREDICTION (x86_deep_branch & CPUMASK)
-#define TARGET_BRANCH_PREDICTION_HINTS (x86_branch_hints & CPUMASK)
-#define TARGET_DOUBLE_WITH_ADD (x86_double_with_add & CPUMASK)
-#define TARGET_USE_SAHF ((x86_use_sahf & CPUMASK) && !TARGET_64BIT)
-#define TARGET_MOVX (x86_movx & CPUMASK)
-#define TARGET_PARTIAL_REG_STALL (x86_partial_reg_stall & CPUMASK)
-#define TARGET_USE_LOOP (x86_use_loop & CPUMASK)
-#define TARGET_USE_FIOP (x86_use_fiop & CPUMASK)
-#define TARGET_USE_MOV0 (x86_use_mov0 & CPUMASK)
-#define TARGET_USE_CLTD (x86_use_cltd & CPUMASK)
-#define TARGET_SPLIT_LONG_MOVES (x86_split_long_moves & CPUMASK)
-#define TARGET_READ_MODIFY_WRITE (x86_read_modify_write & CPUMASK)
-#define TARGET_READ_MODIFY (x86_read_modify & CPUMASK)
-#define TARGET_PROMOTE_QImode (x86_promote_QImode & CPUMASK)
-#define TARGET_FAST_PREFIX (x86_fast_prefix & CPUMASK)
-#define TARGET_SINGLE_STRINGOP (x86_single_stringop & CPUMASK)
-#define TARGET_QIMODE_MATH (x86_qimode_math & CPUMASK)
-#define TARGET_HIMODE_MATH (x86_himode_math & CPUMASK)
-#define TARGET_PROMOTE_QI_REGS (x86_promote_qi_regs & CPUMASK)
-#define TARGET_PROMOTE_HI_REGS (x86_promote_hi_regs & CPUMASK)
-#define TARGET_ADD_ESP_4 (x86_add_esp_4 & CPUMASK)
-#define TARGET_ADD_ESP_8 (x86_add_esp_8 & CPUMASK)
-#define TARGET_SUB_ESP_4 (x86_sub_esp_4 & CPUMASK)
-#define TARGET_SUB_ESP_8 (x86_sub_esp_8 & CPUMASK)
-#define TARGET_INTEGER_DFMODE_MOVES (x86_integer_DFmode_moves & CPUMASK)
-#define TARGET_PARTIAL_REG_DEPENDENCY (x86_partial_reg_dependency & CPUMASK)
+#define TARGET_DEEP_BRANCH_PREDICTION (x86_deep_branch & TUNEMASK)
+#define TARGET_BRANCH_PREDICTION_HINTS (x86_branch_hints & TUNEMASK)
+#define TARGET_DOUBLE_WITH_ADD (x86_double_with_add & TUNEMASK)
+#define TARGET_USE_SAHF ((x86_use_sahf & TUNEMASK) && !TARGET_64BIT)
+#define TARGET_MOVX (x86_movx & TUNEMASK)
+#define TARGET_PARTIAL_REG_STALL (x86_partial_reg_stall & TUNEMASK)
+#define TARGET_USE_LOOP (x86_use_loop & TUNEMASK)
+#define TARGET_USE_FIOP (x86_use_fiop & TUNEMASK)
+#define TARGET_USE_MOV0 (x86_use_mov0 & TUNEMASK)
+#define TARGET_USE_CLTD (x86_use_cltd & TUNEMASK)
+#define TARGET_SPLIT_LONG_MOVES (x86_split_long_moves & TUNEMASK)
+#define TARGET_READ_MODIFY_WRITE (x86_read_modify_write & TUNEMASK)
+#define TARGET_READ_MODIFY (x86_read_modify & TUNEMASK)
+#define TARGET_PROMOTE_QImode (x86_promote_QImode & TUNEMASK)
+#define TARGET_FAST_PREFIX (x86_fast_prefix & TUNEMASK)
+#define TARGET_SINGLE_STRINGOP (x86_single_stringop & TUNEMASK)
+#define TARGET_QIMODE_MATH (x86_qimode_math & TUNEMASK)
+#define TARGET_HIMODE_MATH (x86_himode_math & TUNEMASK)
+#define TARGET_PROMOTE_QI_REGS (x86_promote_qi_regs & TUNEMASK)
+#define TARGET_PROMOTE_HI_REGS (x86_promote_hi_regs & TUNEMASK)
+#define TARGET_ADD_ESP_4 (x86_add_esp_4 & TUNEMASK)
+#define TARGET_ADD_ESP_8 (x86_add_esp_8 & TUNEMASK)
+#define TARGET_SUB_ESP_4 (x86_sub_esp_4 & TUNEMASK)
+#define TARGET_SUB_ESP_8 (x86_sub_esp_8 & TUNEMASK)
+#define TARGET_INTEGER_DFMODE_MOVES (x86_integer_DFmode_moves & TUNEMASK)
+#define TARGET_PARTIAL_REG_DEPENDENCY (x86_partial_reg_dependency & TUNEMASK)
 #define TARGET_SSE_PARTIAL_REG_DEPENDENCY \
-	(x86_sse_partial_reg_dependency & CPUMASK)
-#define TARGET_SSE_PARTIAL_REGS (x86_sse_partial_regs & CPUMASK)
+	(x86_sse_partial_reg_dependency & TUNEMASK)
+#define TARGET_SSE_PARTIAL_REGS (x86_sse_partial_regs & TUNEMASK)
 #define TARGET_SSE_PARTIAL_REGS_FOR_CVTSD2SS \
-	(x86_sse_partial_regs_for_cvtsd2ss & CPUMASK)
-#define TARGET_SSE_TYPELESS_STORES (x86_sse_typeless_stores & CPUMASK)
-#define TARGET_SSE_TYPELESS_LOAD0 (x86_sse_typeless_load0 & CPUMASK)
-#define TARGET_SSE_LOAD0_BY_PXOR (x86_sse_load0_by_pxor & CPUMASK)
-#define TARGET_MEMORY_MISMATCH_STALL (x86_memory_mismatch_stall & CPUMASK)
-#define TARGET_PROLOGUE_USING_MOVE (x86_prologue_using_move & CPUMASK)
-#define TARGET_EPILOGUE_USING_MOVE (x86_epilogue_using_move & CPUMASK)
-#define TARGET_DECOMPOSE_LEA (x86_decompose_lea & CPUMASK)
+	(x86_sse_partial_regs_for_cvtsd2ss & TUNEMASK)
+#define TARGET_SSE_TYPELESS_STORES (x86_sse_typeless_stores & TUNEMASK)
+#define TARGET_SSE_TYPELESS_LOAD0 (x86_sse_typeless_load0 & TUNEMASK)
+#define TARGET_SSE_LOAD0_BY_PXOR (x86_sse_load0_by_pxor & TUNEMASK)
+#define TARGET_MEMORY_MISMATCH_STALL (x86_memory_mismatch_stall & TUNEMASK)
+#define TARGET_PROLOGUE_USING_MOVE (x86_prologue_using_move & TUNEMASK)
+#define TARGET_EPILOGUE_USING_MOVE (x86_epilogue_using_move & TUNEMASK)
+#define TARGET_DECOMPOSE_LEA (x86_decompose_lea & TUNEMASK)
 #define TARGET_PREFETCH_SSE (x86_prefetch_sse)
-#define TARGET_SHIFT1 (x86_shift1 & CPUMASK)
-#define TARGET_USE_FFREEP (x86_use_ffreep & CPUMASK)
-#define TARGET_REP_MOVL_OPTIMAL (x86_rep_movl_optimal & CPUMASK)
-#define TARGET_INTER_UNIT_MOVES (x86_inter_unit_moves & CPUMASK)
+#define TARGET_SHIFT1 (x86_shift1 & TUNEMASK)
+#define TARGET_USE_FFREEP (x86_use_ffreep & TUNEMASK)
+#define TARGET_REP_MOVL_OPTIMAL (x86_rep_movl_optimal & TUNEMASK)
+#define TARGET_INTER_UNIT_MOVES (x86_inter_unit_moves & TUNEMASK)
 #define TARGET_STACK_PROBE (target_flags & MASK_STACK_PROBE)
@@ -432,7 +432,7 @@ extern int x86_prefetch_sse;
    option if the fixed part matches.  The actual option name is made
    by appending `-m' to the specified name.  */
 #define TARGET_OPTIONS	\
-{ { "tune=", &ix86_cpu_string,	\
+{ { "tune=", &ix86_tune_string,	\
     N_("Schedule code for given CPU")},	\
   { "fpmath=", &ix86_fpmath_string,	\
     N_("Generate floating point mathematics using given instruction set")},\
@@ -510,9 +510,9 @@ extern int x86_prefetch_sse;
   do	\
     {	\
       size_t arch_len = strlen (ix86_arch_string);	\
-      size_t cpu_len = strlen (ix86_cpu_string);	\
+      size_t tune_len = strlen (ix86_tune_string);	\
       int last_arch_char = ix86_arch_string[arch_len - 1];	\
-      int last_cpu_char = ix86_cpu_string[cpu_len - 1];	\
+      int last_tune_char = ix86_tune_string[tune_len - 1];	\
 	\
       if (TARGET_64BIT)	\
 	{	\
@@ -529,7 +529,7 @@ extern int x86_prefetch_sse;
 	}	\
 	\
       /* Built-ins based on -mtune= (or -march= if no	\
-	 CPU given).  */	\
+	 -mtune= given).  */	\
       if (TARGET_386)	\
 	builtin_define ("__tune_i386__");	\
       else if (TARGET_486)	\
@@ -538,14 +538,14 @@ extern int x86_prefetch_sse;
 	{	\
 	  builtin_define ("__tune_i586__");	\
 	  builtin_define ("__tune_pentium__");	\
-	  if (last_cpu_char == 'x')	\
+	  if (last_tune_char == 'x')	\
 	    builtin_define ("__tune_pentium_mmx__");	\
 	}	\
       else if (TARGET_PENTIUMPRO)	\
 	{	\
 	  builtin_define ("__tune_i686__");	\
 	  builtin_define ("__tune_pentiumpro__");	\
-	  switch (last_cpu_char)	\
+	  switch (last_tune_char)	\
 	    {	\
 	    case '3':	\
 	      builtin_define ("__tune_pentium3__");	\
@@ -558,16 +558,16 @@ extern int x86_prefetch_sse;
       else if (TARGET_K6)	\
 	{	\
 	  builtin_define ("__tune_k6__");	\
-	  if (last_cpu_char == '2')	\
+	  if (last_tune_char == '2')	\
 	    builtin_define ("__tune_k6_2__");	\
-	  else if (last_cpu_char == '3')	\
+	  else if (last_tune_char == '3')	\
 	    builtin_define ("__tune_k6_3__");	\
 	}	\
       else if (TARGET_ATHLON)	\
 	{	\
 	  builtin_define ("__tune_athlon__");	\
 	  /* Only plain "athlon" lacks SSE.  */	\
-	  if (last_cpu_char != 'n')	\
+	  if (last_tune_char != 'n')	\
 	    builtin_define ("__tune_athlon_sse__");	\
 	}	\
       else if (TARGET_K8)	\
@@ -3074,8 +3074,8 @@ enum processor_type
   PROCESSOR_max
 };
-extern enum processor_type ix86_cpu;
-extern const char *ix86_cpu_string;
+extern enum processor_type ix86_tune;
+extern const char *ix86_tune_string;
 extern enum processor_type ix86_arch;
 extern const char *ix86_arch_string;
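
The __tune_* builtin-define hunks above rename last_cpu_char to last_tune_char but keep the trick itself: CPU variants that share one processor_type (k6, k6-2, k6-3; pentium vs. pentium-mmx; the athlon family) are told apart by the last character of the -mtune= string when choosing which __tune_* macros to predefine.  A self-contained C sketch of the k6 case; define_macro stands in for GCC's builtin_define, and the strncmp prefix test replaces the real TARGET_K6 check so the example needs no other state:

    #include <stdio.h>
    #include <string.h>

    /* Hypothetical stand-in for builtin_define: just print the macro.  */
    static void
    define_macro (const char *name)
    {
      printf ("#define %s 1\n", name);
    }

    static void
    define_tune_macros (const char *ix86_tune_string)
    {
      size_t tune_len = strlen (ix86_tune_string);
      int last_tune_char = ix86_tune_string[tune_len - 1];

      if (strncmp (ix86_tune_string, "k6", 2) == 0)
        {
          define_macro ("__tune_k6__");
          if (last_tune_char == '2')
            define_macro ("__tune_k6_2__");
          else if (last_tune_char == '3')
            define_macro ("__tune_k6_3__");
        }
    }

    int
    main (void)
    {
      define_tune_macros ("k6-2");   /* emits __tune_k6__ and __tune_k6_2__ */
      return 0;
    }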


@@ -136,7 +136,7 @@
 ;; Processor type.  This attribute must exactly match the processor_type
 ;; enumeration in i386.h.
 (define_attr "cpu" "i386,i486,pentium,pentiumpro,k6,athlon,pentium4,k8"
-  (const (symbol_ref "ix86_cpu")))
+  (const (symbol_ref "ix86_tune")))
 ;; A basic instruction type.  Refinements due to arguments to be
 ;; provided in other attributes.