diff --git a/gcc/ChangeLog b/gcc/ChangeLog index bfc9461a95b6..157e9f439601 100644 --- a/gcc/ChangeLog +++ b/gcc/ChangeLog @@ -1,19 +1,29 @@ +2011-11-18 Uros Bizjak + + PR tree-optimization/51118 + * fold-const.c (fold_checksum_tree): Check for TS_TYPED structure + before using TREE_TYPE accessor on expr. + 2011-11-17 Jan Hubicka PR bootstrap/51134 * i386.c (atom_cost): Fix 32bit memset description. - (expand_set_or_movmem_via_loop_with_iter): Output proper bounds check for epilogue loops. - (expand_movmem_epilogue): Handle epilogues up to size 15 w/o producing byte loop. - (decide_alg): sse_loop is not useable wthen SSE2 is disabled; when not optimizing always - use rep movsb or lincall; do not produce word sized loops when optimizing memset for - size (to avoid need for large constants). - (ix86_expand_movmem): Get into sync with ix86_expand_setmem; choose unroll factors - better; always do 128bit moves when producing SSE loops; do not produce loopy epilogue - when size is too small. - (promote_duplicated_reg_to_size): Do not look into desired alignments when - doing vector expansion. - (ix86_expand_setmem): Track better when promoted value is available; choose unroll factors - more sanely.; output loopy epilogue only when needed. + (expand_set_or_movmem_via_loop_with_iter): Output proper bounds check + for epilogue loops. + (expand_movmem_epilogue): Handle epilogues up to size 15 w/o producing + byte loop. + (decide_alg): sse_loop is not usable when SSE2 is disabled; + when not optimizing always use rep movsb or libcall; do not produce + word sized loops when optimizing memset for size (to avoid need + for large constants). + (ix86_expand_movmem): Get into sync with ix86_expand_setmem; + choose unroll factors better; always do 128bit moves when producing + SSE loops; do not produce loopy epilogue when size is too small. + (promote_duplicated_reg_to_size): Do not look into desired alignments + when doing vector expansion. 
+ (ix86_expand_setmem): Track better when promoted value is available; + choose unroll factors more sanely. Output loopy epilogue only + when needed. 2011-11-17 Steve Ellcey @@ -28,7 +38,7 @@ 2011-11-17 Andrew MacLeod - * builtins.c (expand_builtin): Remove 4th parameter representing + * builtins.c (expand_builtin): Remove 4th parameter representing weak/strong mode when __atomic_compare_exchange becomes a library call. 2011-11-17 Richard Henderson diff --git a/gcc/fold-const.c b/gcc/fold-const.c index 167573b17b31..e590377265e4 100644 --- a/gcc/fold-const.c +++ b/gcc/fold-const.c @@ -14157,7 +14157,8 @@ fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht) } } md5_process_bytes (expr, tree_size (expr), ctx); - fold_checksum_tree (TREE_TYPE (expr), ctx, ht); + if (CODE_CONTAINS_STRUCT (code, TS_TYPED)) + fold_checksum_tree (TREE_TYPE (expr), ctx, ht); if (TREE_CODE_CLASS (code) != tcc_type && TREE_CODE_CLASS (code) != tcc_declaration && code != TREE_LIST