about summary refs log tree commit diff
path: root/toolchain-layer/recipes-devtools/gcc/gcc-4.5/linaro/gcc-4.5-linaro-r99396.patch
diff options
context:
space:
mode:
Diffstat (limited to 'toolchain-layer/recipes-devtools/gcc/gcc-4.5/linaro/gcc-4.5-linaro-r99396.patch')
-rw-r--r-- toolchain-layer/recipes-devtools/gcc/gcc-4.5/linaro/gcc-4.5-linaro-r99396.patch 1721
1 files changed, 1721 insertions, 0 deletions
diff --git a/toolchain-layer/recipes-devtools/gcc/gcc-4.5/linaro/gcc-4.5-linaro-r99396.patch b/toolchain-layer/recipes-devtools/gcc/gcc-4.5/linaro/gcc-4.5-linaro-r99396.patch
new file mode 100644
index 0000000..b7eaa68
--- /dev/null
+++ b/toolchain-layer/recipes-devtools/gcc/gcc-4.5/linaro/gcc-4.5-linaro-r99396.patch
@@ -0,0 +1,1721 @@
+2010-09-15 Chung-Lin Tang <cltang@codesourcery.com>
+
+ Issue #9441
+
+ Backport from mainline:
+
+ 2010-06-25 Bernd Schmidt <bernds@codesourcery.com>
+
+ With large parts from Jim Wilson:
+ PR target/43902
+
+ gcc/
+ * tree-pretty-print.c (dump_generic_node, op_code_prio): Add
+ WIDEN_MULT_PLUS_EXPR and WIDEN_MULT_MINUS_EXPR.
+ * optabs.c (optab_for_tree_code): Likewise.
+ (expand_widen_pattern_expr): Likewise.
+ * tree-ssa-math-opts.c (convert_mult_to_widen): New function, broken
+ out of execute_optimize_widening_mul.
+ (convert_plusminus_to_widen): New function.
+ (execute_optimize_widening_mul): Use the two new functions.
+ * expr.c (expand_expr_real_2): Add support for GIMPLE_TERNARY_RHS.
+ Remove code to generate widening multiply-accumulate. Add support
+ for WIDEN_MULT_PLUS_EXPR and WIDEN_MULT_MINUS_EXPR.
+ * gimple-pretty-print.c (dump_ternary_rhs): New function.
+ (dump_gimple_assign): Call it when appropriate.
+ * tree.def (WIDEN_MULT_PLUS_EXPR, WIDEN_MULT_MINUS_EXPR): New codes.
+ * cfgexpand.c (gimple_assign_rhs_to_tree): Likewise.
+ (expand_gimple_stmt_1): Likewise.
+ (expand_debug_expr): Support WIDEN_MULT_PLUS_EXPR and
+ WIDEN_MULT_MINUS_EXPR.
+ * tree-ssa-operands.c (get_expr_operands): Likewise.
+ * tree-inline.c (estimate_operator_cost): Likewise.
+ * gimple.c (extract_ops_from_tree_1): Renamed from
+ extract_ops_from_tree. Add new arg for a third operand; fill it.
+ (gimple_build_assign_stat): Support operations with three operands.
+ (gimple_build_assign_with_ops_stat): Likewise.
+ (gimple_assign_set_rhs_from_tree): Likewise.
+ (gimple_assign_set_rhs_with_ops_1): Renamed from
+ gimple_assign_set_rhs_with_ops. Add new arg for a third operand.
+ (get_gimple_rhs_num_ops): Support GIMPLE_TERNARY_RHS.
+	(gimple_rhs_class_table): Handle WIDEN_MULT_PLUS_EXPR and
+	WIDEN_MULT_MINUS_EXPR.
+ * gimple.h (enum gimple_rhs_class): Add GIMPLE_TERNARY_RHS.
+ (extract_ops_from_tree_1): Adjust declaration.
+ (gimple_assign_set_rhs_with_ops_1): Likewise.
+ (gimple_build_assign_with_ops): Pass NULL for last operand.
+ (gimple_build_assign_with_ops3): New macro.
+ (gimple_assign_rhs3, gimple_assign_rhs3_ptr, gimple_assign_set_rhs3,
+ gimple_assign_set_rhs_with_ops, extract_ops_from_tree): New inline
+ functions.
+ * tree-cfg.c (verify_gimple_assign_ternary): New static function.
+ (verify_gimple_assign): Call it.
+ * doc/gimple.texi (Manipulating operands): Document GIMPLE_TERNARY_RHS.
+ (Tuple specific accessors, subsection GIMPLE_ASSIGN): Document new
+ functions for dealing with three-operand statements.
+ * tree.c (commutative_ternary_tree_code): New function.
+ * tree.h (commutative_ternary_tree_code): Declare it.
+ * tree-vrp.c (gimple_assign_nonnegative_warnv_p): Return false for
+ ternary statements.
+ (gimple_assign_nonzero_warnv_p): Likewise.
+ * tree-ssa-sccvn.c (stmt_has_constants): Handle GIMPLE_TERNARY_RHS.
+ * tree-ssa-ccp.c (get_rhs_assign_op_for_ccp): New static function.
+ (ccp_fold): Use it. Handle GIMPLE_TERNARY_RHS.
+ * tree-ssa-dom.c (enum expr_kind): Add EXPR_TERNARY.
+ (struct hashtable_expr): New member ternary in the union.
+ (initialize_hash_element): Handle GIMPLE_TERNARY_RHS.
+ (hashable_expr_equal_p): Fix indentation. Handle EXPR_TERNARY.
+ (iterative_hash_hashable_expr): Likewise.
+ (print_expr_hash_elt): Handle EXPR_TERNARY.
+ * gimple-fold.c (fold_gimple_assign): Handle GIMPLE_TERNARY_RHS.
+ * tree-ssa-threadedge.c (fold_assignment_stmt): Remove useless break
+ statements. Handle GIMPLE_TERNARY_RHS.
+
+ From Jim Wilson:
+ gcc/testsuite/
+ * gcc.target/mips/madd-9.c: New test.
+
+ 2010-06-29 Bernd Schmidt <bernds@codesourcery.com>
+
+ PR target/43902
+ gcc/
+ * config/arm/arm.md (maddsidi4, umaddsidi4): New expanders.
+ (maddhisi4): Renamed from mulhisi3addsi. Operands renumbered.
+ (maddhidi4): Likewise.
+
+ gcc/testsuite/
+ * gcc.target/arm/wmul-1.c: Test for smlabb instead of smulbb.
+ * gcc.target/arm/wmul-3.c: New test.
+ * gcc.target/arm/wmul-4.c: New test.
+
+ 2010-07-22 Richard Sandiford <rdsandiford@googlemail.com>
+
+ gcc/
+ * tree-ssa-math-opts.c (is_widening_mult_rhs_p): New function.
+ (is_widening_mult_p): Likewise.
+	(convert_mult_to_widen): Use them.
+ (convert_plusminus_to_widen): Likewise. Handle fixed-point types as
+ well as integer ones.
+
+ 2010-07-31 Richard Sandiford <rdsandiford@googlemail.com>
+
+ gcc/
+ * tree-ssa-math-opts.c (convert_plusminus_to_widen): Fix type
+ used in the call to optab_for_tree_code. Fix the second
+ is_widening_mult_p call. Check that both unwidened operands
+ have the same sign.
+
+ 2010-09-15 Jie Zhang <jie@codesourcery.com>
+
+ Backport from mainline:
+
+=== modified file 'gcc/cfgexpand.c'
+Index: gcc-4_5-branch/gcc/cfgexpand.c
+===================================================================
+--- gcc-4_5-branch.orig/gcc/cfgexpand.c 2011-07-22 16:59:23.000000000 -0700
++++ gcc-4_5-branch/gcc/cfgexpand.c 2011-07-22 16:59:28.581747691 -0700
+@@ -64,7 +64,13 @@
+
+ grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));
+
+- if (grhs_class == GIMPLE_BINARY_RHS)
++ if (grhs_class == GIMPLE_TERNARY_RHS)
++ t = build3 (gimple_assign_rhs_code (stmt),
++ TREE_TYPE (gimple_assign_lhs (stmt)),
++ gimple_assign_rhs1 (stmt),
++ gimple_assign_rhs2 (stmt),
++ gimple_assign_rhs3 (stmt));
++ else if (grhs_class == GIMPLE_BINARY_RHS)
+ t = build2 (gimple_assign_rhs_code (stmt),
+ TREE_TYPE (gimple_assign_lhs (stmt)),
+ gimple_assign_rhs1 (stmt),
+@@ -1893,6 +1899,9 @@
+ ops.type = TREE_TYPE (lhs);
+ switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
+ {
++ case GIMPLE_TERNARY_RHS:
++ ops.op2 = gimple_assign_rhs3 (stmt);
++ /* Fallthru */
+ case GIMPLE_BINARY_RHS:
+ ops.op1 = gimple_assign_rhs2 (stmt);
+ /* Fallthru */
+@@ -2243,6 +2252,8 @@
+ {
+ case COND_EXPR:
+ case DOT_PROD_EXPR:
++ case WIDEN_MULT_PLUS_EXPR:
++ case WIDEN_MULT_MINUS_EXPR:
+ goto ternary;
+
+ case TRUTH_ANDIF_EXPR:
+@@ -3030,6 +3041,8 @@
+ return NULL;
+
+ case WIDEN_MULT_EXPR:
++ case WIDEN_MULT_PLUS_EXPR:
++ case WIDEN_MULT_MINUS_EXPR:
+ if (SCALAR_INT_MODE_P (GET_MODE (op0))
+ && SCALAR_INT_MODE_P (mode))
+ {
+@@ -3042,7 +3055,13 @@
+ op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
+ else
+ op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
+- return gen_rtx_MULT (mode, op0, op1);
++ op0 = gen_rtx_MULT (mode, op0, op1);
++ if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
++ return op0;
++ else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
++ return gen_rtx_PLUS (mode, op0, op2);
++ else
++ return gen_rtx_MINUS (mode, op2, op0);
+ }
+ return NULL;
+
+Index: gcc-4_5-branch/gcc/config/arm/arm.md
+===================================================================
+--- gcc-4_5-branch.orig/gcc/config/arm/arm.md 2011-07-22 16:59:25.000000000 -0700
++++ gcc-4_5-branch/gcc/config/arm/arm.md 2011-07-22 16:59:28.581747691 -0700
+@@ -1507,7 +1507,15 @@
+ (set_attr "predicable" "yes")]
+ )
+
+-;; Unnamed template to match long long multiply-accumulate (smlal)
++(define_expand "maddsidi4"
++ [(set (match_operand:DI 0 "s_register_operand" "")
++ (plus:DI
++ (mult:DI
++ (sign_extend:DI (match_operand:SI 1 "s_register_operand" ""))
++ (sign_extend:DI (match_operand:SI 2 "s_register_operand" "")))
++ (match_operand:DI 3 "s_register_operand" "")))]
++ "TARGET_32BIT && arm_arch3m"
++ "")
+
+ (define_insn "*mulsidi3adddi"
+ [(set (match_operand:DI 0 "s_register_operand" "=&r")
+@@ -1603,7 +1611,15 @@
+ (set_attr "predicable" "yes")]
+ )
+
+-;; Unnamed template to match long long unsigned multiply-accumulate (umlal)
++(define_expand "umaddsidi4"
++ [(set (match_operand:DI 0 "s_register_operand" "")
++ (plus:DI
++ (mult:DI
++ (zero_extend:DI (match_operand:SI 1 "s_register_operand" ""))
++ (zero_extend:DI (match_operand:SI 2 "s_register_operand" "")))
++ (match_operand:DI 3 "s_register_operand" "")))]
++ "TARGET_32BIT && arm_arch3m"
++ "")
+
+ (define_insn "*umulsidi3adddi"
+ [(set (match_operand:DI 0 "s_register_operand" "=&r")
+@@ -1771,29 +1787,29 @@
+ (set_attr "predicable" "yes")]
+ )
+
+-(define_insn "*mulhisi3addsi"
++(define_insn "maddhisi4"
+ [(set (match_operand:SI 0 "s_register_operand" "=r")
+- (plus:SI (match_operand:SI 1 "s_register_operand" "r")
++ (plus:SI (match_operand:SI 3 "s_register_operand" "r")
+ (mult:SI (sign_extend:SI
+- (match_operand:HI 2 "s_register_operand" "%r"))
++ (match_operand:HI 1 "s_register_operand" "%r"))
+ (sign_extend:SI
+- (match_operand:HI 3 "s_register_operand" "r")))))]
++ (match_operand:HI 2 "s_register_operand" "r")))))]
+ "TARGET_DSP_MULTIPLY"
+- "smlabb%?\\t%0, %2, %3, %1"
++ "smlabb%?\\t%0, %1, %2, %3"
+ [(set_attr "insn" "smlaxy")
+ (set_attr "predicable" "yes")]
+ )
+
+-(define_insn "*mulhidi3adddi"
++(define_insn "*maddhidi4"
+ [(set (match_operand:DI 0 "s_register_operand" "=r")
+ (plus:DI
+- (match_operand:DI 1 "s_register_operand" "0")
++ (match_operand:DI 3 "s_register_operand" "0")
+ (mult:DI (sign_extend:DI
+- (match_operand:HI 2 "s_register_operand" "%r"))
++ (match_operand:HI 1 "s_register_operand" "%r"))
+ (sign_extend:DI
+- (match_operand:HI 3 "s_register_operand" "r")))))]
++ (match_operand:HI 2 "s_register_operand" "r")))))]
+ "TARGET_DSP_MULTIPLY"
+- "smlalbb%?\\t%Q0, %R0, %2, %3"
++ "smlalbb%?\\t%Q0, %R0, %1, %2"
+ [(set_attr "insn" "smlalxy")
+ (set_attr "predicable" "yes")])
+
+Index: gcc-4_5-branch/gcc/doc/gimple.texi
+===================================================================
+--- gcc-4_5-branch.orig/gcc/doc/gimple.texi 2011-07-22 16:58:48.000000000 -0700
++++ gcc-4_5-branch/gcc/doc/gimple.texi 2011-07-22 16:59:28.581747691 -0700
+@@ -554,6 +554,9 @@
+ @item @code{GIMPLE_INVALID_RHS}
+ The tree cannot be used as a GIMPLE operand.
+
++@item @code{GIMPLE_TERNARY_RHS}
++The tree is a valid GIMPLE ternary operation.
++
+ @item @code{GIMPLE_BINARY_RHS}
+ The tree is a valid GIMPLE binary operation.
+
+@@ -575,10 +578,11 @@
+ expressions should be flattened into the operand vector.
+ @end itemize
+
+-For tree nodes in the categories @code{GIMPLE_BINARY_RHS} and
+-@code{GIMPLE_UNARY_RHS}, they cannot be stored inside tuples directly.
+-They first need to be flattened and separated into individual
+-components. For instance, given the GENERIC expression
++For tree nodes in the categories @code{GIMPLE_TERNARY_RHS},
++@code{GIMPLE_BINARY_RHS} and @code{GIMPLE_UNARY_RHS}, they cannot be
++stored inside tuples directly. They first need to be flattened and
++separated into individual components. For instance, given the GENERIC
++expression
+
+ @smallexample
+ a = b + c
+@@ -1082,7 +1086,16 @@
+ Return the address of the second operand on the @code{RHS} of assignment
+ statement @code{G}.
+ @end deftypefn
++
++@deftypefn {GIMPLE function} tree gimple_assign_rhs3 (gimple g)
++Return the third operand on the @code{RHS} of assignment statement @code{G}.
++@end deftypefn
+
++@deftypefn {GIMPLE function} tree *gimple_assign_rhs3_ptr (gimple g)
++Return the address of the third operand on the @code{RHS} of assignment
++statement @code{G}.
++@end deftypefn
++
+ @deftypefn {GIMPLE function} void gimple_assign_set_lhs (gimple g, tree lhs)
+ Set @code{LHS} to be the @code{LHS} operand of assignment statement @code{G}.
+ @end deftypefn
+@@ -1092,17 +1105,13 @@
+ statement @code{G}.
+ @end deftypefn
+
+-@deftypefn {GIMPLE function} tree gimple_assign_rhs2 (gimple g)
+-Return the second operand on the @code{RHS} of assignment statement @code{G}.
+-@end deftypefn
+-
+-@deftypefn {GIMPLE function} tree *gimple_assign_rhs2_ptr (gimple g)
+-Return a pointer to the second operand on the @code{RHS} of assignment
++@deftypefn {GIMPLE function} void gimple_assign_set_rhs2 (gimple g, tree rhs)
++Set @code{RHS} to be the second operand on the @code{RHS} of assignment
+ statement @code{G}.
+ @end deftypefn
+
+-@deftypefn {GIMPLE function} void gimple_assign_set_rhs2 (gimple g, tree rhs)
+-Set @code{RHS} to be the second operand on the @code{RHS} of assignment
++@deftypefn {GIMPLE function} void gimple_assign_set_rhs3 (gimple g, tree rhs)
++Set @code{RHS} to be the third operand on the @code{RHS} of assignment
+ statement @code{G}.
+ @end deftypefn
+
+Index: gcc-4_5-branch/gcc/expr.c
+===================================================================
+--- gcc-4_5-branch.orig/gcc/expr.c 2011-07-22 16:59:23.000000000 -0700
++++ gcc-4_5-branch/gcc/expr.c 2011-07-22 16:59:28.591747691 -0700
+@@ -7228,8 +7228,6 @@
+ rtx subtarget, original_target;
+ int ignore;
+ bool reduce_bit_field;
+- gimple subexp0_def, subexp1_def;
+- tree top0, top1;
+ location_t loc = ops->location;
+ tree treeop0, treeop1;
+ #define REDUCE_BIT_FIELD(expr) (reduce_bit_field \
+@@ -7249,7 +7247,8 @@
+ exactly those that are valid in gimple expressions that aren't
+ GIMPLE_SINGLE_RHS (or invalid). */
+ gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
+- || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS);
++ || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
++ || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);
+
+ ignore = (target == const0_rtx
+ || ((CONVERT_EXPR_CODE_P (code)
+@@ -7424,58 +7423,6 @@
+ fold_convert_loc (loc, ssizetype,
+ treeop1));
+ case PLUS_EXPR:
+-
+- /* Check if this is a case for multiplication and addition. */
+- if ((TREE_CODE (type) == INTEGER_TYPE
+- || TREE_CODE (type) == FIXED_POINT_TYPE)
+- && (subexp0_def = get_def_for_expr (treeop0,
+- MULT_EXPR)))
+- {
+- tree subsubexp0, subsubexp1;
+- gimple subsubexp0_def, subsubexp1_def;
+- enum tree_code this_code;
+-
+- this_code = TREE_CODE (type) == INTEGER_TYPE ? NOP_EXPR
+- : FIXED_CONVERT_EXPR;
+- subsubexp0 = gimple_assign_rhs1 (subexp0_def);
+- subsubexp0_def = get_def_for_expr (subsubexp0, this_code);
+- subsubexp1 = gimple_assign_rhs2 (subexp0_def);
+- subsubexp1_def = get_def_for_expr (subsubexp1, this_code);
+- if (subsubexp0_def && subsubexp1_def
+- && (top0 = gimple_assign_rhs1 (subsubexp0_def))
+- && (top1 = gimple_assign_rhs1 (subsubexp1_def))
+- && (TYPE_PRECISION (TREE_TYPE (top0))
+- < TYPE_PRECISION (TREE_TYPE (subsubexp0)))
+- && (TYPE_PRECISION (TREE_TYPE (top0))
+- == TYPE_PRECISION (TREE_TYPE (top1)))
+- && (TYPE_UNSIGNED (TREE_TYPE (top0))
+- == TYPE_UNSIGNED (TREE_TYPE (top1))))
+- {
+- tree op0type = TREE_TYPE (top0);
+- enum machine_mode innermode = TYPE_MODE (op0type);
+- bool zextend_p = TYPE_UNSIGNED (op0type);
+- bool sat_p = TYPE_SATURATING (TREE_TYPE (subsubexp0));
+- if (sat_p == 0)
+- this_optab = zextend_p ? umadd_widen_optab : smadd_widen_optab;
+- else
+- this_optab = zextend_p ? usmadd_widen_optab
+- : ssmadd_widen_optab;
+- if (mode == GET_MODE_2XWIDER_MODE (innermode)
+- && (optab_handler (this_optab, mode)->insn_code
+- != CODE_FOR_nothing))
+- {
+- expand_operands (top0, top1, NULL_RTX, &op0, &op1,
+- EXPAND_NORMAL);
+- op2 = expand_expr (treeop1, subtarget,
+- VOIDmode, EXPAND_NORMAL);
+- temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
+- target, unsignedp);
+- gcc_assert (temp);
+- return REDUCE_BIT_FIELD (temp);
+- }
+- }
+- }
+-
+ /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
+ something else, make sure we add the register to the constant and
+ then to the other thing. This case can occur during strength
+@@ -7590,57 +7537,6 @@
+ return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
+
+ case MINUS_EXPR:
+- /* Check if this is a case for multiplication and subtraction. */
+- if ((TREE_CODE (type) == INTEGER_TYPE
+- || TREE_CODE (type) == FIXED_POINT_TYPE)
+- && (subexp1_def = get_def_for_expr (treeop1,
+- MULT_EXPR)))
+- {
+- tree subsubexp0, subsubexp1;
+- gimple subsubexp0_def, subsubexp1_def;
+- enum tree_code this_code;
+-
+- this_code = TREE_CODE (type) == INTEGER_TYPE ? NOP_EXPR
+- : FIXED_CONVERT_EXPR;
+- subsubexp0 = gimple_assign_rhs1 (subexp1_def);
+- subsubexp0_def = get_def_for_expr (subsubexp0, this_code);
+- subsubexp1 = gimple_assign_rhs2 (subexp1_def);
+- subsubexp1_def = get_def_for_expr (subsubexp1, this_code);
+- if (subsubexp0_def && subsubexp1_def
+- && (top0 = gimple_assign_rhs1 (subsubexp0_def))
+- && (top1 = gimple_assign_rhs1 (subsubexp1_def))
+- && (TYPE_PRECISION (TREE_TYPE (top0))
+- < TYPE_PRECISION (TREE_TYPE (subsubexp0)))
+- && (TYPE_PRECISION (TREE_TYPE (top0))
+- == TYPE_PRECISION (TREE_TYPE (top1)))
+- && (TYPE_UNSIGNED (TREE_TYPE (top0))
+- == TYPE_UNSIGNED (TREE_TYPE (top1))))
+- {
+- tree op0type = TREE_TYPE (top0);
+- enum machine_mode innermode = TYPE_MODE (op0type);
+- bool zextend_p = TYPE_UNSIGNED (op0type);
+- bool sat_p = TYPE_SATURATING (TREE_TYPE (subsubexp0));
+- if (sat_p == 0)
+- this_optab = zextend_p ? umsub_widen_optab : smsub_widen_optab;
+- else
+- this_optab = zextend_p ? usmsub_widen_optab
+- : ssmsub_widen_optab;
+- if (mode == GET_MODE_2XWIDER_MODE (innermode)
+- && (optab_handler (this_optab, mode)->insn_code
+- != CODE_FOR_nothing))
+- {
+- expand_operands (top0, top1, NULL_RTX, &op0, &op1,
+- EXPAND_NORMAL);
+- op2 = expand_expr (treeop0, subtarget,
+- VOIDmode, EXPAND_NORMAL);
+- temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
+- target, unsignedp);
+- gcc_assert (temp);
+- return REDUCE_BIT_FIELD (temp);
+- }
+- }
+- }
+-
+ /* For initializers, we are allowed to return a MINUS of two
+ symbolic constants. Here we handle all cases when both operands
+ are constant. */
+@@ -7681,6 +7577,14 @@
+
+ goto binop2;
+
++ case WIDEN_MULT_PLUS_EXPR:
++ case WIDEN_MULT_MINUS_EXPR:
++ expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
++ op2 = expand_normal (ops->op2);
++ target = expand_widen_pattern_expr (ops, op0, op1, op2,
++ target, unsignedp);
++ return target;
++
+ case WIDEN_MULT_EXPR:
+ /* If first operand is constant, swap them.
+ Thus the following special case checks need only
+Index: gcc-4_5-branch/gcc/gimple-pretty-print.c
+===================================================================
+--- gcc-4_5-branch.orig/gcc/gimple-pretty-print.c 2011-07-22 16:58:48.000000000 -0700
++++ gcc-4_5-branch/gcc/gimple-pretty-print.c 2011-07-22 16:59:28.591747691 -0700
+@@ -376,6 +376,34 @@
+ }
+ }
+
++/* Helper for dump_gimple_assign. Print the ternary RHS of the
++ assignment GS. BUFFER, SPC and FLAGS are as in dump_gimple_stmt. */
++
++static void
++dump_ternary_rhs (pretty_printer *buffer, gimple gs, int spc, int flags)
++{
++ const char *p;
++ enum tree_code code = gimple_assign_rhs_code (gs);
++ switch (code)
++ {
++ case WIDEN_MULT_PLUS_EXPR:
++ case WIDEN_MULT_MINUS_EXPR:
++ for (p = tree_code_name [(int) code]; *p; p++)
++ pp_character (buffer, TOUPPER (*p));
++ pp_string (buffer, " <");
++ dump_generic_node (buffer, gimple_assign_rhs1 (gs), spc, flags, false);
++ pp_string (buffer, ", ");
++ dump_generic_node (buffer, gimple_assign_rhs2 (gs), spc, flags, false);
++ pp_string (buffer, ", ");
++ dump_generic_node (buffer, gimple_assign_rhs3 (gs), spc, flags, false);
++ pp_character (buffer, '>');
++ break;
++
++ default:
++ gcc_unreachable ();
++ }
++}
++
+
+ /* Dump the gimple assignment GS. BUFFER, SPC and FLAGS are as in
+ dump_gimple_stmt. */
+@@ -418,6 +446,8 @@
+ dump_unary_rhs (buffer, gs, spc, flags);
+ else if (gimple_num_ops (gs) == 3)
+ dump_binary_rhs (buffer, gs, spc, flags);
++ else if (gimple_num_ops (gs) == 4)
++ dump_ternary_rhs (buffer, gs, spc, flags);
+ else
+ gcc_unreachable ();
+ if (!(flags & TDF_RHS_ONLY))
+Index: gcc-4_5-branch/gcc/gimple.c
+===================================================================
+--- gcc-4_5-branch.orig/gcc/gimple.c 2011-07-22 16:59:25.000000000 -0700
++++ gcc-4_5-branch/gcc/gimple.c 2011-07-22 16:59:28.591747691 -0700
+@@ -289,31 +289,40 @@
+
+
+ /* Extract the operands and code for expression EXPR into *SUBCODE_P,
+- *OP1_P and *OP2_P respectively. */
++ *OP1_P, *OP2_P and *OP3_P respectively. */
+
+ void
+-extract_ops_from_tree (tree expr, enum tree_code *subcode_p, tree *op1_p,
+- tree *op2_p)
++extract_ops_from_tree_1 (tree expr, enum tree_code *subcode_p, tree *op1_p,
++ tree *op2_p, tree *op3_p)
+ {
+ enum gimple_rhs_class grhs_class;
+
+ *subcode_p = TREE_CODE (expr);
+ grhs_class = get_gimple_rhs_class (*subcode_p);
+
+- if (grhs_class == GIMPLE_BINARY_RHS)
++ if (grhs_class == GIMPLE_TERNARY_RHS)
+ {
+ *op1_p = TREE_OPERAND (expr, 0);
+ *op2_p = TREE_OPERAND (expr, 1);
++ *op3_p = TREE_OPERAND (expr, 2);
++ }
++ else if (grhs_class == GIMPLE_BINARY_RHS)
++ {
++ *op1_p = TREE_OPERAND (expr, 0);
++ *op2_p = TREE_OPERAND (expr, 1);
++ *op3_p = NULL_TREE;
+ }
+ else if (grhs_class == GIMPLE_UNARY_RHS)
+ {
+ *op1_p = TREE_OPERAND (expr, 0);
+ *op2_p = NULL_TREE;
++ *op3_p = NULL_TREE;
+ }
+ else if (grhs_class == GIMPLE_SINGLE_RHS)
+ {
+ *op1_p = expr;
+ *op2_p = NULL_TREE;
++ *op3_p = NULL_TREE;
+ }
+ else
+ gcc_unreachable ();
+@@ -329,10 +338,10 @@
+ gimple_build_assign_stat (tree lhs, tree rhs MEM_STAT_DECL)
+ {
+ enum tree_code subcode;
+- tree op1, op2;
++ tree op1, op2, op3;
+
+- extract_ops_from_tree (rhs, &subcode, &op1, &op2);
+- return gimple_build_assign_with_ops_stat (subcode, lhs, op1, op2
++ extract_ops_from_tree_1 (rhs, &subcode, &op1, &op2, &op3);
++ return gimple_build_assign_with_ops_stat (subcode, lhs, op1, op2, op3
+ PASS_MEM_STAT);
+ }
+
+@@ -343,7 +352,7 @@
+
+ gimple
+ gimple_build_assign_with_ops_stat (enum tree_code subcode, tree lhs, tree op1,
+- tree op2 MEM_STAT_DECL)
++ tree op2, tree op3 MEM_STAT_DECL)
+ {
+ unsigned num_ops;
+ gimple p;
+@@ -362,6 +371,12 @@
+ gimple_assign_set_rhs2 (p, op2);
+ }
+
++ if (op3)
++ {
++ gcc_assert (num_ops > 3);
++ gimple_assign_set_rhs3 (p, op3);
++ }
++
+ return p;
+ }
+
+@@ -1860,22 +1875,22 @@
+ gimple_assign_set_rhs_from_tree (gimple_stmt_iterator *gsi, tree expr)
+ {
+ enum tree_code subcode;
+- tree op1, op2;
++ tree op1, op2, op3;
+
+- extract_ops_from_tree (expr, &subcode, &op1, &op2);
+- gimple_assign_set_rhs_with_ops (gsi, subcode, op1, op2);
++ extract_ops_from_tree_1 (expr, &subcode, &op1, &op2, &op3);
++ gimple_assign_set_rhs_with_ops_1 (gsi, subcode, op1, op2, op3);
+ }
+
+
+ /* Set the RHS of assignment statement pointed-to by GSI to CODE with
+- operands OP1 and OP2.
++ operands OP1, OP2 and OP3.
+
+ NOTE: The statement pointed-to by GSI may be reallocated if it
+ did not have enough operand slots. */
+
+ void
+-gimple_assign_set_rhs_with_ops (gimple_stmt_iterator *gsi, enum tree_code code,
+- tree op1, tree op2)
++gimple_assign_set_rhs_with_ops_1 (gimple_stmt_iterator *gsi, enum tree_code code,
++ tree op1, tree op2, tree op3)
+ {
+ unsigned new_rhs_ops = get_gimple_rhs_num_ops (code);
+ gimple stmt = gsi_stmt (*gsi);
+@@ -1899,6 +1914,8 @@
+ gimple_assign_set_rhs1 (stmt, op1);
+ if (new_rhs_ops > 1)
+ gimple_assign_set_rhs2 (stmt, op2);
++ if (new_rhs_ops > 2)
++ gimple_assign_set_rhs3 (stmt, op3);
+ }
+
+
+@@ -2378,6 +2395,8 @@
+ return 1;
+ else if (rhs_class == GIMPLE_BINARY_RHS)
+ return 2;
++ else if (rhs_class == GIMPLE_TERNARY_RHS)
++ return 3;
+ else
+ gcc_unreachable ();
+ }
+@@ -2394,6 +2413,8 @@
+ || (SYM) == TRUTH_OR_EXPR \
+ || (SYM) == TRUTH_XOR_EXPR) ? GIMPLE_BINARY_RHS \
+ : (SYM) == TRUTH_NOT_EXPR ? GIMPLE_UNARY_RHS \
++ : ((SYM) == WIDEN_MULT_PLUS_EXPR \
++ || (SYM) == WIDEN_MULT_MINUS_EXPR) ? GIMPLE_TERNARY_RHS \
+ : ((SYM) == COND_EXPR \
+ || (SYM) == CONSTRUCTOR \
+ || (SYM) == OBJ_TYPE_REF \
+Index: gcc-4_5-branch/gcc/gimple.h
+===================================================================
+--- gcc-4_5-branch.orig/gcc/gimple.h 2011-07-22 16:59:12.000000000 -0700
++++ gcc-4_5-branch/gcc/gimple.h 2011-07-22 16:59:28.591747691 -0700
+@@ -80,6 +80,7 @@
+ enum gimple_rhs_class
+ {
+ GIMPLE_INVALID_RHS, /* The expression cannot be used on the RHS. */
++ GIMPLE_TERNARY_RHS, /* The expression is a ternary operation. */
+ GIMPLE_BINARY_RHS, /* The expression is a binary operation. */
+ GIMPLE_UNARY_RHS, /* The expression is a unary operation. */
+ GIMPLE_SINGLE_RHS /* The expression is a single object (an SSA
+@@ -786,12 +787,14 @@
+ gimple gimple_build_assign_stat (tree, tree MEM_STAT_DECL);
+ #define gimple_build_assign(l,r) gimple_build_assign_stat (l, r MEM_STAT_INFO)
+
+-void extract_ops_from_tree (tree, enum tree_code *, tree *, tree *);
++void extract_ops_from_tree_1 (tree, enum tree_code *, tree *, tree *, tree *);
+
+ gimple gimple_build_assign_with_ops_stat (enum tree_code, tree, tree,
+- tree MEM_STAT_DECL);
+-#define gimple_build_assign_with_ops(c,o1,o2,o3) \
+- gimple_build_assign_with_ops_stat (c, o1, o2, o3 MEM_STAT_INFO)
++ tree, tree MEM_STAT_DECL);
++#define gimple_build_assign_with_ops(c,o1,o2,o3) \
++ gimple_build_assign_with_ops_stat (c, o1, o2, o3, NULL_TREE MEM_STAT_INFO)
++#define gimple_build_assign_with_ops3(c,o1,o2,o3,o4) \
++ gimple_build_assign_with_ops_stat (c, o1, o2, o3, o4 MEM_STAT_INFO)
+
+ gimple gimple_build_debug_bind_stat (tree, tree, gimple MEM_STAT_DECL);
+ #define gimple_build_debug_bind(var,val,stmt) \
+@@ -850,8 +853,8 @@
+ bool gimple_assign_unary_nop_p (gimple);
+ void gimple_set_bb (gimple, struct basic_block_def *);
+ void gimple_assign_set_rhs_from_tree (gimple_stmt_iterator *, tree);
+-void gimple_assign_set_rhs_with_ops (gimple_stmt_iterator *, enum tree_code,
+- tree, tree);
++void gimple_assign_set_rhs_with_ops_1 (gimple_stmt_iterator *, enum tree_code,
++ tree, tree, tree);
+ tree gimple_get_lhs (const_gimple);
+ void gimple_set_lhs (gimple, tree);
+ void gimple_replace_lhs (gimple, tree);
+@@ -1793,6 +1796,63 @@
+ gimple_set_op (gs, 2, rhs);
+ }
+
++/* Return the third operand on the RHS of assignment statement GS.
++ If GS does not have two operands, NULL is returned instead. */
++
++static inline tree
++gimple_assign_rhs3 (const_gimple gs)
++{
++ GIMPLE_CHECK (gs, GIMPLE_ASSIGN);
++
++ if (gimple_num_ops (gs) >= 4)
++ return gimple_op (gs, 3);
++ else
++ return NULL_TREE;
++}
++
++/* Return a pointer to the third operand on the RHS of assignment
++ statement GS. */
++
++static inline tree *
++gimple_assign_rhs3_ptr (const_gimple gs)
++{
++ GIMPLE_CHECK (gs, GIMPLE_ASSIGN);
++ return gimple_op_ptr (gs, 3);
++}
++
++
++/* Set RHS to be the third operand on the RHS of assignment statement GS. */
++
++static inline void
++gimple_assign_set_rhs3 (gimple gs, tree rhs)
++{
++ GIMPLE_CHECK (gs, GIMPLE_ASSIGN);
++
++ gimple_set_op (gs, 3, rhs);
++}
++
++/* A wrapper around gimple_assign_set_rhs_with_ops_1, for callers which expect
++ to see only a maximum of two operands. */
++
++static inline void
++gimple_assign_set_rhs_with_ops (gimple_stmt_iterator *gsi, enum tree_code code,
++ tree op1, tree op2)
++{
++ gimple_assign_set_rhs_with_ops_1 (gsi, code, op1, op2, NULL);
++}
++
++/* A wrapper around extract_ops_from_tree_1, for callers which expect
++ to see only a maximum of two operands. */
++
++static inline void
++extract_ops_from_tree (tree expr, enum tree_code *code, tree *op0,
++ tree *op1)
++{
++ tree op2;
++ extract_ops_from_tree_1 (expr, code, op0, op1, &op2);
++ gcc_assert (op2 == NULL_TREE);
++}
++
+ /* Returns true if GS is a nontemporal move. */
+
+ static inline bool
+Index: gcc-4_5-branch/gcc/optabs.c
+===================================================================
+--- gcc-4_5-branch.orig/gcc/optabs.c 2011-07-22 16:58:48.000000000 -0700
++++ gcc-4_5-branch/gcc/optabs.c 2011-07-22 16:59:28.601747691 -0700
+@@ -408,6 +408,20 @@
+ case DOT_PROD_EXPR:
+ return TYPE_UNSIGNED (type) ? udot_prod_optab : sdot_prod_optab;
+
++ case WIDEN_MULT_PLUS_EXPR:
++ return (TYPE_UNSIGNED (type)
++ ? (TYPE_SATURATING (type)
++ ? usmadd_widen_optab : umadd_widen_optab)
++ : (TYPE_SATURATING (type)
++ ? ssmadd_widen_optab : smadd_widen_optab));
++
++ case WIDEN_MULT_MINUS_EXPR:
++ return (TYPE_UNSIGNED (type)
++ ? (TYPE_SATURATING (type)
++ ? usmsub_widen_optab : umsub_widen_optab)
++ : (TYPE_SATURATING (type)
++ ? ssmsub_widen_optab : smsub_widen_optab));
++
+ case REDUC_MAX_EXPR:
+ return TYPE_UNSIGNED (type) ? reduc_umax_optab : reduc_smax_optab;
+
+@@ -547,7 +561,12 @@
+ tmode0 = TYPE_MODE (TREE_TYPE (oprnd0));
+ widen_pattern_optab =
+ optab_for_tree_code (ops->code, TREE_TYPE (oprnd0), optab_default);
+- icode = (int) optab_handler (widen_pattern_optab, tmode0)->insn_code;
++ if (ops->code == WIDEN_MULT_PLUS_EXPR
++ || ops->code == WIDEN_MULT_MINUS_EXPR)
++ icode = (int) optab_handler (widen_pattern_optab,
++ TYPE_MODE (TREE_TYPE (ops->op2)))->insn_code;
++ else
++ icode = (int) optab_handler (widen_pattern_optab, tmode0)->insn_code;
+ gcc_assert (icode != CODE_FOR_nothing);
+ xmode0 = insn_data[icode].operand[1].mode;
+
+Index: gcc-4_5-branch/gcc/testsuite/gcc.target/arm/wmul-1.c
+===================================================================
+--- gcc-4_5-branch.orig/gcc/testsuite/gcc.target/arm/wmul-1.c 2011-07-22 16:59:24.000000000 -0700
++++ gcc-4_5-branch/gcc/testsuite/gcc.target/arm/wmul-1.c 2011-07-22 16:59:28.601747691 -0700
+@@ -15,4 +15,4 @@
+ return sqr;
+ }
+
+-/* { dg-final { scan-assembler-times "smulbb" 2 } } */
++/* { dg-final { scan-assembler-times "smlabb" 2 } } */
+Index: gcc-4_5-branch/gcc/tree-cfg.c
+===================================================================
+--- gcc-4_5-branch.orig/gcc/tree-cfg.c 2011-07-22 16:59:24.000000000 -0700
++++ gcc-4_5-branch/gcc/tree-cfg.c 2011-07-22 16:59:28.601747691 -0700
+@@ -3484,6 +3484,65 @@
+ return false;
+ }
+
++/* Verify a gimple assignment statement STMT with a ternary rhs.
++ Returns true if anything is wrong. */
++
++static bool
++verify_gimple_assign_ternary (gimple stmt)
++{
++ enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
++ tree lhs = gimple_assign_lhs (stmt);
++ tree lhs_type = TREE_TYPE (lhs);
++ tree rhs1 = gimple_assign_rhs1 (stmt);
++ tree rhs1_type = TREE_TYPE (rhs1);
++ tree rhs2 = gimple_assign_rhs2 (stmt);
++ tree rhs2_type = TREE_TYPE (rhs2);
++ tree rhs3 = gimple_assign_rhs3 (stmt);
++ tree rhs3_type = TREE_TYPE (rhs3);
++
++ if (!is_gimple_reg (lhs)
++ && !(optimize == 0
++ && TREE_CODE (lhs_type) == COMPLEX_TYPE))
++ {
++ error ("non-register as LHS of ternary operation");
++ return true;
++ }
++
++ if (!is_gimple_val (rhs1)
++ || !is_gimple_val (rhs2)
++ || !is_gimple_val (rhs3))
++ {
++ error ("invalid operands in ternary operation");
++ return true;
++ }
++
++ /* First handle operations that involve different types. */
++ switch (rhs_code)
++ {
++ case WIDEN_MULT_PLUS_EXPR:
++ case WIDEN_MULT_MINUS_EXPR:
++ if ((!INTEGRAL_TYPE_P (rhs1_type)
++ && !FIXED_POINT_TYPE_P (rhs1_type))
++ || !useless_type_conversion_p (rhs1_type, rhs2_type)
++ || !useless_type_conversion_p (lhs_type, rhs3_type)
++ || 2 * TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (lhs_type)
++ || TYPE_PRECISION (rhs1_type) != TYPE_PRECISION (rhs2_type))
++ {
++ error ("type mismatch in widening multiply-accumulate expression");
++ debug_generic_expr (lhs_type);
++ debug_generic_expr (rhs1_type);
++ debug_generic_expr (rhs2_type);
++ debug_generic_expr (rhs3_type);
++ return true;
++ }
++ break;
++
++ default:
++ gcc_unreachable ();
++ }
++ return false;
++}
++
+ /* Verify a gimple assignment statement STMT with a single rhs.
+ Returns true if anything is wrong. */
+
+@@ -3616,6 +3675,9 @@
+ case GIMPLE_BINARY_RHS:
+ return verify_gimple_assign_binary (stmt);
+
++ case GIMPLE_TERNARY_RHS:
++ return verify_gimple_assign_ternary (stmt);
++
+ default:
+ gcc_unreachable ();
+ }
+Index: gcc-4_5-branch/gcc/tree-inline.c
+===================================================================
+--- gcc-4_5-branch.orig/gcc/tree-inline.c 2011-07-22 16:59:24.000000000 -0700
++++ gcc-4_5-branch/gcc/tree-inline.c 2011-07-22 16:59:28.601747691 -0700
+@@ -3207,6 +3207,8 @@
+ case WIDEN_SUM_EXPR:
+ case WIDEN_MULT_EXPR:
+ case DOT_PROD_EXPR:
++ case WIDEN_MULT_PLUS_EXPR:
++ case WIDEN_MULT_MINUS_EXPR:
+
+ case VEC_WIDEN_MULT_HI_EXPR:
+ case VEC_WIDEN_MULT_LO_EXPR:
+Index: gcc-4_5-branch/gcc/tree-pretty-print.c
+===================================================================
+--- gcc-4_5-branch.orig/gcc/tree-pretty-print.c 2011-07-22 16:58:48.000000000 -0700
++++ gcc-4_5-branch/gcc/tree-pretty-print.c 2011-07-22 16:59:28.611747691 -0700
+@@ -1939,6 +1939,26 @@
+ pp_string (buffer, " > ");
+ break;
+
++ case WIDEN_MULT_PLUS_EXPR:
++ pp_string (buffer, " WIDEN_MULT_PLUS_EXPR < ");
++ dump_generic_node (buffer, TREE_OPERAND (node, 0), spc, flags, false);
++ pp_string (buffer, ", ");
++ dump_generic_node (buffer, TREE_OPERAND (node, 1), spc, flags, false);
++ pp_string (buffer, ", ");
++ dump_generic_node (buffer, TREE_OPERAND (node, 2), spc, flags, false);
++ pp_string (buffer, " > ");
++ break;
++
++ case WIDEN_MULT_MINUS_EXPR:
++ pp_string (buffer, " WIDEN_MULT_MINUS_EXPR < ");
++ dump_generic_node (buffer, TREE_OPERAND (node, 0), spc, flags, false);
++ pp_string (buffer, ", ");
++ dump_generic_node (buffer, TREE_OPERAND (node, 1), spc, flags, false);
++ pp_string (buffer, ", ");
++ dump_generic_node (buffer, TREE_OPERAND (node, 2), spc, flags, false);
++ pp_string (buffer, " > ");
++ break;
++
+ case OMP_PARALLEL:
+ pp_string (buffer, "#pragma omp parallel");
+ dump_omp_clauses (buffer, OMP_PARALLEL_CLAUSES (node), spc, flags);
+@@ -2432,6 +2452,8 @@
+ case VEC_WIDEN_MULT_LO_EXPR:
+ case WIDEN_MULT_EXPR:
+ case DOT_PROD_EXPR:
++ case WIDEN_MULT_PLUS_EXPR:
++ case WIDEN_MULT_MINUS_EXPR:
+ case MULT_EXPR:
+ case TRUNC_DIV_EXPR:
+ case CEIL_DIV_EXPR:
+Index: gcc-4_5-branch/gcc/tree-ssa-ccp.c
+===================================================================
+--- gcc-4_5-branch.orig/gcc/tree-ssa-ccp.c 2011-07-22 16:59:12.000000000 -0700
++++ gcc-4_5-branch/gcc/tree-ssa-ccp.c 2011-07-22 16:59:28.611747691 -0700
+@@ -915,6 +915,23 @@
+ TREE_TYPE (TREE_OPERAND (addr, 0))));
+ }
+
++/* Get operand number OPNR from the rhs of STMT. Before returning it,
++ simplify it to a constant if possible. */
++
++static tree
++get_rhs_assign_op_for_ccp (gimple stmt, int opnr)
++{
++ tree op = gimple_op (stmt, opnr);
++
++ if (TREE_CODE (op) == SSA_NAME)
++ {
++ prop_value_t *val = get_value (op);
++ if (val->lattice_val == CONSTANT)
++ op = get_value (op)->value;
++ }
++ return op;
++}
++
+ /* CCP specific front-end to the non-destructive constant folding
+ routines.
+
+@@ -1037,15 +1054,7 @@
+ Note that we know the single operand must be a constant,
+ so this should almost always return a simplified RHS. */
+ tree lhs = gimple_assign_lhs (stmt);
+- tree op0 = gimple_assign_rhs1 (stmt);
+-
+- /* Simplify the operand down to a constant. */
+- if (TREE_CODE (op0) == SSA_NAME)
+- {
+- prop_value_t *val = get_value (op0);
+- if (val->lattice_val == CONSTANT)
+- op0 = get_value (op0)->value;
+- }
++ tree op0 = get_rhs_assign_op_for_ccp (stmt, 1);
+
+ /* Conversions are useless for CCP purposes if they are
+ value-preserving. Thus the restrictions that
+@@ -1082,23 +1091,8 @@
+ case GIMPLE_BINARY_RHS:
+ {
+ /* Handle binary operators that can appear in GIMPLE form. */
+- tree op0 = gimple_assign_rhs1 (stmt);
+- tree op1 = gimple_assign_rhs2 (stmt);
+-
+- /* Simplify the operands down to constants when appropriate. */
+- if (TREE_CODE (op0) == SSA_NAME)
+- {
+- prop_value_t *val = get_value (op0);
+- if (val->lattice_val == CONSTANT)
+- op0 = val->value;
+- }
+-
+- if (TREE_CODE (op1) == SSA_NAME)
+- {
+- prop_value_t *val = get_value (op1);
+- if (val->lattice_val == CONSTANT)
+- op1 = val->value;
+- }
++ tree op0 = get_rhs_assign_op_for_ccp (stmt, 1);
++ tree op1 = get_rhs_assign_op_for_ccp (stmt, 2);
+
+ /* Fold &foo + CST into an invariant reference if possible. */
+ if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
+@@ -1115,6 +1109,17 @@
+ gimple_expr_type (stmt), op0, op1);
+ }
+
++ case GIMPLE_TERNARY_RHS:
++ {
++ /* Handle ternary operators that can appear in GIMPLE form. */
++ tree op0 = get_rhs_assign_op_for_ccp (stmt, 1);
++ tree op1 = get_rhs_assign_op_for_ccp (stmt, 2);
++ tree op2 = get_rhs_assign_op_for_ccp (stmt, 3);
++
++ return fold_ternary_loc (loc, subcode,
++ gimple_expr_type (stmt), op0, op1, op2);
++ }
++
+ default:
+ gcc_unreachable ();
+ }
+@@ -2959,6 +2964,33 @@
+ }
+ break;
+
++ case GIMPLE_TERNARY_RHS:
++ result = fold_ternary_loc (loc, subcode,
++ TREE_TYPE (gimple_assign_lhs (stmt)),
++ gimple_assign_rhs1 (stmt),
++ gimple_assign_rhs2 (stmt),
++ gimple_assign_rhs3 (stmt));
++
++ if (result)
++ {
++ STRIP_USELESS_TYPE_CONVERSION (result);
++ if (valid_gimple_rhs_p (result))
++ return result;
++
++ /* Fold might have produced non-GIMPLE, so if we trust it blindly
++ we lose canonicalization opportunities. Do not go again
++ through fold here though, or the same non-GIMPLE will be
++ produced. */
++ if (commutative_ternary_tree_code (subcode)
++ && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
++ gimple_assign_rhs2 (stmt), false))
++ return build3 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
++ gimple_assign_rhs2 (stmt),
++ gimple_assign_rhs1 (stmt),
++ gimple_assign_rhs3 (stmt));
++ }
++ break;
++
+ case GIMPLE_INVALID_RHS:
+ gcc_unreachable ();
+ }
+Index: gcc-4_5-branch/gcc/tree-ssa-dom.c
+===================================================================
+--- gcc-4_5-branch.orig/gcc/tree-ssa-dom.c 2011-07-22 16:58:48.000000000 -0700
++++ gcc-4_5-branch/gcc/tree-ssa-dom.c 2011-07-22 17:23:51.501747355 -0700
+@@ -54,6 +54,7 @@
+ EXPR_SINGLE,
+ EXPR_UNARY,
+ EXPR_BINARY,
++ EXPR_TERNARY,
+ EXPR_CALL
+ };
+
+@@ -64,7 +65,8 @@
+ union {
+ struct { tree rhs; } single;
+ struct { enum tree_code op; tree opnd; } unary;
+- struct { enum tree_code op; tree opnd0; tree opnd1; } binary;
++ struct { enum tree_code op; tree opnd0, opnd1; } binary;
++ struct { enum tree_code op; tree opnd0, opnd1, opnd2; } ternary;
+ struct { tree fn; bool pure; size_t nargs; tree *args; } call;
+ } ops;
+ };
+@@ -229,6 +231,14 @@
+ expr->ops.binary.opnd0 = gimple_assign_rhs1 (stmt);
+ expr->ops.binary.opnd1 = gimple_assign_rhs2 (stmt);
+ break;
++ case GIMPLE_TERNARY_RHS:
++ expr->kind = EXPR_TERNARY;
++ expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
++ expr->ops.ternary.op = subcode;
++ expr->ops.ternary.opnd0 = gimple_assign_rhs1 (stmt);
++ expr->ops.ternary.opnd1 = gimple_assign_rhs2 (stmt);
++ expr->ops.ternary.opnd2 = gimple_assign_rhs3 (stmt);
++ break;
+ default:
+ gcc_unreachable ();
+ }
+@@ -373,23 +383,40 @@
+ expr1->ops.unary.opnd, 0);
+
+ case EXPR_BINARY:
+- {
+- if (expr0->ops.binary.op != expr1->ops.binary.op)
+- return false;
++ if (expr0->ops.binary.op != expr1->ops.binary.op)
++ return false;
+
+- if (operand_equal_p (expr0->ops.binary.opnd0,
+- expr1->ops.binary.opnd0, 0)
+- && operand_equal_p (expr0->ops.binary.opnd1,
+- expr1->ops.binary.opnd1, 0))
+- return true;
+-
+- /* For commutative ops, allow the other order. */
+- return (commutative_tree_code (expr0->ops.binary.op)
+- && operand_equal_p (expr0->ops.binary.opnd0,
+- expr1->ops.binary.opnd1, 0)
+- && operand_equal_p (expr0->ops.binary.opnd1,
+- expr1->ops.binary.opnd0, 0));
+- }
++ if (operand_equal_p (expr0->ops.binary.opnd0,
++ expr1->ops.binary.opnd0, 0)
++ && operand_equal_p (expr0->ops.binary.opnd1,
++ expr1->ops.binary.opnd1, 0))
++ return true;
++
++ /* For commutative ops, allow the other order. */
++ return (commutative_tree_code (expr0->ops.binary.op)
++ && operand_equal_p (expr0->ops.binary.opnd0,
++ expr1->ops.binary.opnd1, 0)
++ && operand_equal_p (expr0->ops.binary.opnd1,
++ expr1->ops.binary.opnd0, 0));
++
++ case EXPR_TERNARY:
++ if (expr0->ops.ternary.op != expr1->ops.ternary.op
++ || !operand_equal_p (expr0->ops.ternary.opnd2,
++ expr1->ops.ternary.opnd2, 0))
++ return false;
++
++ if (operand_equal_p (expr0->ops.ternary.opnd0,
++ expr1->ops.ternary.opnd0, 0)
++ && operand_equal_p (expr0->ops.ternary.opnd1,
++ expr1->ops.ternary.opnd1, 0))
++ return true;
++
++ /* For commutative ops, allow the other order. */
++ return (commutative_ternary_tree_code (expr0->ops.ternary.op)
++ && operand_equal_p (expr0->ops.ternary.opnd0,
++ expr1->ops.ternary.opnd1, 0)
++ && operand_equal_p (expr0->ops.ternary.opnd1,
++ expr1->ops.ternary.opnd0, 0));
+
+ case EXPR_CALL:
+ {
+@@ -452,8 +479,8 @@
+ case EXPR_BINARY:
+ val = iterative_hash_object (expr->ops.binary.op, val);
+ if (commutative_tree_code (expr->ops.binary.op))
+- val = iterative_hash_exprs_commutative (expr->ops.binary.opnd0,
+- expr->ops.binary.opnd1, val);
++ val = iterative_hash_exprs_commutative (expr->ops.binary.opnd0,
++ expr->ops.binary.opnd1, val);
+ else
+ {
+ val = iterative_hash_expr (expr->ops.binary.opnd0, val);
+@@ -461,6 +488,19 @@
+ }
+ break;
+
++ case EXPR_TERNARY:
++ val = iterative_hash_object (expr->ops.ternary.op, val);
++ if (commutative_ternary_tree_code (expr->ops.ternary.op))
++ val = iterative_hash_exprs_commutative (expr->ops.ternary.opnd0,
++ expr->ops.ternary.opnd1, val);
++ else
++ {
++ val = iterative_hash_expr (expr->ops.ternary.opnd0, val);
++ val = iterative_hash_expr (expr->ops.ternary.opnd1, val);
++ }
++ val = iterative_hash_expr (expr->ops.ternary.opnd2, val);
++ break;
++
+ case EXPR_CALL:
+ {
+ size_t i;
+@@ -513,6 +553,16 @@
+ print_generic_expr (stream, element->expr.ops.binary.opnd1, 0);
+ break;
+
++ case EXPR_TERNARY:
++ fprintf (stream, " %s <", tree_code_name[element->expr.ops.ternary.op]);
++ print_generic_expr (stream, element->expr.ops.ternary.opnd0, 0);
++ fputs (", ", stream);
++ print_generic_expr (stream, element->expr.ops.ternary.opnd1, 0);
++ fputs (", ", stream);
++ print_generic_expr (stream, element->expr.ops.ternary.opnd2, 0);
++ fputs (">", stream);
++ break;
++
+ case EXPR_CALL:
+ {
+ size_t i;
+Index: gcc-4_5-branch/gcc/tree-ssa-math-opts.c
+===================================================================
+--- gcc-4_5-branch.orig/gcc/tree-ssa-math-opts.c 2011-07-22 16:59:24.000000000 -0700
++++ gcc-4_5-branch/gcc/tree-ssa-math-opts.c 2011-07-22 16:59:28.611747691 -0700
+@@ -1270,6 +1270,235 @@
+ }
+ };
+
++/* Return true if RHS is a suitable operand for a widening multiplication.
++ There are two cases:
++
++ - RHS makes some value twice as wide. Store that value in *NEW_RHS_OUT
++ if so, and store its type in *TYPE_OUT.
++
++ - RHS is an integer constant. Store that value in *NEW_RHS_OUT if so,
++ but leave *TYPE_OUT untouched. */
++
++static bool
++is_widening_mult_rhs_p (tree rhs, tree *type_out, tree *new_rhs_out)
++{
++ gimple stmt;
++ tree type, type1, rhs1;
++ enum tree_code rhs_code;
++
++ if (TREE_CODE (rhs) == SSA_NAME)
++ {
++ type = TREE_TYPE (rhs);
++ stmt = SSA_NAME_DEF_STMT (rhs);
++ if (!is_gimple_assign (stmt))
++ return false;
++
++ rhs_code = gimple_assign_rhs_code (stmt);
++ if (TREE_CODE (type) == INTEGER_TYPE
++ ? !CONVERT_EXPR_CODE_P (rhs_code)
++ : rhs_code != FIXED_CONVERT_EXPR)
++ return false;
++
++ rhs1 = gimple_assign_rhs1 (stmt);
++ type1 = TREE_TYPE (rhs1);
++ if (TREE_CODE (type1) != TREE_CODE (type)
++ || TYPE_PRECISION (type1) * 2 != TYPE_PRECISION (type))
++ return false;
++
++ *new_rhs_out = rhs1;
++ *type_out = type1;
++ return true;
++ }
++
++ if (TREE_CODE (rhs) == INTEGER_CST)
++ {
++ *new_rhs_out = rhs;
++ *type_out = NULL;
++ return true;
++ }
++
++ return false;
++}
++
++/* Return true if STMT performs a widening multiplication. If so,
++ store the unwidened types of the operands in *TYPE1_OUT and *TYPE2_OUT
++ respectively. Also fill *RHS1_OUT and *RHS2_OUT such that converting
++ those operands to types *TYPE1_OUT and *TYPE2_OUT would give the
++ operands of the multiplication. */
++
++static bool
++is_widening_mult_p (gimple stmt,
++ tree *type1_out, tree *rhs1_out,
++ tree *type2_out, tree *rhs2_out)
++{
++ tree type;
++
++ type = TREE_TYPE (gimple_assign_lhs (stmt));
++ if (TREE_CODE (type) != INTEGER_TYPE
++ && TREE_CODE (type) != FIXED_POINT_TYPE)
++ return false;
++
++ if (!is_widening_mult_rhs_p (gimple_assign_rhs1 (stmt), type1_out, rhs1_out))
++ return false;
++
++ if (!is_widening_mult_rhs_p (gimple_assign_rhs2 (stmt), type2_out, rhs2_out))
++ return false;
++
++ if (*type1_out == NULL)
++ {
++ if (*type2_out == NULL || !int_fits_type_p (*rhs1_out, *type2_out))
++ return false;
++ *type1_out = *type2_out;
++ }
++
++ if (*type2_out == NULL)
++ {
++ if (!int_fits_type_p (*rhs2_out, *type1_out))
++ return false;
++ *type2_out = *type1_out;
++ }
++
++ return true;
++}
++
++/* Process a single gimple statement STMT, which has a MULT_EXPR as
++ its rhs, and try to convert it into a WIDEN_MULT_EXPR. The return
++ value is true iff we converted the statement. */
++
++static bool
++convert_mult_to_widen (gimple stmt)
++{
++ tree lhs, rhs1, rhs2, type, type1, type2;
++ enum insn_code handler;
++
++ lhs = gimple_assign_lhs (stmt);
++ type = TREE_TYPE (lhs);
++ if (TREE_CODE (type) != INTEGER_TYPE)
++ return false;
++
++ if (!is_widening_mult_p (stmt, &type1, &rhs1, &type2, &rhs2))
++ return false;
++
++ if (TYPE_UNSIGNED (type1) && TYPE_UNSIGNED (type2))
++ handler = optab_handler (umul_widen_optab, TYPE_MODE (type))->insn_code;
++ else if (!TYPE_UNSIGNED (type1) && !TYPE_UNSIGNED (type2))
++ handler = optab_handler (smul_widen_optab, TYPE_MODE (type))->insn_code;
++ else
++ handler = optab_handler (usmul_widen_optab, TYPE_MODE (type))->insn_code;
++
++ if (handler == CODE_FOR_nothing)
++ return false;
++
++ gimple_assign_set_rhs1 (stmt, fold_convert (type1, rhs1));
++ gimple_assign_set_rhs2 (stmt, fold_convert (type2, rhs2));
++ gimple_assign_set_rhs_code (stmt, WIDEN_MULT_EXPR);
++ update_stmt (stmt);
++ return true;
++}
++
++/* Process a single gimple statement STMT, which is found at the
++ iterator GSI and has a either a PLUS_EXPR or a MINUS_EXPR as its
++ rhs (given by CODE), and try to convert it into a
++ WIDEN_MULT_PLUS_EXPR or a WIDEN_MULT_MINUS_EXPR. The return value
++ is true iff we converted the statement. */
++
++static bool
++convert_plusminus_to_widen (gimple_stmt_iterator *gsi, gimple stmt,
++ enum tree_code code)
++{
++ gimple rhs1_stmt = NULL, rhs2_stmt = NULL;
++ tree type, type1, type2;
++ tree lhs, rhs1, rhs2, mult_rhs1, mult_rhs2, add_rhs;
++ enum tree_code rhs1_code = ERROR_MARK, rhs2_code = ERROR_MARK;
++ optab this_optab;
++ enum tree_code wmult_code;
++
++ lhs = gimple_assign_lhs (stmt);
++ type = TREE_TYPE (lhs);
++ if (TREE_CODE (type) != INTEGER_TYPE
++ && TREE_CODE (type) != FIXED_POINT_TYPE)
++ return false;
++
++ if (code == MINUS_EXPR)
++ wmult_code = WIDEN_MULT_MINUS_EXPR;
++ else
++ wmult_code = WIDEN_MULT_PLUS_EXPR;
++
++ rhs1 = gimple_assign_rhs1 (stmt);
++ rhs2 = gimple_assign_rhs2 (stmt);
++
++ if (TREE_CODE (rhs1) == SSA_NAME)
++ {
++ rhs1_stmt = SSA_NAME_DEF_STMT (rhs1);
++ if (is_gimple_assign (rhs1_stmt))
++ rhs1_code = gimple_assign_rhs_code (rhs1_stmt);
++ }
++ else
++ return false;
++
++ if (TREE_CODE (rhs2) == SSA_NAME)
++ {
++ rhs2_stmt = SSA_NAME_DEF_STMT (rhs2);
++ if (is_gimple_assign (rhs2_stmt))
++ rhs2_code = gimple_assign_rhs_code (rhs2_stmt);
++ }
++ else
++ return false;
++
++ if (code == PLUS_EXPR && rhs1_code == MULT_EXPR)
++ {
++ if (!is_widening_mult_p (rhs1_stmt, &type1, &mult_rhs1,
++ &type2, &mult_rhs2))
++ return false;
++ add_rhs = rhs2;
++ }
++ else if (rhs2_code == MULT_EXPR)
++ {
++ if (!is_widening_mult_p (rhs2_stmt, &type1, &mult_rhs1,
++ &type2, &mult_rhs2))
++ return false;
++ add_rhs = rhs1;
++ }
++ else if (code == PLUS_EXPR && rhs1_code == WIDEN_MULT_EXPR)
++ {
++ mult_rhs1 = gimple_assign_rhs1 (rhs1_stmt);
++ mult_rhs2 = gimple_assign_rhs2 (rhs1_stmt);
++ type1 = TREE_TYPE (mult_rhs1);
++ type2 = TREE_TYPE (mult_rhs2);
++ add_rhs = rhs2;
++ }
++ else if (rhs2_code == WIDEN_MULT_EXPR)
++ {
++ mult_rhs1 = gimple_assign_rhs1 (rhs2_stmt);
++ mult_rhs2 = gimple_assign_rhs2 (rhs2_stmt);
++ type1 = TREE_TYPE (mult_rhs1);
++ type2 = TREE_TYPE (mult_rhs2);
++ add_rhs = rhs1;
++ }
++ else
++ return false;
++
++ if (TYPE_UNSIGNED (type1) != TYPE_UNSIGNED (type2))
++ return false;
++
++ /* Verify that the machine can perform a widening multiply
++ accumulate in this mode/signedness combination, otherwise
++ this transformation is likely to pessimize code. */
++ this_optab = optab_for_tree_code (wmult_code, type1, optab_default);
++ if (optab_handler (this_optab, TYPE_MODE (type))->insn_code
++ == CODE_FOR_nothing)
++ return false;
++
++ /* ??? May need some type verification here? */
++
++ gimple_assign_set_rhs_with_ops_1 (gsi, wmult_code,
++ fold_convert (type1, mult_rhs1),
++ fold_convert (type2, mult_rhs2),
++ add_rhs);
++ update_stmt (gsi_stmt (*gsi));
++ return true;
++}
++
+ /* Find integer multiplications where the operands are extended from
+ smaller types, and replace the MULT_EXPR with a WIDEN_MULT_EXPR
+ where appropriate. */
+@@ -1287,94 +1516,19 @@
+ for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+ {
+ gimple stmt = gsi_stmt (gsi);
+- gimple rhs1_stmt = NULL, rhs2_stmt = NULL;
+- tree type, type1 = NULL, type2 = NULL;
+- tree rhs1, rhs2, rhs1_convop = NULL, rhs2_convop = NULL;
+- enum tree_code rhs1_code, rhs2_code;
++ enum tree_code code;
+
+- if (!is_gimple_assign (stmt)
+- || gimple_assign_rhs_code (stmt) != MULT_EXPR)
++ if (!is_gimple_assign (stmt))
+ continue;
+
+- type = TREE_TYPE (gimple_assign_lhs (stmt));
+-
+- if (TREE_CODE (type) != INTEGER_TYPE)
+- continue;
+-
+- rhs1 = gimple_assign_rhs1 (stmt);
+- rhs2 = gimple_assign_rhs2 (stmt);
+-
+- if (TREE_CODE (rhs1) == SSA_NAME)
+- {
+- rhs1_stmt = SSA_NAME_DEF_STMT (rhs1);
+- if (!is_gimple_assign (rhs1_stmt))
+- continue;
+- rhs1_code = gimple_assign_rhs_code (rhs1_stmt);
+- if (!CONVERT_EXPR_CODE_P (rhs1_code))
+- continue;
+- rhs1_convop = gimple_assign_rhs1 (rhs1_stmt);
+- type1 = TREE_TYPE (rhs1_convop);
+- if (TYPE_PRECISION (type1) * 2 != TYPE_PRECISION (type))
+- continue;
+- }
+- else if (TREE_CODE (rhs1) != INTEGER_CST)
+- continue;
+-
+- if (TREE_CODE (rhs2) == SSA_NAME)
+- {
+- rhs2_stmt = SSA_NAME_DEF_STMT (rhs2);
+- if (!is_gimple_assign (rhs2_stmt))
+- continue;
+- rhs2_code = gimple_assign_rhs_code (rhs2_stmt);
+- if (!CONVERT_EXPR_CODE_P (rhs2_code))
+- continue;
+- rhs2_convop = gimple_assign_rhs1 (rhs2_stmt);
+- type2 = TREE_TYPE (rhs2_convop);
+- if (TYPE_PRECISION (type2) * 2 != TYPE_PRECISION (type))
+- continue;
+- }
+- else if (TREE_CODE (rhs2) != INTEGER_CST)
+- continue;
+-
+- if (rhs1_stmt == NULL && rhs2_stmt == NULL)
+- continue;
+-
+- /* Verify that the machine can perform a widening multiply in this
+- mode/signedness combination, otherwise this transformation is
+- likely to pessimize code. */
+- if ((rhs1_stmt == NULL || TYPE_UNSIGNED (type1))
+- && (rhs2_stmt == NULL || TYPE_UNSIGNED (type2))
+- && (optab_handler (umul_widen_optab, TYPE_MODE (type))
+- ->insn_code == CODE_FOR_nothing))
+- continue;
+- else if ((rhs1_stmt == NULL || !TYPE_UNSIGNED (type1))
+- && (rhs2_stmt == NULL || !TYPE_UNSIGNED (type2))
+- && (optab_handler (smul_widen_optab, TYPE_MODE (type))
+- ->insn_code == CODE_FOR_nothing))
+- continue;
+- else if (rhs1_stmt != NULL && rhs2_stmt != 0
+- && (TYPE_UNSIGNED (type1) != TYPE_UNSIGNED (type2))
+- && (optab_handler (usmul_widen_optab, TYPE_MODE (type))
+- ->insn_code == CODE_FOR_nothing))
+- continue;
+-
+- if ((rhs1_stmt == NULL && !int_fits_type_p (rhs1, type2))
+- || (rhs2_stmt == NULL && !int_fits_type_p (rhs2, type1)))
+- continue;
+-
+- if (rhs1_stmt == NULL)
+- gimple_assign_set_rhs1 (stmt, fold_convert (type2, rhs1));
+- else
+- gimple_assign_set_rhs1 (stmt, rhs1_convop);
+- if (rhs2_stmt == NULL)
+- gimple_assign_set_rhs2 (stmt, fold_convert (type1, rhs2));
+- else
+- gimple_assign_set_rhs2 (stmt, rhs2_convop);
+- gimple_assign_set_rhs_code (stmt, WIDEN_MULT_EXPR);
+- update_stmt (stmt);
+- changed = true;
++ code = gimple_assign_rhs_code (stmt);
++ if (code == MULT_EXPR)
++ changed |= convert_mult_to_widen (stmt);
++ else if (code == PLUS_EXPR || code == MINUS_EXPR)
++ changed |= convert_plusminus_to_widen (&gsi, stmt, code);
+ }
+ }
++
+ return (changed ? TODO_dump_func | TODO_update_ssa | TODO_verify_ssa
+ | TODO_verify_stmts : 0);
+ }
+Index: gcc-4_5-branch/gcc/tree-ssa-operands.c
+===================================================================
+--- gcc-4_5-branch.orig/gcc/tree-ssa-operands.c 2011-07-22 16:58:48.000000000 -0700
++++ gcc-4_5-branch/gcc/tree-ssa-operands.c 2011-07-22 16:59:28.611747691 -0700
+@@ -994,11 +994,13 @@
+
+ case DOT_PROD_EXPR:
+ case REALIGN_LOAD_EXPR:
++ case WIDEN_MULT_PLUS_EXPR:
++ case WIDEN_MULT_MINUS_EXPR:
+ {
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
+- get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
+- get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
+- return;
++ get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
++ get_expr_operands (stmt, &TREE_OPERAND (expr, 2), flags);
++ return;
+ }
+
+ case FUNCTION_DECL:
+Index: gcc-4_5-branch/gcc/tree-ssa-sccvn.c
+===================================================================
+--- gcc-4_5-branch.orig/gcc/tree-ssa-sccvn.c 2011-07-22 16:58:48.000000000 -0700
++++ gcc-4_5-branch/gcc/tree-ssa-sccvn.c 2011-07-22 16:59:28.611747691 -0700
+@@ -2298,6 +2298,10 @@
+ case GIMPLE_BINARY_RHS:
+ return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
+ || is_gimple_min_invariant (gimple_assign_rhs2 (stmt)));
++ case GIMPLE_TERNARY_RHS:
++ return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
++ || is_gimple_min_invariant (gimple_assign_rhs2 (stmt))
++ || is_gimple_min_invariant (gimple_assign_rhs3 (stmt)));
+ case GIMPLE_SINGLE_RHS:
+ /* Constants inside reference ops are rarely interesting, but
+ it can take a lot of looking to find them. */
+Index: gcc-4_5-branch/gcc/tree-ssa-threadedge.c
+===================================================================
+--- gcc-4_5-branch.orig/gcc/tree-ssa-threadedge.c 2011-07-22 16:58:48.000000000 -0700
++++ gcc-4_5-branch/gcc/tree-ssa-threadedge.c 2011-07-22 16:59:28.611747691 -0700
+@@ -247,14 +247,14 @@
+
+ return fold (rhs);
+ }
+- break;
++
+ case GIMPLE_UNARY_RHS:
+ {
+ tree lhs = gimple_assign_lhs (stmt);
+ tree op0 = gimple_assign_rhs1 (stmt);
+ return fold_unary (subcode, TREE_TYPE (lhs), op0);
+ }
+- break;
++
+ case GIMPLE_BINARY_RHS:
+ {
+ tree lhs = gimple_assign_lhs (stmt);
+@@ -262,7 +262,16 @@
+ tree op1 = gimple_assign_rhs2 (stmt);
+ return fold_binary (subcode, TREE_TYPE (lhs), op0, op1);
+ }
+- break;
++
++ case GIMPLE_TERNARY_RHS:
++ {
++ tree lhs = gimple_assign_lhs (stmt);
++ tree op0 = gimple_assign_rhs1 (stmt);
++ tree op1 = gimple_assign_rhs2 (stmt);
++ tree op2 = gimple_assign_rhs3 (stmt);
++ return fold_ternary (subcode, TREE_TYPE (lhs), op0, op1, op2);
++ }
++
+ default:
+ gcc_unreachable ();
+ }
+Index: gcc-4_5-branch/gcc/tree-vrp.c
+===================================================================
+--- gcc-4_5-branch.orig/gcc/tree-vrp.c 2011-07-22 16:58:48.000000000 -0700
++++ gcc-4_5-branch/gcc/tree-vrp.c 2011-07-22 16:59:28.621747691 -0700
+@@ -864,6 +864,8 @@
+ gimple_assign_rhs1 (stmt),
+ gimple_assign_rhs2 (stmt),
+ strict_overflow_p);
++ case GIMPLE_TERNARY_RHS:
++ return false;
+ case GIMPLE_SINGLE_RHS:
+ return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
+ strict_overflow_p);
+@@ -935,6 +937,8 @@
+ gimple_assign_rhs1 (stmt),
+ gimple_assign_rhs2 (stmt),
+ strict_overflow_p);
++ case GIMPLE_TERNARY_RHS:
++ return false;
+ case GIMPLE_SINGLE_RHS:
+ return tree_single_nonzero_warnv_p (gimple_assign_rhs1 (stmt),
+ strict_overflow_p);
+Index: gcc-4_5-branch/gcc/tree.c
+===================================================================
+--- gcc-4_5-branch.orig/gcc/tree.c 2011-07-22 16:59:13.000000000 -0700
++++ gcc-4_5-branch/gcc/tree.c 2011-07-22 16:59:28.621747691 -0700
+@@ -6548,6 +6548,23 @@
+ return false;
+ }
+
++/* Return true if CODE represents a ternary tree code for which the
++ first two operands are commutative. Otherwise return false. */
++bool
++commutative_ternary_tree_code (enum tree_code code)
++{
++ switch (code)
++ {
++ case WIDEN_MULT_PLUS_EXPR:
++ case WIDEN_MULT_MINUS_EXPR:
++ return true;
++
++ default:
++ break;
++ }
++ return false;
++}
++
+ /* Generate a hash value for an expression. This can be used iteratively
+ by passing a previous result as the VAL argument.
+
+Index: gcc-4_5-branch/gcc/tree.def
+===================================================================
+--- gcc-4_5-branch.orig/gcc/tree.def 2011-07-22 16:58:48.000000000 -0700
++++ gcc-4_5-branch/gcc/tree.def 2011-07-22 16:59:28.631747691 -0700
+@@ -1083,6 +1083,18 @@
+ the arguments from type t1 to type t2, and then multiplying them. */
+ DEFTREECODE (WIDEN_MULT_EXPR, "widen_mult_expr", tcc_binary, 2)
+
++/* Widening multiply-accumulate.
++ The first two arguments are of type t1.
++ The third argument and the result are of type t2, such that t2 is at least
++ twice the size of t1. t1 and t2 must be integral or fixed-point types.
++ The expression is equivalent to a WIDEN_MULT_EXPR operation
++ of the first two operands followed by an add or subtract of the third
++ operand. */
++DEFTREECODE (WIDEN_MULT_PLUS_EXPR, "widen_mult_plus_expr", tcc_expression, 3)
++/* This is like the above, except in the final expression the multiply result
++ is subtracted from the third operand. */
++DEFTREECODE (WIDEN_MULT_MINUS_EXPR, "widen_mult_minus_expr", tcc_expression, 3)
++
+ /* Whole vector left/right shift in bits.
+ Operand 0 is a vector to be shifted.
+ Operand 1 is an integer shift amount in bits. */
+Index: gcc-4_5-branch/gcc/tree.h
+===================================================================
+--- gcc-4_5-branch.orig/gcc/tree.h 2011-07-22 16:59:13.000000000 -0700
++++ gcc-4_5-branch/gcc/tree.h 2011-07-22 16:59:28.631747691 -0700
+@@ -4687,6 +4687,7 @@
+ extern int type_num_arguments (const_tree);
+ extern bool associative_tree_code (enum tree_code);
+ extern bool commutative_tree_code (enum tree_code);
++extern bool commutative_ternary_tree_code (enum tree_code);
+ extern tree upper_bound_in_type (tree, tree);
+ extern tree lower_bound_in_type (tree, tree);
+ extern int operand_equal_for_phi_arg_p (const_tree, const_tree);