diff options
author | Richard Guenther <rguenther@suse.de> | 2016-05-20 09:17:16 +0000 |
---|---|---|
committer | Richard Biener <rguenth@gcc.gnu.org> | 2016-05-20 09:17:16 +0000 |
commit | 483c642948802336899b14fa57b2e76c760c3c36 (patch) | |
tree | e061dbe5dac8633c8babd3bf14639f9e128c156e /gcc/fold-const.c | |
parent | eb066284cb8cdf7f46f455c3e975cf1b349a0487 (diff) | |
download | gcc-483c642948802336899b14fa57b2e76c760c3c36.zip gcc-483c642948802336899b14fa57b2e76c760c3c36.tar.gz gcc-483c642948802336899b14fa57b2e76c760c3c36.tar.bz2 |
re PR target/29756 (SSE intrinsics hard to use without redundant temporaries appearing)
2016-05-20 Richard Guenther <rguenther@suse.de>
PR tree-optimization/29756
* tree.def (BIT_INSERT_EXPR): New tcc_expression tree code.
* expr.c (expand_expr_real_2): Handle BIT_INSERT_EXPR.
* fold-const.c (operand_equal_p): Likewise.
(fold_ternary_loc): Add constant folding of BIT_INSERT_EXPR.
* gimplify.c (gimplify_expr): Handle BIT_INSERT_EXPR.
* tree-inline.c (estimate_operator_cost): Likewise.
* tree-pretty-print.c (dump_generic_node): Likewise.
* tree-ssa-operands.c (get_expr_operands): Likewise.
* cfgexpand.c (expand_debug_expr): Likewise.
* gimple-pretty-print.c (dump_ternary_rhs): Likewise.
* gimple.c (get_gimple_rhs_num_ops): Handle BIT_INSERT_EXPR.
* tree-cfg.c (verify_gimple_assign_ternary): Verify BIT_INSERT_EXPR.
* tree-ssa.c (non_rewritable_lvalue_p): We can rewrite
vector inserts using BIT_FIELD_REF or MEM_REF on the lhs.
(execute_update_addresses_taken): Do it.
* gcc.dg/tree-ssa/vector-6.c: New testcase.
From-SVN: r236501
Diffstat (limited to 'gcc/fold-const.c')
-rw-r--r-- | gcc/fold-const.c | 41 |
1 file changed, 41 insertions(+), 0 deletions(-)
diff --git a/gcc/fold-const.c b/gcc/fold-const.c index 8a7c93e..556fc73 100644 --- a/gcc/fold-const.c +++ b/gcc/fold-const.c @@ -3163,6 +3163,7 @@ operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags) case VEC_COND_EXPR: case DOT_PROD_EXPR: + case BIT_INSERT_EXPR: return OP_SAME (0) && OP_SAME (1) && OP_SAME (2); default: @@ -11860,6 +11861,46 @@ fold_ternary_loc (location_t loc, enum tree_code code, tree type, } return NULL_TREE; + case BIT_INSERT_EXPR: + /* Perform (partial) constant folding of BIT_INSERT_EXPR. */ + if (TREE_CODE (arg0) == INTEGER_CST + && TREE_CODE (arg1) == INTEGER_CST) + { + unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2); + unsigned bitsize = TYPE_PRECISION (TREE_TYPE (arg1)); + wide_int tem = wi::bit_and (arg0, + wi::shifted_mask (bitpos, bitsize, true, + TYPE_PRECISION (type))); + wide_int tem2 + = wi::lshift (wi::zext (wi::to_wide (arg1, TYPE_PRECISION (type)), + bitsize), bitpos); + return wide_int_to_tree (type, wi::bit_or (tem, tem2)); + } + else if (TREE_CODE (arg0) == VECTOR_CST + && CONSTANT_CLASS_P (arg1) + && types_compatible_p (TREE_TYPE (TREE_TYPE (arg0)), + TREE_TYPE (arg1))) + { + unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2); + unsigned HOST_WIDE_INT elsize + = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (arg1))); + if (bitpos % elsize == 0) + { + unsigned k = bitpos / elsize; + if (operand_equal_p (VECTOR_CST_ELT (arg0, k), arg1, 0)) + return arg0; + else + { + tree *elts = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type)); + memcpy (elts, VECTOR_CST_ELTS (arg0), + sizeof (tree) * TYPE_VECTOR_SUBPARTS (type)); + elts[k] = arg1; + return build_vector (type, elts); + } + } + } + return NULL_TREE; + default: return NULL_TREE; } /* switch (code) */ |