aboutsummaryrefslogtreecommitdiff
path: root/gcc/ada/gcc-interface/utils2.c
diff options
context:
space:
mode:
authorEric Botcazou <ebotcazou@adacore.com>2015-05-25 20:18:44 +0000
committerEric Botcazou <ebotcazou@gcc.gnu.org>2015-05-25 20:18:44 +0000
commitf797c2b745ec8a1b5750900caf54c96dcbc904ca (patch)
tree79e475b241a12de74ed6db829e811d0c9aff7480 /gcc/ada/gcc-interface/utils2.c
parent2e24efd3f49524b05dfb198cd60205a7113b10a2 (diff)
downloadgcc-f797c2b745ec8a1b5750900caf54c96dcbc904ca.zip
gcc-f797c2b745ec8a1b5750900caf54c96dcbc904ca.tar.gz
gcc-f797c2b745ec8a1b5750900caf54c96dcbc904ca.tar.bz2
gigi.h (build_atomic_load): Adjust prototype.
* gcc-interface/gigi.h (build_atomic_load): Adjust prototype. (build_atomic_store): Likewise. (build_load_modify_store): Declare. (VECTOR_TYPE_P): Delete. * gcc-interface/decl.c (gnat_to_gnu_entity): Replace Is_Atomic with Is_Atomic_Or_VFA throughout. <E_Array_Type>: Build a variant of the XUA type instead of forcing TYPE_VOLATILE on it. <E_Array_Subtype>: Use the main variant of the base type. Do not force TYPE_VOLATILE on the type being built. <E_Record_Type>: Likewise. <E_Array_Subtype>: Likewise. <E_Subprogram_Type>: Rename local variable. Add Atomic qualifier in conjunction with Volatile on types if needed. Force BLKmode for by-ref types only at the end of the processing. Change qualifiers only after changing the mode of the type. Set TYPE_UNIVERSAL_ALIASING_P on the type directly. (check_ok_for_atomic_type): Issue specific error message for VFA. (gnat_to_gnu_component_type): Replace Is_Atomic with Is_Atomic_Or_VFA throughout. * gcc-interface/misc.c (gnat_get_alias_set): Test TYPE_UNIVERSAL_ALIASING_P on the type directly. * gcc-interface/trans.c (lvalue_required_p): Replace Is_Atomic with Is_Atomic_Or_VFA throughout. Add missing guard. (node_is_atomic): New predicate. (node_has_volatile_full_access): Likewise. (gnat_strip_type_conversion): New function. (outer_atomic_access_required_p): New predicate. (atomic_sync_required_p): Rename into... (atomic_access_required_p): ...this. Add SYNC parameter, scan the parent node first and then look for the atomic setting. Add support for Volatile_Full_Access. (Call_to_gnu): Add atomic_access and outer_atomic_access parameters and adjust calls to above functions. Use load-modify-store sequence for updates of In/Out and Out parameters if required, as well as for moving the result to the target if required. Add a couple of missing guards. (gnat_to_gnu): Adjust calls to above functions. <N_Object_Renaming_Declaration>: If the renamed object has side-effects, evaluate only its address. 
<N_Assignment_Statement>: Adjust call to Call_to_gnu. Use load-modify-store sequence if required. <N_Function_Call>: Adjust call to Call_to_gnu. (extract_values): Adjust comment. * gcc-interface/utils2.c (build_atomic_load): Add SYNC parameter and use relaxed memory model if it is not set. (build_atomic_store): Likewise. (call_is_atomic_load): New predicate. (build_load_modify_store): New function. (build_binary_op) <MODIFY_EXPR>: Accept SAVE_EXPR on the LHS. (gnat_stabilize_reference) <CALL_EXPR>: Deal with atomic loads. From-SVN: r223652
Diffstat (limited to 'gcc/ada/gcc-interface/utils2.c')
-rw-r--r--gcc/ada/gcc-interface/utils2.c117
1 file changed, 107 insertions, 10 deletions
diff --git a/gcc/ada/gcc-interface/utils2.c b/gcc/ada/gcc-interface/utils2.c
index e09b5b9..aa92382 100644
--- a/gcc/ada/gcc-interface/utils2.c
+++ b/gcc/ada/gcc-interface/utils2.c
@@ -658,15 +658,19 @@ resolve_atomic_size (tree type)
return 0;
}
-/* Build an atomic load for the underlying atomic object in SRC. */
+/* Build an atomic load for the underlying atomic object in SRC. SYNC is
+ true if the load requires synchronization. */
tree
-build_atomic_load (tree src)
+build_atomic_load (tree src, bool sync)
{
tree ptr_type
= build_pointer_type
- (build_qualified_type (void_type_node, TYPE_QUAL_VOLATILE));
- tree mem_model = build_int_cst (integer_type_node, MEMMODEL_SEQ_CST);
+ (build_qualified_type (void_type_node,
+ TYPE_QUAL_ATOMIC | TYPE_QUAL_VOLATILE));
+ tree mem_model
+ = build_int_cst (integer_type_node,
+ sync ? MEMMODEL_SEQ_CST : MEMMODEL_RELAXED);
tree orig_src = src;
tree t, addr, val;
unsigned int size;
@@ -690,15 +694,19 @@ build_atomic_load (tree src)
return convert (TREE_TYPE (orig_src), t);
}
-/* Build an atomic store from SRC to the underlying atomic object in DEST. */
+/* Build an atomic store from SRC to the underlying atomic object in DEST.
+ SYNC is true if the store requires synchronization. */
tree
-build_atomic_store (tree dest, tree src)
+build_atomic_store (tree dest, tree src, bool sync)
{
tree ptr_type
= build_pointer_type
- (build_qualified_type (void_type_node, TYPE_QUAL_VOLATILE));
- tree mem_model = build_int_cst (integer_type_node, MEMMODEL_SEQ_CST);
+ (build_qualified_type (void_type_node,
+ TYPE_QUAL_ATOMIC | TYPE_QUAL_VOLATILE));
+ tree mem_model
+ = build_int_cst (integer_type_node,
+ sync ? MEMMODEL_SEQ_CST : MEMMODEL_RELAXED);
tree orig_dest = dest;
tree t, int_type, addr;
unsigned int size;
@@ -729,6 +737,87 @@ build_atomic_store (tree dest, tree src)
return build_call_expr (t, 3, addr, src, mem_model);
}
+
+/* Return true if EXP, a CALL_EXPR, is an atomic load. */
+
+static bool
+call_is_atomic_load (tree exp)
+{
+ tree fndecl = get_callee_fndecl (exp);
+
+ if (!(fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL))
+ return false;
+
+ enum built_in_function code = DECL_FUNCTION_CODE (fndecl);
+ return BUILT_IN_ATOMIC_LOAD_N <= code && code <= BUILT_IN_ATOMIC_LOAD_16;
+}
+
+/* Build a load-modify-store sequence from SRC to DEST. GNAT_NODE is used for
+ the location of the sequence. Note that, even if the load and the store are
+ both atomic, the sequence itself is not atomic. */
+
+tree
+build_load_modify_store (tree dest, tree src, Node_Id gnat_node)
+{
+ tree ref = dest;
+
+ while (handled_component_p (ref))
+ {
+ /* The load should already have been generated during the translation
+ of the GNAT destination tree; find it out in the GNU tree. */
+ if (TREE_CODE (TREE_OPERAND (ref, 0)) == VIEW_CONVERT_EXPR)
+ {
+ tree op = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
+ if (TREE_CODE (op) == CALL_EXPR && call_is_atomic_load (op))
+ {
+ tree type = TREE_TYPE (TREE_OPERAND (ref, 0));
+ tree t = CALL_EXPR_ARG (op, 0);
+ tree obj, temp, stmt;
+
+ /* Find out the loaded object. */
+ if (TREE_CODE (t) == NOP_EXPR)
+ t = TREE_OPERAND (t, 0);
+ if (TREE_CODE (t) == ADDR_EXPR)
+ obj = TREE_OPERAND (t, 0);
+ else
+ obj = build1 (INDIRECT_REF, type, t);
+
+ /* Drop atomic and volatile qualifiers for the temporary. */
+ type = TYPE_MAIN_VARIANT (type);
+
+ /* And drop BLKmode, if need be, to put it into a register. */
+ if (TYPE_MODE (type) == BLKmode)
+ {
+ unsigned int size = tree_to_uhwi (TYPE_SIZE (type));
+ type = copy_type (type);
+ SET_TYPE_MODE (type, mode_for_size (size, MODE_INT, 0));
+ }
+
+ /* Create the temporary by inserting a SAVE_EXPR. */
+ temp = build1 (SAVE_EXPR, type,
+ build1 (VIEW_CONVERT_EXPR, type, op));
+ TREE_OPERAND (ref, 0) = temp;
+
+ start_stmt_group ();
+
+ /* Build the modify of the temporary. */
+ stmt = build_binary_op (MODIFY_EXPR, NULL_TREE, dest, src);
+ add_stmt_with_node (stmt, gnat_node);
+
+ /* Build the store to the object. */
+ stmt = build_atomic_store (obj, temp, false);
+ add_stmt_with_node (stmt, gnat_node);
+
+ return end_stmt_group ();
+ }
+ }
+
+ ref = TREE_OPERAND (ref, 0);
+ }
+
+ /* Something went wrong earlier if we have not found the atomic load. */
+ gcc_unreachable ();
+}
/* Make a binary operation of kind OP_CODE. RESULT_TYPE is the type
desired for the result. Usually the operation is to be performed
@@ -870,7 +959,7 @@ build_binary_op (enum tree_code op_code, tree result_type,
strip anything that get_inner_reference can handle. Then remove any
conversions between types having the same code and mode. And mark
VIEW_CONVERT_EXPRs with TREE_ADDRESSABLE. When done, we must have
- either an INDIRECT_REF, a NULL_EXPR or a DECL node. */
+ either an INDIRECT_REF, a NULL_EXPR, a SAVE_EXPR or a DECL node. */
result = left_operand;
while (true)
{
@@ -903,6 +992,7 @@ build_binary_op (enum tree_code op_code, tree result_type,
gcc_assert (TREE_CODE (result) == INDIRECT_REF
|| TREE_CODE (result) == NULL_EXPR
+ || TREE_CODE (result) == SAVE_EXPR
|| DECL_P (result));
/* Convert the right operand to the operation type unless it is
@@ -2716,7 +2806,14 @@ gnat_stabilize_reference (tree ref, bool force, bool *success)
break;
case CALL_EXPR:
- result = gnat_stabilize_reference_1 (ref, force);
+ if (call_is_atomic_load (ref))
+ result
+ = build_call_expr (TREE_OPERAND (CALL_EXPR_FN (ref), 0), 2,
+ gnat_stabilize_reference (CALL_EXPR_ARG (ref, 0),
+ force, success),
+ CALL_EXPR_ARG (ref, 1));
+ else
+ result = gnat_stabilize_reference_1 (ref, force);
break;
case COMPOUND_EXPR: