author    Eric Botcazou <ebotcazou@adacore.com>  2015-11-08 18:33:42 +0000
committer Eric Botcazou <ebotcazou@gcc.gnu.org>  2015-11-08 18:33:42 +0000
commit    ee45a32dae253f7daa966573eb8cb64b2cf7bf52 (patch)
tree      cf927ff52a6d5ba28290472db09363fe67a835d6 /gcc/ada/gcc-interface
parent    eb11eb157cf07500e2915da8a72f2f3a501cc5ae (diff)
Merge of the scalar-storage-order branch.
From-SVN: r229965
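
The scalar-storage-order branch adds support for the Ada Scalar_Storage_Order aspect in gigi: an aggregate type may declare that its scalar components are stored in the byte order opposite to the target's native one, and the changes below propagate a new TYPE_REVERSE_STORAGE_ORDER flag so that accesses to such components get the required byte swapping. As a hedged, purely illustrative sketch (not part of the patch), GCC's C-level counterpart of the aspect shows the intended semantics:

/* Illustrative only: a struct whose scalar members are stored big-endian
   even on a little-endian target; ordinary accesses are compiled into a
   load or store plus the byte swap the storage order implies.  */
struct __attribute__ ((scalar_storage_order ("big-endian"))) be_word
{
  unsigned int value;
};

unsigned int
read_be_word (const struct be_word *p)
{
  return p->value;   /* load + byte swap on little-endian targets */
}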
Diffstat (limited to 'gcc/ada/gcc-interface')
-rw-r--r--  gcc/ada/gcc-interface/decl.c   | 49
-rw-r--r--  gcc/ada/gcc-interface/gigi.h   |  3
-rw-r--r--  gcc/ada/gcc-interface/trans.c  |  4
-rw-r--r--  gcc/ada/gcc-interface/utils.c  | 84
-rw-r--r--  gcc/ada/gcc-interface/utils2.c | 11
5 files changed, 128 insertions, 23 deletions
diff --git a/gcc/ada/gcc-interface/decl.c b/gcc/ada/gcc-interface/decl.c
index 7f15b7c..627eace 100644
--- a/gcc/ada/gcc-interface/decl.c
+++ b/gcc/ada/gcc-interface/decl.c
@@ -1805,6 +1805,12 @@ gnat_to_gnu_entity (Entity_Id gnat_entity, tree gnu_expr, int definition)
TYPE_ALIGN (gnu_type)
= align > 0 ? align : TYPE_ALIGN (gnu_field_type);
+ /* Propagate the reverse storage order flag to the record type so
+ that the required byte swapping is performed when retrieving the
+ enclosed modular value. */
+ TYPE_REVERSE_STORAGE_ORDER (gnu_type)
+ = Reverse_Storage_Order (Original_Array_Type (gnat_entity));
+
relate_alias_sets (gnu_type, gnu_field_type, ALIAS_SET_COPY);
/* Don't declare the field as addressable since we won't be taking
@@ -2152,8 +2158,9 @@ gnat_to_gnu_entity (Entity_Id gnat_entity, tree gnu_expr, int definition)
for (index = ndim - 1; index >= 0; index--)
{
tem = build_nonshared_array_type (tem, gnu_index_types[index]);
- if (Reverse_Storage_Order (gnat_entity) && !GNAT_Mode)
- sorry ("non-default Scalar_Storage_Order");
+ if (index == ndim - 1)
+ TYPE_REVERSE_STORAGE_ORDER (tem)
+ = Reverse_Storage_Order (gnat_entity);
TYPE_MULTI_ARRAY_P (tem) = (index > 0);
if (array_type_has_nonaliased_component (tem, gnat_entity))
TYPE_NONALIASED_COMPONENT (tem) = 1;
@@ -2516,6 +2523,9 @@ gnat_to_gnu_entity (Entity_Id gnat_entity, tree gnu_expr, int definition)
{
gnu_type = build_nonshared_array_type (gnu_type,
gnu_index_types[index]);
+ if (index == ndim - 1)
+ TYPE_REVERSE_STORAGE_ORDER (gnu_type)
+ = Reverse_Storage_Order (gnat_entity);
TYPE_MULTI_ARRAY_P (gnu_type) = (index > 0);
if (array_type_has_nonaliased_component (gnu_type, gnat_entity))
TYPE_NONALIASED_COMPONENT (gnu_type) = 1;
@@ -2876,8 +2886,8 @@ gnat_to_gnu_entity (Entity_Id gnat_entity, tree gnu_expr, int definition)
gnu_type = make_node (tree_code_for_record_type (gnat_entity));
TYPE_NAME (gnu_type) = gnu_entity_name;
TYPE_PACKED (gnu_type) = (packed != 0) || has_rep;
- if (Reverse_Storage_Order (gnat_entity) && !GNAT_Mode)
- sorry ("non-default Scalar_Storage_Order");
+ TYPE_REVERSE_STORAGE_ORDER (gnu_type)
+ = Reverse_Storage_Order (gnat_entity);
process_attributes (&gnu_type, &attr_list, true, gnat_entity);
if (!definition)
@@ -3287,6 +3297,8 @@ gnat_to_gnu_entity (Entity_Id gnat_entity, tree gnu_expr, int definition)
gnu_type = make_node (RECORD_TYPE);
TYPE_NAME (gnu_type) = gnu_entity_name;
TYPE_PACKED (gnu_type) = TYPE_PACKED (gnu_base_type);
+ TYPE_REVERSE_STORAGE_ORDER (gnu_type)
+ = Reverse_Storage_Order (gnat_entity);
process_attributes (&gnu_type, &attr_list, true, gnat_entity);
/* Set the size, alignment and alias set of the new type to
@@ -3341,6 +3353,8 @@ gnat_to_gnu_entity (Entity_Id gnat_entity, tree gnu_expr, int definition)
TYPE_NAME (new_variant)
= concat_name (TYPE_NAME (gnu_type),
IDENTIFIER_POINTER (suffix));
+ TYPE_REVERSE_STORAGE_ORDER (new_variant)
+ = TYPE_REVERSE_STORAGE_ORDER (gnu_type);
copy_and_substitute_in_size (new_variant, old_variant,
gnu_subst_list);
v->new_type = new_variant;
@@ -5548,6 +5562,16 @@ gnat_to_gnu_component_type (Entity_Id gnat_array, bool definition,
gnat_array);
}
+ /* If the component type is a padded type made for a non-bit-packed array
+ of scalars with reverse storage order, we need to propagate the reverse
+ storage order to the padding type since it is the innermost enclosing
+ aggregate type around the scalar. */
+ if (TYPE_IS_PADDING_P (gnu_type)
+ && Reverse_Storage_Order (gnat_array)
+ && !Is_Bit_Packed_Array (gnat_array)
+ && Is_Scalar_Type (gnat_type))
+ gnu_type = set_reverse_storage_order_on_pad_type (gnu_type);
+
if (Has_Volatile_Components (gnat_array))
{
const int quals
@@ -6718,6 +6742,15 @@ gnat_to_gnu_field (Entity_Id gnat_field, tree gnu_record_type, int packed,
else
gnu_pos = NULL_TREE;
+ /* If the field's type is a padded type made for a scalar field of a record
+ type with reverse storage order, we need to propagate the reverse storage
+ order to the padding type since it is the innermost enclosing aggregate
+ type around the scalar. */
+ if (TYPE_IS_PADDING_P (gnu_field_type)
+ && TYPE_REVERSE_STORAGE_ORDER (gnu_record_type)
+ && Is_Scalar_Type (gnat_field_type))
+ gnu_field_type = set_reverse_storage_order_on_pad_type (gnu_field_type);
+
gcc_assert (TREE_CODE (gnu_field_type) != RECORD_TYPE
|| !TYPE_CONTAINS_TEMPLATE_P (gnu_field_type));
@@ -7034,6 +7067,8 @@ components_to_record (tree gnu_record_type, Node_Id gnat_component_list,
TYPE_NAME (gnu_union_type) = gnu_union_name;
TYPE_ALIGN (gnu_union_type) = 0;
TYPE_PACKED (gnu_union_type) = TYPE_PACKED (gnu_record_type);
+ TYPE_REVERSE_STORAGE_ORDER (gnu_union_type)
+ = TYPE_REVERSE_STORAGE_ORDER (gnu_record_type);
}
/* If all the fields down to this level have a rep clause, find out
@@ -7085,6 +7120,8 @@ components_to_record (tree gnu_record_type, Node_Id gnat_component_list,
record actually gets only the alignment required. */
TYPE_ALIGN (gnu_variant_type) = TYPE_ALIGN (gnu_record_type);
TYPE_PACKED (gnu_variant_type) = TYPE_PACKED (gnu_record_type);
+ TYPE_REVERSE_STORAGE_ORDER (gnu_variant_type)
+ = TYPE_REVERSE_STORAGE_ORDER (gnu_record_type);
/* Similarly, if the outer record has a size specified and all
the fields have a rep clause, we can propagate the size. */
@@ -7177,6 +7214,8 @@ components_to_record (tree gnu_record_type, Node_Id gnat_component_list,
position at this level. */
tree gnu_rep_type = make_node (RECORD_TYPE);
tree gnu_rep_part;
+ TYPE_REVERSE_STORAGE_ORDER (gnu_rep_type)
+ = TYPE_REVERSE_STORAGE_ORDER (gnu_variant_type);
finish_record_type (gnu_rep_type, NULL_TREE, 0, debug_info);
gnu_rep_part
= create_rep_part (gnu_rep_type, gnu_variant_type,
@@ -7384,6 +7423,8 @@ components_to_record (tree gnu_record_type, Node_Id gnat_component_list,
gnu_field_list = gnu_rep_list;
else
{
+ TYPE_REVERSE_STORAGE_ORDER (gnu_rep_type)
+ = TYPE_REVERSE_STORAGE_ORDER (gnu_record_type);
finish_record_type (gnu_rep_type, gnu_rep_list, 1, debug_info);
/* If FIRST_FREE_POS is nonzero, we need to ensure that the fields
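
The decl.c hunks above all serve one purpose: TYPE_REVERSE_STORAGE_ORDER must be set on every aggregate that directly encloses a scalar, i.e. on the record type itself, on the outermost dimension of multidimensional arrays, on variant and rep parts, and on padding types wrapped around scalar components. A hedged C analogue of that propagation, using the same C-level attribute as above and illustrative names only:

/* Illustrative only: the byte order has to be carried by every aggregate
   that directly wraps a scalar, otherwise the scalars it encloses would
   be accessed in native order.  */
struct __attribute__ ((scalar_storage_order ("big-endian"))) inner
{
  unsigned short a, b;
};

struct __attribute__ ((scalar_storage_order ("big-endian"))) outer
{
  struct inner nested;     /* nested aggregate carries the same order  */
  unsigned int  words[4];  /* each array element is stored big-endian  */
};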
diff --git a/gcc/ada/gcc-interface/gigi.h b/gcc/ada/gcc-interface/gigi.h
index 9420fd8..d7a2566 100644
--- a/gcc/ada/gcc-interface/gigi.h
+++ b/gcc/ada/gcc-interface/gigi.h
@@ -154,6 +154,9 @@ extern tree maybe_pad_type (tree type, tree size, unsigned int align,
bool is_user_type, bool definition,
bool set_rm_size);
+/* Return a copy of the padded TYPE but with reverse storage order. */
+extern tree set_reverse_storage_order_on_pad_type (tree type);
+
enum alias_set_op
{
ALIAS_SET_COPY,
diff --git a/gcc/ada/gcc-interface/trans.c b/gcc/ada/gcc-interface/trans.c
index 946c91a..5c093fb 100644
--- a/gcc/ada/gcc-interface/trans.c
+++ b/gcc/ada/gcc-interface/trans.c
@@ -2172,7 +2172,7 @@ Attribute_to_gnu (Node_Id gnat_node, tree *gnu_result_type_p, int attribute)
tree gnu_field_offset;
tree gnu_inner;
machine_mode mode;
- int unsignedp, volatilep;
+ int unsignedp, reversep, volatilep;
gnu_result_type = get_unpadded_type (Etype (gnat_node));
gnu_prefix = remove_conversions (gnu_prefix, true);
@@ -2194,7 +2194,7 @@ Attribute_to_gnu (Node_Id gnat_node, tree *gnu_result_type_p, int attribute)
&& TREE_CODE (gnu_prefix) == FIELD_DECL));
get_inner_reference (gnu_prefix, &bitsize, &bitpos, &gnu_offset,
- &mode, &unsignedp, &volatilep, false);
+ &mode, &unsignedp, &reversep, &volatilep, false);
if (TREE_CODE (gnu_prefix) == COMPONENT_REF)
{
diff --git a/gcc/ada/gcc-interface/utils.c b/gcc/ada/gcc-interface/utils.c
index 3b893b8..224dc00 100644
--- a/gcc/ada/gcc-interface/utils.c
+++ b/gcc/ada/gcc-interface/utils.c
@@ -957,6 +957,7 @@ make_packable_type (tree type, bool in_record)
TYPE_NAME (new_type) = TYPE_NAME (type);
TYPE_JUSTIFIED_MODULAR_P (new_type) = TYPE_JUSTIFIED_MODULAR_P (type);
TYPE_CONTAINS_TEMPLATE_P (new_type) = TYPE_CONTAINS_TEMPLATE_P (type);
+ TYPE_REVERSE_STORAGE_ORDER (new_type) = TYPE_REVERSE_STORAGE_ORDER (type);
if (TREE_CODE (type) == RECORD_TYPE)
TYPE_PADDING_P (new_type) = TYPE_PADDING_P (type);
@@ -1175,14 +1176,15 @@ pad_type_hasher::equal (pad_type_hash *t1, pad_type_hash *t2)
type1 = t1->type;
type2 = t2->type;
- /* We consider that the padded types are equivalent if they pad the same
- type and have the same size, alignment and RM size. Taking the mode
- into account is redundant since it is determined by the others. */
+ /* We consider that the padded types are equivalent if they pad the same type
+ and have the same size, alignment, RM size and storage order. Taking the
+ mode into account is redundant since it is determined by the others. */
return
TREE_TYPE (TYPE_FIELDS (type1)) == TREE_TYPE (TYPE_FIELDS (type2))
&& TYPE_SIZE (type1) == TYPE_SIZE (type2)
&& TYPE_ALIGN (type1) == TYPE_ALIGN (type2)
- && TYPE_ADA_SIZE (type1) == TYPE_ADA_SIZE (type2);
+ && TYPE_ADA_SIZE (type1) == TYPE_ADA_SIZE (type2)
+ && TYPE_REVERSE_STORAGE_ORDER (type1) == TYPE_REVERSE_STORAGE_ORDER (type2);
}
/* Look up the padded TYPE in the hash table and return its canonical version
@@ -1452,6 +1454,31 @@ built:
return record;
}
+
+/* Return a copy of the padded TYPE but with reverse storage order. */
+
+tree
+set_reverse_storage_order_on_pad_type (tree type)
+{
+ tree field, canonical_pad_type;
+
+#ifdef ENABLE_CHECKING
+ /* If the inner type is not scalar then the function does nothing. */
+ tree inner_type = TREE_TYPE (TYPE_FIELDS (type));
+ gcc_assert (!AGGREGATE_TYPE_P (inner_type) && !VECTOR_TYPE_P (inner_type));
+#endif
+
+ /* This is required for the canonicalization. */
+ gcc_assert (TREE_CONSTANT (TYPE_SIZE (type)));
+
+ field = copy_node (TYPE_FIELDS (type));
+ type = copy_type (type);
+ DECL_CONTEXT (field) = type;
+ TYPE_FIELDS (type) = field;
+ TYPE_REVERSE_STORAGE_ORDER (type) = 1;
+ canonical_pad_type = lookup_and_insert_pad_type (type);
+ return canonical_pad_type ? canonical_pad_type : type;
+}
/* Relate the alias sets of GNU_NEW_TYPE and GNU_OLD_TYPE according to OP.
If this is a multi-dimensional array type, do this recursively.
@@ -3357,7 +3384,7 @@ gnat_types_compatible_p (tree t1, tree t2)
return 1;
/* Array types are also compatible if they are constrained and have the same
- domain(s) and the same component type. */
+ domain(s), the same component type and the same scalar storage order. */
if (code == ARRAY_TYPE
&& (TYPE_DOMAIN (t1) == TYPE_DOMAIN (t2)
|| (TYPE_DOMAIN (t1)
@@ -3368,7 +3395,8 @@ gnat_types_compatible_p (tree t1, tree t2)
TYPE_MAX_VALUE (TYPE_DOMAIN (t2)))))
&& (TREE_TYPE (t1) == TREE_TYPE (t2)
|| (TREE_CODE (TREE_TYPE (t1)) == ARRAY_TYPE
- && gnat_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2)))))
+ && gnat_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2))))
+ && TYPE_REVERSE_STORAGE_ORDER (t1) == TYPE_REVERSE_STORAGE_ORDER (t2))
return 1;
return 0;
@@ -4849,17 +4877,38 @@ unchecked_convert (tree type, tree expr, bool notrunc_p)
}
/* If we are converting to an integral type whose precision is not equal
- to its size, first unchecked convert to a record type that contains an
- field of the given precision. Then extract the field. */
+ to its size, first unchecked convert to a record type that contains a
+ field of the given precision. Then extract the result from the field.
+
+ There is a subtlety if the source type is an aggregate type with reverse
+ storage order because its representation is not contiguous in the native
+ storage order, i.e. a direct unchecked conversion to an integral type
+ with N bits of precision cannot read the first N bits of the aggregate
+ type. To overcome it, we do an unchecked conversion to an integral type
+ with reverse storage order and return the resulting value. This also
+ ensures that the result of the unchecked conversion doesn't depend on
+ the endianness of the target machine, but only on the storage order of
+ the aggregate type.
+
+ Finally, for the sake of consistency, we do the unchecked conversion
+ to an integral type with reverse storage order as soon as the source
+ type is an aggregate type with reverse storage order, even if there
+ are no considerations of precision or size involved. */
else if (INTEGRAL_TYPE_P (type)
&& TYPE_RM_SIZE (type)
- && 0 != compare_tree_int (TYPE_RM_SIZE (type),
- GET_MODE_BITSIZE (TYPE_MODE (type))))
+ && (0 != compare_tree_int (TYPE_RM_SIZE (type),
+ GET_MODE_BITSIZE (TYPE_MODE (type)))
+ || (AGGREGATE_TYPE_P (etype)
+ && TYPE_REVERSE_STORAGE_ORDER (etype))))
{
tree rec_type = make_node (RECORD_TYPE);
unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (type));
tree field_type, field;
+ if (AGGREGATE_TYPE_P (etype))
+ TYPE_REVERSE_STORAGE_ORDER (rec_type)
+ = TYPE_REVERSE_STORAGE_ORDER (etype);
+
if (TYPE_UNSIGNED (type))
field_type = make_unsigned_type (prec);
else
@@ -4878,11 +4927,16 @@ unchecked_convert (tree type, tree expr, bool notrunc_p)
/* Similarly if we are converting from an integral type whose precision is
not equal to its size, first copy into a field of the given precision
- and unchecked convert the record type. */
+ and unchecked convert the record type.
+
+ The same considerations as above apply if the target type is an aggregate
+ type with reverse storage order and we also proceed similarly. */
else if (INTEGRAL_TYPE_P (etype)
&& TYPE_RM_SIZE (etype)
- && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
- GET_MODE_BITSIZE (TYPE_MODE (etype))))
+ && (0 != compare_tree_int (TYPE_RM_SIZE (etype),
+ GET_MODE_BITSIZE (TYPE_MODE (etype)))
+ || (AGGREGATE_TYPE_P (type)
+ && TYPE_REVERSE_STORAGE_ORDER (type))))
{
tree rec_type = make_node (RECORD_TYPE);
unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (etype));
@@ -4890,6 +4944,10 @@ unchecked_convert (tree type, tree expr, bool notrunc_p)
vec_alloc (v, 1);
tree field_type, field;
+ if (AGGREGATE_TYPE_P (type))
+ TYPE_REVERSE_STORAGE_ORDER (rec_type)
+ = TYPE_REVERSE_STORAGE_ORDER (type);
+
if (TYPE_UNSIGNED (etype))
field_type = make_unsigned_type (prec);
else
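
The long comment added to unchecked_convert above describes the key subtlety: the representation of a reverse storage order aggregate is not contiguous in native order, so a direct reinterpretation of its first N bits as a native integer reads the wrong bytes. A hedged, self-contained C sketch of the effect (no GCC internals involved, names are illustrative):

/* Illustrative only: reinterpreting the bytes of a big-endian (reverse
   order) aggregate as a native integer gives the wrong value on a
   little-endian target; the bytes must be swapped, which is what routing
   the conversion through an integral type with reverse storage order
   achieves.  */
#include <stdint.h>
#include <stdio.h>
#include <string.h>

int
main (void)
{
  /* Memory image of a record holding 0x12345678 in big-endian order.  */
  const unsigned char image[4] = { 0x12, 0x34, 0x56, 0x78 };
  uint32_t naive, swapped;

  memcpy (&naive, image, sizeof naive);   /* direct reinterpretation     */
  swapped = __builtin_bswap32 (naive);    /* the byte swap gigi arranges */

  /* On a little-endian machine only the swapped value is 0x12345678.  */
  printf ("naive 0x%08x  swapped 0x%08x\n", naive, swapped);
  return 0;
}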
diff --git a/gcc/ada/gcc-interface/utils2.c b/gcc/ada/gcc-interface/utils2.c
index 47446ba..73a9b10 100644
--- a/gcc/ada/gcc-interface/utils2.c
+++ b/gcc/ada/gcc-interface/utils2.c
@@ -1408,11 +1408,11 @@ build_unary_op (enum tree_code op_code, tree result_type, tree operand)
HOST_WIDE_INT bitpos;
tree offset, inner;
machine_mode mode;
- int unsignedp, volatilep;
+ int unsignedp, reversep, volatilep;
inner = get_inner_reference (operand, &bitsize, &bitpos, &offset,
- &mode, &unsignedp, &volatilep,
- false);
+ &mode, &unsignedp, &reversep,
+ &volatilep, false);
/* If INNER is a padding type whose field has a self-referential
size, convert to that inner type. We know the offset is zero
@@ -1916,7 +1916,9 @@ gnat_build_constructor (tree type, vec<constructor_elt, va_gc> *v)
|| (TREE_CODE (type) == RECORD_TYPE
&& CONSTRUCTOR_BITFIELD_P (obj)
&& !initializer_constant_valid_for_bitfield_p (val))
- || !initializer_constant_valid_p (val, TREE_TYPE (val)))
+ || !initializer_constant_valid_p (val,
+ TREE_TYPE (val),
+ TYPE_REVERSE_STORAGE_ORDER (type)))
allconstant = false;
if (!TREE_READONLY (val))
@@ -2749,6 +2751,7 @@ gnat_rewrite_reference (tree ref, rewrite_fn func, void *data, tree *init)
gnat_rewrite_reference (TREE_OPERAND (ref, 0), func,
data, init),
TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
+ REF_REVERSE_STORAGE_ORDER (result) = REF_REVERSE_STORAGE_ORDER (ref);
break;
case ARRAY_REF:
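
The utils2.c changes are the access-path side of the feature: get_inner_reference now reports a reverse-order flag, gnat_rewrite_reference preserves REF_REVERSE_STORAGE_ORDER on rebuilt references, and gnat_build_constructor passes the constructor's storage order to initializer_constant_valid_p, since a constant stored into a reverse order aggregate must be emitted with its bytes swapped. A hedged C analogue of that last point, illustrative only:

/* Illustrative only: the static initializer below ends up containing the
   bytes 0x12 0x34 0x56 0x78 in memory regardless of the target's native
   endianness, so the validity check for static constructors needs to know
   the storage order of the enclosing type.  */
struct __attribute__ ((scalar_storage_order ("big-endian"))) header
{
  unsigned int magic;
};

static const struct header h = { 0x12345678u };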