diff options
author | Jakub Jelinek <jakub@redhat.com> | 2020-12-03 15:46:54 +0100 |
---|---|---|
committer | Jakub Jelinek <jakub@redhat.com> | 2020-12-03 15:46:54 +0100 |
commit | 896048cf43d5eb21ab7c16553bb9d13b0f890b81 (patch) | |
tree | 3d929f74e3ad0cd70a86d101744be4a961a57450 /gcc/fold-const.c | |
parent | 341035a54aa8954c66b0492f366b8c65fdb34299 (diff) | |
download | gcc-896048cf43d5eb21ab7c16553bb9d13b0f890b81.zip gcc-896048cf43d5eb21ab7c16553bb9d13b0f890b81.tar.gz gcc-896048cf43d5eb21ab7c16553bb9d13b0f890b81.tar.bz2 |
c++: Add __builtin_bit_cast to implement std::bit_cast [PR93121]
The following patch adds __builtin_bit_cast builtin, similarly to
clang or MSVC which implement std::bit_cast using such a builtin too.
It checks the various std::bit_cast requirements, when not constexpr
evaluated acts pretty much like VIEW_CONVERT_EXPR of the source argument
to the destination type and the hardest part is obviously the constexpr
evaluation.
I've left out PDP11 handling of those; I couldn't figure out how exactly
bitfields are laid out there.
2020-12-03 Jakub Jelinek <jakub@redhat.com>
PR libstdc++/93121
* fold-const.h (native_encode_initializer): Add mask argument
defaulted to nullptr.
(find_bitfield_repr_type): Declare.
(native_interpret_aggregate): Declare.
* fold-const.c (find_bitfield_repr_type): New function.
(native_encode_initializer): Add mask argument and support for
filling it. Handle also some bitfields without integral
DECL_BIT_FIELD_REPRESENTATIVE.
(native_interpret_aggregate): New function.
* gimple-fold.h (clear_type_padding_in_mask): Declare.
* gimple-fold.c (struct clear_padding_struct): Add clear_in_mask
member.
(clear_padding_flush): Handle buf->clear_in_mask.
(clear_padding_union): Copy clear_in_mask. Don't error if
buf->clear_in_mask is set.
(clear_padding_type): Don't error if buf->clear_in_mask is set.
(clear_type_padding_in_mask): New function.
(gimple_fold_builtin_clear_padding): Set buf.clear_in_mask to false.
* doc/extend.texi (__builtin_bit_cast): Document.
* c-common.h (enum rid): Add RID_BUILTIN_BIT_CAST.
* c-common.c (c_common_reswords): Add __builtin_bit_cast.
* cp-tree.h (cp_build_bit_cast): Declare.
* cp-tree.def (BIT_CAST_EXPR): New tree code.
* cp-objcp-common.c (names_builtin_p): Handle RID_BUILTIN_BIT_CAST.
(cp_common_init_ts): Handle BIT_CAST_EXPR.
* cxx-pretty-print.c (cxx_pretty_printer::postfix_expression):
Likewise.
* parser.c (cp_parser_postfix_expression): Handle
RID_BUILTIN_BIT_CAST.
* semantics.c (cp_build_bit_cast): New function.
* tree.c (cp_tree_equal): Handle BIT_CAST_EXPR.
(cp_walk_subtrees): Likewise.
* pt.c (tsubst_copy): Likewise.
* constexpr.c (check_bit_cast_type, cxx_eval_bit_cast): New functions.
(cxx_eval_constant_expression): Handle BIT_CAST_EXPR.
(potential_constant_expression_1): Likewise.
* cp-gimplify.c (cp_genericize_r): Likewise.
* g++.dg/cpp2a/bit-cast1.C: New test.
* g++.dg/cpp2a/bit-cast2.C: New test.
* g++.dg/cpp2a/bit-cast3.C: New test.
* g++.dg/cpp2a/bit-cast4.C: New test.
* g++.dg/cpp2a/bit-cast5.C: New test.
Diffstat (limited to 'gcc/fold-const.c')
-rw-r--r-- | gcc/fold-const.c | 440 |
1 files changed, 396 insertions, 44 deletions
diff --git a/gcc/fold-const.c b/gcc/fold-const.c index 632a241..e77d74e 100644 --- a/gcc/fold-const.c +++ b/gcc/fold-const.c @@ -7968,25 +7968,78 @@ native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off) } } +/* Try to find a type whose byte size is smaller or equal to LEN bytes larger + or equal to FIELDSIZE bytes, with underlying mode precision/size multiple + of BITS_PER_UNIT. As native_{interpret,encode}_int works in term of + machine modes, we can't just use build_nonstandard_integer_type. */ + +tree +find_bitfield_repr_type (int fieldsize, int len) +{ + machine_mode mode; + for (int pass = 0; pass < 2; pass++) + { + enum mode_class mclass = pass ? MODE_PARTIAL_INT : MODE_INT; + FOR_EACH_MODE_IN_CLASS (mode, mclass) + if (known_ge (GET_MODE_SIZE (mode), fieldsize) + && known_eq (GET_MODE_PRECISION (mode), + GET_MODE_BITSIZE (mode)) + && known_le (GET_MODE_SIZE (mode), len)) + { + tree ret = lang_hooks.types.type_for_mode (mode, 1); + if (ret && TYPE_MODE (ret) == mode) + return ret; + } + } + + for (int i = 0; i < NUM_INT_N_ENTS; i ++) + if (int_n_enabled_p[i] + && int_n_data[i].bitsize >= (unsigned) (BITS_PER_UNIT * fieldsize) + && int_n_trees[i].unsigned_type) + { + tree ret = int_n_trees[i].unsigned_type; + mode = TYPE_MODE (ret); + if (known_ge (GET_MODE_SIZE (mode), fieldsize) + && known_eq (GET_MODE_PRECISION (mode), + GET_MODE_BITSIZE (mode)) + && known_le (GET_MODE_SIZE (mode), len)) + return ret; + } + + return NULL_TREE; +} + /* Similar to native_encode_expr, but also handle CONSTRUCTORs, VCEs, - NON_LVALUE_EXPRs and nops. */ + NON_LVALUE_EXPRs and nops. If MASK is non-NULL (then PTR has + to be non-NULL and OFF zero), then in addition to filling the + bytes pointed by PTR with the value also clear any bits pointed + by MASK that are known to be initialized, keep them as is for + e.g. uninitialized padding bits or uninitialized fields. 
*/ int native_encode_initializer (tree init, unsigned char *ptr, int len, - int off) + int off, unsigned char *mask) { + int r; + /* We don't support starting at negative offset and -1 is special. */ if (off < -1 || init == NULL_TREE) return 0; + gcc_assert (mask == NULL || (off == 0 && ptr)); + STRIP_NOPS (init); switch (TREE_CODE (init)) { case VIEW_CONVERT_EXPR: case NON_LVALUE_EXPR: - return native_encode_initializer (TREE_OPERAND (init, 0), ptr, len, off); + return native_encode_initializer (TREE_OPERAND (init, 0), ptr, len, off, + mask); default: - return native_encode_expr (init, ptr, len, off); + r = native_encode_expr (init, ptr, len, off); + if (mask) + memset (mask, 0, r); + return r; case CONSTRUCTOR: tree type = TREE_TYPE (init); HOST_WIDE_INT total_bytes = int_size_in_bytes (type); @@ -7999,7 +8052,7 @@ native_encode_initializer (tree init, unsigned char *ptr, int len, { HOST_WIDE_INT min_index; unsigned HOST_WIDE_INT cnt; - HOST_WIDE_INT curpos = 0, fieldsize; + HOST_WIDE_INT curpos = 0, fieldsize, valueinit = -1; constructor_elt *ce; if (TYPE_DOMAIN (type) == NULL_TREE @@ -8014,12 +8067,22 @@ native_encode_initializer (tree init, unsigned char *ptr, int len, if (ptr != NULL) memset (ptr, '\0', MIN (total_bytes - off, len)); - FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce) + for (cnt = 0; ; cnt++) { - tree val = ce->value; - tree index = ce->index; + tree val = NULL_TREE, index = NULL_TREE; HOST_WIDE_INT pos = curpos, count = 0; bool full = false; + if (vec_safe_iterate (CONSTRUCTOR_ELTS (init), cnt, &ce)) + { + val = ce->value; + index = ce->index; + } + else if (mask == NULL + || CONSTRUCTOR_NO_CLEARING (init) + || curpos >= total_bytes) + break; + else + pos = total_bytes; if (index && TREE_CODE (index) == RANGE_EXPR) { if (!tree_fits_shwi_p (TREE_OPERAND (index, 0)) @@ -8037,6 +8100,28 @@ native_encode_initializer (tree init, unsigned char *ptr, int len, pos = (tree_to_shwi (index) - min_index) * fieldsize; } + if (mask && 
!CONSTRUCTOR_NO_CLEARING (init) && curpos != pos) + { + if (valueinit == -1) + { + tree zero = build_constructor (TREE_TYPE (type), NULL); + r = native_encode_initializer (zero, ptr + curpos, + fieldsize, 0, + mask + curpos); + ggc_free (zero); + if (!r) + return 0; + valueinit = curpos; + curpos += fieldsize; + } + while (curpos != pos) + { + memcpy (ptr + curpos, ptr + valueinit, fieldsize); + memcpy (mask + curpos, mask + valueinit, fieldsize); + curpos += fieldsize; + } + } + curpos = pos; if (val) do @@ -8051,6 +8136,8 @@ native_encode_initializer (tree init, unsigned char *ptr, int len, if (ptr) memcpy (ptr + (curpos - o), ptr + (pos - o), fieldsize); + if (mask) + memcpy (mask + curpos, mask + pos, fieldsize); } else if (!native_encode_initializer (val, ptr @@ -8058,7 +8145,10 @@ native_encode_initializer (tree init, unsigned char *ptr, int len, : NULL, fieldsize, off == -1 ? -1 - : 0)) + : 0, + mask + ? mask + curpos + : NULL)) return 0; else { @@ -8073,6 +8163,7 @@ native_encode_initializer (tree init, unsigned char *ptr, int len, unsigned char *p = NULL; int no = 0; int l; + gcc_assert (mask == NULL); if (curpos >= off) { if (ptr) @@ -8086,7 +8177,7 @@ native_encode_initializer (tree init, unsigned char *ptr, int len, no = off - curpos; l = len; } - if (!native_encode_initializer (val, p, l, no)) + if (!native_encode_initializer (val, p, l, no, NULL)) return 0; } curpos += fieldsize; @@ -8100,22 +8191,74 @@ native_encode_initializer (tree init, unsigned char *ptr, int len, { unsigned HOST_WIDE_INT cnt; constructor_elt *ce; + tree fld_base = TYPE_FIELDS (type); + tree to_free = NULL_TREE; + gcc_assert (TREE_CODE (type) == RECORD_TYPE || mask == NULL); if (ptr != NULL) memset (ptr, '\0', MIN (total_bytes - off, len)); - FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce) + for (cnt = 0; ; cnt++) { - tree field = ce->index; - tree val = ce->value; - HOST_WIDE_INT pos, fieldsize; + tree val = NULL_TREE, field = NULL_TREE; + HOST_WIDE_INT pos = 0, 
fieldsize; unsigned HOST_WIDE_INT bpos = 0, epos = 0; - if (field == NULL_TREE) - return 0; + if (to_free) + { + ggc_free (to_free); + to_free = NULL_TREE; + } - pos = int_byte_position (field); - if (off != -1 && (HOST_WIDE_INT) off + len <= pos) - continue; + if (vec_safe_iterate (CONSTRUCTOR_ELTS (init), cnt, &ce)) + { + val = ce->value; + field = ce->index; + if (field == NULL_TREE) + return 0; + + pos = int_byte_position (field); + if (off != -1 && (HOST_WIDE_INT) off + len <= pos) + continue; + } + else if (mask == NULL + || CONSTRUCTOR_NO_CLEARING (init)) + break; + else + pos = total_bytes; + + if (mask && !CONSTRUCTOR_NO_CLEARING (init)) + { + tree fld; + for (fld = fld_base; fld; fld = DECL_CHAIN (fld)) + { + if (TREE_CODE (fld) != FIELD_DECL) + continue; + if (fld == field) + break; + if (DECL_PADDING_P (fld)) + continue; + if (DECL_SIZE_UNIT (fld) == NULL_TREE + || !tree_fits_shwi_p (DECL_SIZE_UNIT (fld))) + return 0; + if (integer_zerop (DECL_SIZE_UNIT (fld))) + continue; + break; + } + if (fld == NULL_TREE) + { + if (ce == NULL) + break; + return 0; + } + fld_base = DECL_CHAIN (fld); + if (fld != field) + { + cnt--; + field = fld; + val = build_constructor (TREE_TYPE (fld), NULL); + to_free = val; + } + } if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE && TYPE_DOMAIN (TREE_TYPE (field)) @@ -8156,30 +8299,49 @@ native_encode_initializer (tree init, unsigned char *ptr, int len, if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN) return 0; - tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field); - if (repr == NULL_TREE - || TREE_CODE (val) != INTEGER_CST - || !INTEGRAL_TYPE_P (TREE_TYPE (repr))) + if (TREE_CODE (val) != INTEGER_CST) return 0; - HOST_WIDE_INT rpos = int_byte_position (repr); + tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field); + tree repr_type = NULL_TREE; + HOST_WIDE_INT rpos = 0; + if (repr && INTEGRAL_TYPE_P (TREE_TYPE (repr))) + { + rpos = int_byte_position (repr); + repr_type = TREE_TYPE (repr); + } + else + { + repr_type = find_bitfield_repr_type 
(fieldsize, len); + if (repr_type == NULL_TREE) + return 0; + HOST_WIDE_INT repr_size = int_size_in_bytes (repr_type); + gcc_assert (repr_size > 0 && repr_size <= len); + if (pos + repr_size <= len) + rpos = pos; + else + { + rpos = len - repr_size; + gcc_assert (rpos <= pos); + } + } + if (rpos > pos) return 0; - wide_int w = wi::to_wide (val, - TYPE_PRECISION (TREE_TYPE (repr))); - int diff = (TYPE_PRECISION (TREE_TYPE (repr)) + wide_int w = wi::to_wide (val, TYPE_PRECISION (repr_type)); + int diff = (TYPE_PRECISION (repr_type) - TYPE_PRECISION (TREE_TYPE (field))); HOST_WIDE_INT bitoff = (pos - rpos) * BITS_PER_UNIT + bpos; if (!BYTES_BIG_ENDIAN) w = wi::lshift (w, bitoff); else w = wi::lshift (w, diff - bitoff); - val = wide_int_to_tree (TREE_TYPE (repr), w); + val = wide_int_to_tree (repr_type, w); unsigned char buf[MAX_BITSIZE_MODE_ANY_INT / BITS_PER_UNIT + 1]; int l = native_encode_int (val, buf, sizeof buf, 0); - if (l * BITS_PER_UNIT != TYPE_PRECISION (TREE_TYPE (repr))) + if (l * BITS_PER_UNIT != TYPE_PRECISION (repr_type)) return 0; if (ptr == NULL) @@ -8192,15 +8354,31 @@ native_encode_initializer (tree init, unsigned char *ptr, int len, { if (!BYTES_BIG_ENDIAN) { - int mask = (1 << bpos) - 1; - buf[pos - rpos] &= ~mask; - buf[pos - rpos] |= ptr[pos - o] & mask; + int msk = (1 << bpos) - 1; + buf[pos - rpos] &= ~msk; + buf[pos - rpos] |= ptr[pos - o] & msk; + if (mask) + { + if (fieldsize > 1 || epos == 0) + mask[pos] &= msk; + else + mask[pos] &= (msk | ~((1 << epos) - 1)); + } } else { - int mask = (1 << (BITS_PER_UNIT - bpos)) - 1; - buf[pos - rpos] &= mask; - buf[pos - rpos] |= ptr[pos - o] & ~mask; + int msk = (1 << (BITS_PER_UNIT - bpos)) - 1; + buf[pos - rpos] &= msk; + buf[pos - rpos] |= ptr[pos - o] & ~msk; + if (mask) + { + if (fieldsize > 1 || epos == 0) + mask[pos] &= ~msk; + else + mask[pos] &= (~msk + | ((1 << (BITS_PER_UNIT - epos)) + - 1)); + } } } /* If the bitfield does not end at byte boundary, handle @@ -8211,27 +8389,37 @@ 
native_encode_initializer (tree init, unsigned char *ptr, int len, { if (!BYTES_BIG_ENDIAN) { - int mask = (1 << epos) - 1; - buf[pos - rpos + fieldsize - 1] &= mask; + int msk = (1 << epos) - 1; + buf[pos - rpos + fieldsize - 1] &= msk; buf[pos - rpos + fieldsize - 1] - |= ptr[pos + fieldsize - 1 - o] & ~mask; + |= ptr[pos + fieldsize - 1 - o] & ~msk; + if (mask && (fieldsize > 1 || bpos == 0)) + mask[pos + fieldsize - 1] &= ~msk; } else { - int mask = (1 << (BITS_PER_UNIT - epos)) - 1; - buf[pos - rpos + fieldsize - 1] &= ~mask; + int msk = (1 << (BITS_PER_UNIT - epos)) - 1; + buf[pos - rpos + fieldsize - 1] &= ~msk; buf[pos - rpos + fieldsize - 1] - |= ptr[pos + fieldsize - 1 - o] & mask; + |= ptr[pos + fieldsize - 1 - o] & msk; + if (mask && (fieldsize > 1 || bpos == 0)) + mask[pos + fieldsize - 1] &= msk; } } if (off == -1 || (pos >= off && (pos + fieldsize <= (HOST_WIDE_INT) off + len))) - memcpy (ptr + pos - o, buf + (pos - rpos), fieldsize); + { + memcpy (ptr + pos - o, buf + (pos - rpos), fieldsize); + if (mask && (fieldsize > (bpos != 0) + (epos != 0))) + memset (mask + pos + (bpos != 0), 0, + fieldsize - (bpos != 0) - (epos != 0)); + } else { /* Partial overlap. */ HOST_WIDE_INT fsz = fieldsize; + gcc_assert (mask == NULL); if (pos < off) { fsz -= (off - pos); @@ -8251,7 +8439,8 @@ native_encode_initializer (tree init, unsigned char *ptr, int len, if (!native_encode_initializer (val, ptr ? ptr + pos - o : NULL, fieldsize, - off == -1 ? -1 : 0)) + off == -1 ? -1 : 0, + mask ? 
mask + pos : NULL)) return 0; } else @@ -8260,6 +8449,7 @@ native_encode_initializer (tree init, unsigned char *ptr, int len, unsigned char *p = NULL; int no = 0; int l; + gcc_assert (mask == NULL); if (pos >= off) { if (ptr) @@ -8273,7 +8463,7 @@ native_encode_initializer (tree init, unsigned char *ptr, int len, no = off - pos; l = len; } - if (!native_encode_initializer (val, p, l, no)) + if (!native_encode_initializer (val, p, l, no, NULL)) return 0; } } @@ -8549,6 +8739,168 @@ can_native_interpret_type_p (tree type) } } +/* Attempt to interpret aggregate of TYPE from bytes encoded in target + byte order at PTR + OFF with LEN bytes. Does not handle unions. */ + +tree +native_interpret_aggregate (tree type, const unsigned char *ptr, int off, + int len) +{ + vec<constructor_elt, va_gc> *elts = NULL; + if (TREE_CODE (type) == ARRAY_TYPE) + { + HOST_WIDE_INT eltsz = int_size_in_bytes (TREE_TYPE (type)); + if (eltsz < 0 || eltsz > len || TYPE_DOMAIN (type) == NULL_TREE) + return NULL_TREE; + + HOST_WIDE_INT cnt = 0; + if (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) + { + if (!tree_fits_shwi_p (TYPE_MAX_VALUE (TYPE_DOMAIN (type)))) + return NULL_TREE; + cnt = tree_to_shwi (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) + 1; + } + if (eltsz == 0) + cnt = 0; + HOST_WIDE_INT pos = 0; + for (HOST_WIDE_INT i = 0; i < cnt; i++, pos += eltsz) + { + tree v = NULL_TREE; + if (pos >= len || pos + eltsz > len) + return NULL_TREE; + if (can_native_interpret_type_p (TREE_TYPE (type))) + { + v = native_interpret_expr (TREE_TYPE (type), + ptr + off + pos, eltsz); + if (v == NULL_TREE) + return NULL_TREE; + } + else if (TREE_CODE (TREE_TYPE (type)) == RECORD_TYPE + || TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE) + v = native_interpret_aggregate (TREE_TYPE (type), ptr, off + pos, + eltsz); + if (v == NULL_TREE) + return NULL_TREE; + CONSTRUCTOR_APPEND_ELT (elts, size_int (i), v); + } + return build_constructor (type, elts); + } + if (TREE_CODE (type) != RECORD_TYPE) + return NULL_TREE; + for (tree 
field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field)) + { + if (TREE_CODE (field) != FIELD_DECL || DECL_PADDING_P (field)) + continue; + tree fld = field; + HOST_WIDE_INT bitoff = 0, pos = 0, sz = 0; + int diff = 0; + tree v = NULL_TREE; + if (DECL_BIT_FIELD (field)) + { + fld = DECL_BIT_FIELD_REPRESENTATIVE (field); + if (fld && INTEGRAL_TYPE_P (TREE_TYPE (fld))) + { + poly_int64 bitoffset; + poly_uint64 field_offset, fld_offset; + if (poly_int_tree_p (DECL_FIELD_OFFSET (field), &field_offset) + && poly_int_tree_p (DECL_FIELD_OFFSET (fld), &fld_offset)) + bitoffset = (field_offset - fld_offset) * BITS_PER_UNIT; + else + bitoffset = 0; + bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field)) + - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (fld))); + diff = (TYPE_PRECISION (TREE_TYPE (fld)) + - TYPE_PRECISION (TREE_TYPE (field))); + if (!bitoffset.is_constant (&bitoff) + || bitoff < 0 + || bitoff > diff) + return NULL_TREE; + } + else + { + if (!tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (field))) + return NULL_TREE; + int fieldsize = TYPE_PRECISION (TREE_TYPE (field)); + int bpos = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field)); + bpos %= BITS_PER_UNIT; + fieldsize += bpos; + fieldsize += BITS_PER_UNIT - 1; + fieldsize /= BITS_PER_UNIT; + tree repr_type = find_bitfield_repr_type (fieldsize, len); + if (repr_type == NULL_TREE) + return NULL_TREE; + sz = int_size_in_bytes (repr_type); + if (sz < 0 || sz > len) + return NULL_TREE; + pos = int_byte_position (field); + if (pos < 0 || pos > len || pos + fieldsize > len) + return NULL_TREE; + HOST_WIDE_INT rpos; + if (pos + sz <= len) + rpos = pos; + else + { + rpos = len - sz; + gcc_assert (rpos <= pos); + } + bitoff = (HOST_WIDE_INT) (pos - rpos) * BITS_PER_UNIT + bpos; + pos = rpos; + diff = (TYPE_PRECISION (repr_type) + - TYPE_PRECISION (TREE_TYPE (field))); + v = native_interpret_expr (repr_type, ptr + off + pos, sz); + if (v == NULL_TREE) + return NULL_TREE; + fld = NULL_TREE; + } + } + + if (fld) + { + sz = 
int_size_in_bytes (TREE_TYPE (fld)); + if (sz < 0 || sz > len) + return NULL_TREE; + tree byte_pos = byte_position (fld); + if (!tree_fits_shwi_p (byte_pos)) + return NULL_TREE; + pos = tree_to_shwi (byte_pos); + if (pos < 0 || pos > len || pos + sz > len) + return NULL_TREE; + } + if (fld == NULL_TREE) + /* Already handled above. */; + else if (can_native_interpret_type_p (TREE_TYPE (fld))) + { + v = native_interpret_expr (TREE_TYPE (fld), + ptr + off + pos, sz); + if (v == NULL_TREE) + return NULL_TREE; + } + else if (TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE + || TREE_CODE (TREE_TYPE (fld)) == ARRAY_TYPE) + v = native_interpret_aggregate (TREE_TYPE (fld), ptr, off + pos, sz); + if (v == NULL_TREE) + return NULL_TREE; + if (fld != field) + { + if (TREE_CODE (v) != INTEGER_CST) + return NULL_TREE; + + /* FIXME: Figure out how to handle PDP endian bitfields. */ + if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN) + return NULL_TREE; + if (!BYTES_BIG_ENDIAN) + v = wide_int_to_tree (TREE_TYPE (field), + wi::lrshift (wi::to_wide (v), bitoff)); + else + v = wide_int_to_tree (TREE_TYPE (field), + wi::lrshift (wi::to_wide (v), + diff - bitoff)); + } + CONSTRUCTOR_APPEND_ELT (elts, field, v); + } + return build_constructor (type, elts); +} + /* Routines for manipulation of native_encode_expr encoded data if the encoded or extracted constant positions and/or sizes aren't byte aligned. */ |