path: root/libcpp/internal.h
Diffstat (limited to 'libcpp/internal.h')
-rw-r--r--  libcpp/internal.h  651
1 files changed, 651 insertions, 0 deletions
diff --git a/libcpp/internal.h b/libcpp/internal.h
new file mode 100644
index 0000000..9cd9be5
--- /dev/null
+++ b/libcpp/internal.h
@@ -0,0 +1,651 @@
+/* Part of CPP library.
+ Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004
+ Free Software Foundation, Inc.
+
+This program is free software; you can redistribute it and/or modify it
+under the terms of the GNU General Public License as published by the
+Free Software Foundation; either version 2, or (at your option) any
+later version.
+
+This program is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License
+along with this program; if not, write to the Free Software
+Foundation, 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. */
+
+/* This header defines all the internal data structures and functions
+ that need to be visible across files. It should not be used outside
+ cpplib. */
+
+#ifndef LIBCPP_INTERNAL_H
+#define LIBCPP_INTERNAL_H
+
+#include "symtab.h"
+
+#if defined HAVE_ICONV_H && defined HAVE_ICONV
+#include <iconv.h>
+#else
+#define HAVE_ICONV 0
+typedef int iconv_t; /* dummy */
+#endif
+
+struct directive; /* Deliberately incomplete. */
+struct pending_option;
+struct op;
+struct _cpp_strbuf;
+
+typedef bool (*convert_f) (iconv_t, const unsigned char *, size_t,
+ struct _cpp_strbuf *);
+struct cset_converter
+{
+ convert_f func;
+ iconv_t cd;
+};
+
+#ifndef HAVE_UCHAR
+typedef unsigned char uchar;
+#endif
+#define U (const uchar *) /* Intended use: U"string" */
+
+#define BITS_PER_CPPCHAR_T (CHAR_BIT * sizeof (cppchar_t))
+
+/* Test if a sign is valid within a preprocessing number. */
+#define VALID_SIGN(c, prevc) \
+ (((c) == '+' || (c) == '-') && \
+ ((prevc) == 'e' || (prevc) == 'E' \
+ || (((prevc) == 'p' || (prevc) == 'P') \
+ && CPP_OPTION (pfile, extended_numbers))))
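+/* For example, in the pp-number "1e+10" the '+' is a valid sign because
+   the previous character is 'e'; in "0x1.8p+3" the '+' after 'p' is valid
+   only when extended_numbers (C99 hexadecimal floats) is enabled.  */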
+
+#define CPP_OPTION(PFILE, OPTION) ((PFILE)->opts.OPTION)
+#define CPP_BUFFER(PFILE) ((PFILE)->buffer)
+#define CPP_BUF_COLUMN(BUF, CUR) ((CUR) - (BUF)->line_base)
+#define CPP_BUF_COL(BUF) CPP_BUF_COLUMN(BUF, (BUF)->cur)
+
+#define CPP_INCREMENT_LINE(PFILE, COLS_HINT) do { \
+ const struct line_maps *line_table = PFILE->line_table; \
+ const struct line_map *map = &line_table->maps[line_table->used-1]; \
+ unsigned int line = SOURCE_LINE (map, line_table->highest_line); \
+ linemap_line_start (PFILE->line_table, line + 1, COLS_HINT); \
+ } while (0)
+
+/* Maximum nesting of cpp_buffers. We use a static limit, partly for
+ efficiency, and partly to limit runaway recursion. */
+#define CPP_STACK_MAX 200
+
+/* Host alignment handling. */
+struct dummy
+{
+ char c;
+ union
+ {
+ double d;
+ int *p;
+ } u;
+};
+
+#define DEFAULT_ALIGNMENT offsetof (struct dummy, u)
+#define CPP_ALIGN2(size, align) (((size) + ((align) - 1)) & ~((align) - 1))
+#define CPP_ALIGN(size) CPP_ALIGN2 (size, DEFAULT_ALIGNMENT)
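+/* For example, CPP_ALIGN2 (13, 8) rounds 13 up to 16, and CPP_ALIGN
+   rounds a size up to the host alignment derived from struct dummy.  */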
+
+/* Each macro definition is recorded in a cpp_macro structure.
+ Variadic macros cannot occur with traditional cpp. */
+struct cpp_macro
+{
+ /* Parameters, if any. */
+ cpp_hashnode **params;
+
+ /* Replacement tokens (ISO) or replacement text (traditional). See
+ comment at top of cpptrad.c for how traditional function-like
+ macros are encoded. */
+ union
+ {
+ cpp_token *tokens;
+ const uchar *text;
+ } exp;
+
+ /* Definition line number. */
+ fileline line;
+
+ /* Number of tokens in expansion, or bytes for traditional macros. */
+ unsigned int count;
+
+ /* Number of parameters. */
+ unsigned short paramc;
+
+ /* If a function-like macro. */
+ unsigned int fun_like : 1;
+
+ /* If a variadic macro. */
+ unsigned int variadic : 1;
+
+ /* If macro defined in system header. */
+ unsigned int syshdr : 1;
+
+ /* Nonzero if it has been expanded or had its existence tested. */
+ unsigned int used : 1;
+};
+
+#define _cpp_mark_macro_used(NODE) do { \
+ if ((NODE)->type == NT_MACRO && !((NODE)->flags & NODE_BUILTIN)) \
+ (NODE)->value.macro->used = 1; } while (0)
+
+/* A generic memory buffer, and operations on it. */
+typedef struct _cpp_buff _cpp_buff;
+struct _cpp_buff
+{
+ struct _cpp_buff *next;
+ unsigned char *base, *cur, *limit;
+};
+
+extern _cpp_buff *_cpp_get_buff (cpp_reader *, size_t);
+extern void _cpp_release_buff (cpp_reader *, _cpp_buff *);
+extern void _cpp_extend_buff (cpp_reader *, _cpp_buff **, size_t);
+extern _cpp_buff *_cpp_append_extend_buff (cpp_reader *, _cpp_buff *, size_t);
+extern void _cpp_free_buff (_cpp_buff *);
+extern unsigned char *_cpp_aligned_alloc (cpp_reader *, size_t);
+extern unsigned char *_cpp_unaligned_alloc (cpp_reader *, size_t);
+
+#define BUFF_ROOM(BUFF) (size_t) ((BUFF)->limit - (BUFF)->cur)
+#define BUFF_FRONT(BUFF) ((BUFF)->cur)
+#define BUFF_LIMIT(BUFF) ((BUFF)->limit)
+
+/* #include types. */
+enum include_type {IT_INCLUDE, IT_INCLUDE_NEXT, IT_IMPORT, IT_CMDLINE};
+
+union utoken
+{
+ const cpp_token *token;
+ const cpp_token **ptoken;
+};
+
+/* A "run" of tokens; part of a chain of runs. */
+typedef struct tokenrun tokenrun;
+struct tokenrun
+{
+ tokenrun *next, *prev;
+ cpp_token *base, *limit;
+};
+
+/* Accessor macros for struct cpp_context. */
+#define FIRST(c) ((c)->u.iso.first)
+#define LAST(c) ((c)->u.iso.last)
+#define CUR(c) ((c)->u.trad.cur)
+#define RLIMIT(c) ((c)->u.trad.rlimit)
+
+typedef struct cpp_context cpp_context;
+struct cpp_context
+{
+ /* Doubly-linked list. */
+ cpp_context *next, *prev;
+
+ union
+ {
+ /* For ISO macro expansion. Contexts other than the base context
+ are contiguous tokens. e.g. macro expansions, expanded
+ argument tokens. */
+ struct
+ {
+ union utoken first;
+ union utoken last;
+ } iso;
+
+ /* For traditional macro expansion. */
+ struct
+ {
+ const uchar *cur;
+ const uchar *rlimit;
+ } trad;
+ } u;
+
+ /* If non-NULL, a buffer used for storage related to this context.
+ When the context is popped, the buffer is released. */
+ _cpp_buff *buff;
+
+ /* For a macro context, the macro node, otherwise NULL. */
+ cpp_hashnode *macro;
+
+ /* True if utoken element is token, else ptoken. */
+ bool direct_p;
+};
+
+struct lexer_state
+{
+ /* Nonzero if first token on line is CPP_HASH. */
+ unsigned char in_directive;
+
+ /* Nonzero if in a directive that will handle padding tokens itself.
+ #include needs this to avoid problems with computed include and
+ spacing between tokens. */
+ unsigned char directive_wants_padding;
+
+ /* True if we are skipping a failed conditional group. */
+ unsigned char skipping;
+
+ /* Nonzero if in a directive that takes angle-bracketed headers. */
+ unsigned char angled_headers;
+
+ /* Nonzero if in a #if or #elif directive. */
+ unsigned char in_expression;
+
+ /* Nonzero to save comments. Turned off if discard_comments, and in
+ all directives apart from #define. */
+ unsigned char save_comments;
+
+ /* Nonzero if lexing __VA_ARGS__ is valid. */
+ unsigned char va_args_ok;
+
+ /* Nonzero if lexing poisoned identifiers is valid. */
+ unsigned char poisoned_ok;
+
+ /* Nonzero to prevent macro expansion. */
+ unsigned char prevent_expansion;
+
+ /* Nonzero when parsing arguments to a function-like macro. */
+ unsigned char parsing_args;
+
+ /* Nonzero to skip evaluating part of an expression. */
+ unsigned int skip_eval;
+};
+
+/* Special nodes - identifiers with predefined significance. */
+struct spec_nodes
+{
+ cpp_hashnode *n_defined; /* defined operator */
+ cpp_hashnode *n_true; /* C++ keyword true */
+ cpp_hashnode *n_false; /* C++ keyword false */
+ cpp_hashnode *n__VA_ARGS__; /* C99 vararg macros */
+};
+
+typedef struct _cpp_line_note _cpp_line_note;
+struct _cpp_line_note
+{
+ /* Location in the clean line the note refers to. */
+ const uchar *pos;
+
+ /* Type of note. The 9 'from' trigraph characters represent those
+ trigraphs, '\\' an escaped newline, ' ' an escaped newline with
+ intervening space, and anything else is invalid. */
+ unsigned int type;
+};
+
+/* Represents the contents of a file cpplib has read in. */
+struct cpp_buffer
+{
+ const uchar *cur; /* Current location. */
+ const uchar *line_base; /* Start of current physical line. */
+ const uchar *next_line; /* Start of to-be-cleaned logical line. */
+
+ const uchar *buf; /* Entire character buffer. */
+ const uchar *rlimit; /* Writable byte at end of file. */
+
+ _cpp_line_note *notes; /* Array of notes. */
+ unsigned int cur_note; /* Next note to process. */
+ unsigned int notes_used; /* Number of notes. */
+ unsigned int notes_cap; /* Size of allocated array. */
+
+ struct cpp_buffer *prev;
+
+ /* Pointer into the file table; non-NULL if this is a file buffer.
+ Used for include_next and to record control macros. */
+ struct _cpp_file *file;
+
+ /* Value of if_stack at start of this file.
+ Used to prohibit unmatched #endif (etc) in an include file. */
+ struct if_stack *if_stack;
+
+ /* True if we need to get the next clean line. */
+ bool need_line;
+
+ /* True if we have already warned about C++ comments in this file.
+ The warning happens only for C89 extended mode with -pedantic on,
+ or for -Wtraditional, and only once per file (otherwise it would
+ be far too noisy). */
+ unsigned int warned_cplusplus_comments : 1;
+
+ /* True if we don't process trigraphs and escaped newlines. True
+ for preprocessed input, command line directives, and _Pragma
+ buffers. */
+ unsigned int from_stage3 : 1;
+
+ /* At EOF, a buffer is automatically popped. If RETURN_AT_EOF is
+ true, a CPP_EOF token is then returned. Otherwise, the next
+ token from the enclosing buffer is returned. */
+ unsigned int return_at_eof : 1;
+
+ /* One for a system header, two for a C system header file that therefore
+ needs to be extern "C" protected in C++, and zero otherwise. */
+ unsigned char sysp;
+
+ /* The directory of this buffer's file. Its NAME member is not
+ allocated, so we don't need to worry about freeing it. */
+ struct cpp_dir dir;
+
+ /* Descriptor for converting from the input character set to the
+ source character set. */
+ struct cset_converter input_cset_desc;
+};
+
+/* A cpp_reader encapsulates the "state" of a pre-processor run.
+ Applying cpp_get_token repeatedly yields a stream of pre-processor
+ tokens. Usually, there is only one cpp_reader object active. */
+struct cpp_reader
+{
+ /* Top of buffer stack. */
+ cpp_buffer *buffer;
+
+ /* Overlaid buffer (can be different after processing #include). */
+ cpp_buffer *overlaid_buffer;
+
+ /* Lexer state. */
+ struct lexer_state state;
+
+ /* Source line tracking. */
+ struct line_maps *line_table;
+
+ /* The line of the '#' of the current directive. */
+ fileline directive_line;
+
+ /* Memory buffers. */
+ _cpp_buff *a_buff; /* Aligned permanent storage. */
+ _cpp_buff *u_buff; /* Unaligned permanent storage. */
+ _cpp_buff *free_buffs; /* Free buffer chain. */
+
+ /* Context stack. */
+ struct cpp_context base_context;
+ struct cpp_context *context;
+
+ /* If in_directive, the directive if known. */
+ const struct directive *directive;
+
+ /* Search paths for include files. */
+ struct cpp_dir *quote_include; /* "" */
+ struct cpp_dir *bracket_include; /* <> */
+ struct cpp_dir no_search_path; /* No path. */
+
+ /* Chain of all hashed _cpp_file instances. */
+ struct _cpp_file *all_files;
+
+ struct _cpp_file *main_file;
+
+ /* File and directory hash table. */
+ struct htab *file_hash;
+ struct file_hash_entry *file_hash_entries;
+ unsigned int file_hash_entries_allocated, file_hash_entries_used;
+
+ /* Nonzero means don't look for #include "foo" in the source-file
+ directory. */
+ bool quote_ignores_source_dir;
+
+ /* Nonzero if any file has contained #pragma once or #import has
+ been used. */
+ bool seen_once_only;
+
+ /* Multiple include optimization. */
+ const cpp_hashnode *mi_cmacro;
+ const cpp_hashnode *mi_ind_cmacro;
+ bool mi_valid;
+
+ /* Lexing. */
+ cpp_token *cur_token;
+ tokenrun base_run, *cur_run;
+ unsigned int lookaheads;
+
+ /* Nonzero prevents the lexer from re-using the token runs. */
+ unsigned int keep_tokens;
+
+ /* Error counter for exit code. */
+ unsigned int errors;
+
+ /* Buffer to hold macro definition string. */
+ unsigned char *macro_buffer;
+ unsigned int macro_buffer_len;
+
+ /* Descriptor for converting from the source character set to the
+ execution character set. */
+ struct cset_converter narrow_cset_desc;
+
+ /* Descriptor for converting from the source character set to the
+ wide execution character set. */
+ struct cset_converter wide_cset_desc;
+
+ /* Date and time text. Calculated together if either is requested. */
+ const uchar *date;
+ const uchar *time;
+
+ /* EOF token, and a token forcing paste avoidance. */
+ cpp_token avoid_paste;
+ cpp_token eof;
+
+ /* Opaque handle to the dependencies of mkdeps.c. */
+ struct deps *deps;
+
+ /* Obstack holding all macro hash nodes. This never shrinks.
+ See cpphash.c */
+ struct obstack hash_ob;
+
+ /* Obstack holding buffer and conditional structures. This is a
+ real stack. See cpplib.c. */
+ struct obstack buffer_ob;
+
+ /* Pragma table - dynamic, because a library user can add to the
+ list of recognized pragmas. */
+ struct pragma_entry *pragmas;
+
+ /* Call backs to cpplib client. */
+ struct cpp_callbacks cb;
+
+ /* Identifier hash table. */
+ struct ht *hash_table;
+
+ /* Expression parser stack. */
+ struct op *op_stack, *op_limit;
+
+ /* User visible options. */
+ struct cpp_options opts;
+
+ /* Special nodes - identifiers with predefined significance to the
+ preprocessor. */
+ struct spec_nodes spec_nodes;
+
+ /* Whether cpplib owns the hashtable. */
+ bool our_hashtable;
+
+ /* Traditional preprocessing output buffer (a logical line). */
+ struct
+ {
+ uchar *base;
+ uchar *limit;
+ uchar *cur;
+ fileline first_line;
+ } out;
+
+ /* Used for buffer overlays by cpptrad.c. */
+ const uchar *saved_cur, *saved_rlimit, *saved_line_base;
+
+ /* A saved list of the defined macros, for dependency checking
+ of precompiled headers. */
+ struct cpp_savedstate *savedstate;
+};
+
+/* Character classes. Based on the more primitive macros in safe-ctype.h.
+ If the definition of `numchar' looks odd to you, please look up the
+ definition of a pp-number in the C standard [section 6.4.8 of C99].
+
+ In the unlikely event that characters other than \r and \n enter
+ the set is_vspace, the macro handle_newline() in cpplex.c must be
+ updated. */
+#define _dollar_ok(x) ((x) == '$' && CPP_OPTION (pfile, dollars_in_ident))
+
+#define is_idchar(x) (ISIDNUM(x) || _dollar_ok(x))
+#define is_numchar(x) ISIDNUM(x)
+#define is_idstart(x) (ISIDST(x) || _dollar_ok(x))
+#define is_numstart(x) ISDIGIT(x)
+#define is_hspace(x) ISBLANK(x)
+#define is_vspace(x) IS_VSPACE(x)
+#define is_nvspace(x) IS_NVSPACE(x)
+#define is_space(x) IS_SPACE_OR_NUL(x)
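+/* For example, is_idchar ('a') and is_idchar ('1') are nonzero, while
+   is_idchar ('$') is nonzero only if dollars_in_ident is enabled.  */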
+
+/* This table is constant if it can be initialized at compile time,
+ which is the case if cpp was compiled with GCC >=2.7, or another
+ compiler that supports C99. */
+#if HAVE_DESIGNATED_INITIALIZERS
+extern const unsigned char _cpp_trigraph_map[UCHAR_MAX + 1];
+#else
+extern unsigned char _cpp_trigraph_map[UCHAR_MAX + 1];
+#endif
+
+/* Macros. */
+
+static inline int cpp_in_system_header (cpp_reader *);
+static inline int
+cpp_in_system_header (cpp_reader *pfile)
+{
+ return pfile->buffer ? pfile->buffer->sysp : 0;
+}
+#define CPP_PEDANTIC(PF) CPP_OPTION (PF, pedantic)
+#define CPP_WTRADITIONAL(PF) CPP_OPTION (PF, warn_traditional)
+
+/* In cpperror.c */
+extern int _cpp_begin_message (cpp_reader *, int,
+ source_location, unsigned int);
+
+/* In cppmacro.c */
+extern void _cpp_free_definition (cpp_hashnode *);
+extern bool _cpp_create_definition (cpp_reader *, cpp_hashnode *);
+extern void _cpp_pop_context (cpp_reader *);
+extern void _cpp_push_text_context (cpp_reader *, cpp_hashnode *,
+ const uchar *, size_t);
+extern bool _cpp_save_parameter (cpp_reader *, cpp_macro *, cpp_hashnode *);
+extern bool _cpp_arguments_ok (cpp_reader *, cpp_macro *, const cpp_hashnode *,
+ unsigned int);
+extern const uchar *_cpp_builtin_macro_text (cpp_reader *, cpp_hashnode *);
+int _cpp_warn_if_unused_macro (cpp_reader *, cpp_hashnode *, void *);
+/* In cpphash.c */
+extern void _cpp_init_hashtable (cpp_reader *, hash_table *);
+extern void _cpp_destroy_hashtable (cpp_reader *);
+
+/* In cppfiles.c */
+typedef struct _cpp_file _cpp_file;
+extern _cpp_file *_cpp_find_file (cpp_reader *, const char *fname,
+ cpp_dir *start_dir, bool fake);
+extern bool _cpp_find_failed (_cpp_file *);
+extern void _cpp_mark_file_once_only (cpp_reader *, struct _cpp_file *);
+extern void _cpp_fake_include (cpp_reader *, const char *);
+extern bool _cpp_stack_file (cpp_reader *, _cpp_file*, bool);
+extern bool _cpp_stack_include (cpp_reader *, const char *, int,
+ enum include_type);
+extern int _cpp_compare_file_date (cpp_reader *, const char *, int);
+extern void _cpp_report_missing_guards (cpp_reader *);
+extern void _cpp_init_files (cpp_reader *);
+extern void _cpp_cleanup_files (cpp_reader *);
+extern void _cpp_pop_file_buffer (cpp_reader *, struct _cpp_file *);
+extern bool _cpp_save_file_entries (cpp_reader *pfile, FILE *f);
+extern bool _cpp_read_file_entries (cpp_reader *, FILE *);
+
+/* In cppexp.c */
+extern bool _cpp_parse_expr (cpp_reader *);
+extern struct op *_cpp_expand_op_stack (cpp_reader *);
+
+/* In cpplex.c */
+extern void _cpp_process_line_notes (cpp_reader *, int);
+extern void _cpp_clean_line (cpp_reader *);
+extern bool _cpp_get_fresh_line (cpp_reader *);
+extern bool _cpp_skip_block_comment (cpp_reader *);
+extern cpp_token *_cpp_temp_token (cpp_reader *);
+extern const cpp_token *_cpp_lex_token (cpp_reader *);
+extern cpp_token *_cpp_lex_direct (cpp_reader *);
+extern int _cpp_equiv_tokens (const cpp_token *, const cpp_token *);
+extern void _cpp_init_tokenrun (tokenrun *, unsigned int);
+
+/* In cppinit.c. */
+extern void _cpp_maybe_push_include_file (cpp_reader *);
+
+/* In cpplib.c */
+extern int _cpp_test_assertion (cpp_reader *, unsigned int *);
+extern int _cpp_handle_directive (cpp_reader *, int);
+extern void _cpp_define_builtin (cpp_reader *, const char *);
+extern char ** _cpp_save_pragma_names (cpp_reader *);
+extern void _cpp_restore_pragma_names (cpp_reader *, char **);
+extern void _cpp_do__Pragma (cpp_reader *);
+extern void _cpp_init_directives (cpp_reader *);
+extern void _cpp_init_internal_pragmas (cpp_reader *);
+extern void _cpp_do_file_change (cpp_reader *, enum lc_reason, const char *,
+ unsigned int, unsigned int);
+extern void _cpp_pop_buffer (cpp_reader *);
+
+/* In cpptrad.c. */
+extern bool _cpp_scan_out_logical_line (cpp_reader *, cpp_macro *);
+extern bool _cpp_read_logical_line_trad (cpp_reader *);
+extern void _cpp_overlay_buffer (cpp_reader *pfile, const uchar *, size_t);
+extern void _cpp_remove_overlay (cpp_reader *);
+extern bool _cpp_create_trad_definition (cpp_reader *, cpp_macro *);
+extern bool _cpp_expansions_different_trad (const cpp_macro *,
+ const cpp_macro *);
+extern uchar *_cpp_copy_replacement_text (const cpp_macro *, uchar *);
+extern size_t _cpp_replacement_text_len (const cpp_macro *);
+
+/* In cppcharset.c. */
+extern cppchar_t _cpp_valid_ucn (cpp_reader *, const uchar **,
+ const uchar *, int);
+extern void _cpp_destroy_iconv (cpp_reader *);
+extern uchar *_cpp_convert_input (cpp_reader *, const char *, uchar *,
+ size_t, size_t, off_t *);
+extern const char *_cpp_default_encoding (void);
+
+/* Utility routines and macros. */
+#define DSC(str) (const uchar *)str, sizeof str - 1
+#define xnew(T) (T *) xmalloc (sizeof(T))
+#define xcnew(T) (T *) xcalloc (1, sizeof(T))
+#define xnewvec(T, N) (T *) xmalloc (sizeof(T) * (N))
+#define xcnewvec(T, N) (T *) xcalloc (N, sizeof(T))
+#define xobnew(O, T) (T *) obstack_alloc (O, sizeof(T))
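+/* Illustrative uses (not part of the original header): "xnewvec (cpp_token, n)"
+   allocates space for n tokens via xmalloc, and DSC ("bar") expands to a
+   (const uchar *) pointer plus its length, here 3.  */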
+
+/* These are inline functions instead of macros so we can get type
+ checking. */
+static inline int ustrcmp (const uchar *, const uchar *);
+static inline int ustrncmp (const uchar *, const uchar *, size_t);
+static inline size_t ustrlen (const uchar *);
+static inline uchar *uxstrdup (const uchar *);
+static inline uchar *ustrchr (const uchar *, int);
+static inline int ufputs (const uchar *, FILE *);
+
+static inline int
+ustrcmp (const uchar *s1, const uchar *s2)
+{
+ return strcmp ((const char *)s1, (const char *)s2);
+}
+
+static inline int
+ustrncmp (const uchar *s1, const uchar *s2, size_t n)
+{
+ return strncmp ((const char *)s1, (const char *)s2, n);
+}
+
+static inline size_t
+ustrlen (const uchar *s1)
+{
+ return strlen ((const char *)s1);
+}
+
+static inline uchar *
+uxstrdup (const uchar *s1)
+{
+ return (uchar *) xstrdup ((const char *)s1);
+}
+
+static inline uchar *
+ustrchr (const uchar *s1, int c)
+{
+ return (uchar *) strchr ((const char *)s1, c);
+}
+
+static inline int
+ufputs (const uchar *s, FILE *f)
+{
+ return fputs ((const char *)s, f);
+}
+
+#endif /* ! LIBCPP_INTERNAL_H */
id='n1132' href='#n1132'>1132 1133 1134 1135 1136 1137 1138 1139 1140 1141 1142 1143 1144 1145 1146 1147 1148 1149 1150 1151 1152 1153 1154 1155 1156 1157 1158 1159 1160 1161 1162 1163 1164 1165 1166 1167 1168 1169 1170 1171 1172 1173 1174 1175 1176 1177 1178 1179 1180 1181 1182 1183 1184 1185 1186 1187 1188 1189 1190 1191 1192 1193 1194 1195 1196 1197 1198 1199 1200 1201 1202 1203 1204 1205 1206 1207 1208 1209 1210 1211 1212 1213 1214 1215 1216 1217 1218 1219 1220 1221 1222 1223 1224 1225 1226 1227 1228 1229 1230 1231 1232 1233 1234 1235 1236 1237 1238 1239 1240 1241 1242 1243 1244 1245 1246 1247 1248 1249 1250 1251 1252 1253 1254 1255 1256 1257 1258 1259 1260 1261 1262 1263 1264 1265 1266 1267 1268 1269 1270 1271 1272 1273 1274 1275 1276 1277 1278 1279 1280 1281 1282 1283 1284 1285 1286 1287 1288 1289 1290 1291 1292 1293 1294 1295 1296 1297 1298 1299 1300 1301 1302 1303 1304 1305 1306 1307 1308 1309 1310 1311 1312 1313 1314 1315 1316 1317 1318 1319 1320 1321 1322 1323 1324 1325 1326 1327 1328 1329 1330 1331 1332 1333 1334 1335 1336 1337 1338 1339 1340 1341 1342 1343 1344 1345 1346 1347 1348 1349 1350 1351 1352 1353 1354 1355 1356 1357 1358 1359 1360 1361 1362 1363 1364 1365 1366 1367 1368 1369 1370 1371 1372 1373 1374 1375 1376 1377 1378 1379 1380 1381 1382 1383 1384 1385 1386 1387 1388 1389 1390 1391 1392 1393 1394 1395 1396 1397 1398 1399 1400 1401 1402 1403 1404 1405 1406 1407 1408 1409 1410 1411 1412 1413 1414 1415 1416 1417 1418 1419 1420 1421 1422 1423 1424 1425 1426 1427 1428 1429 1430 1431 1432 1433 1434 1435 1436 1437 1438 1439 1440 1441 1442 1443 1444 1445 1446 1447 1448 1449 1450 1451 1452 1453 1454 1455 1456 1457 1458 1459 1460 1461 1462 1463 1464 1465 1466 1467 1468 1469 1470 1471 1472 1473 1474 1475 1476 1477 1478 1479 1480 1481 1482 1483 1484 1485 1486 1487 1488 1489 1490 1491 1492 1493 1494 1495 1496 1497 1498 1499 1500 1501 1502 1503 1504 1505 1506 1507 1508 1509 1510 1511 1512 1513 1514 1515 1516 1517 1518 1519 1520 1521 1522 1523 1524 1525 1526 1527 1528 1529 1530 1531 1532 1533 1534 1535 1536 1537 1538 1539 1540 1541 1542 1543 1544 1545 1546 1547 1548 1549 1550 1551 1552 1553 1554 1555 1556 1557 1558 1559 1560 1561 1562 1563 1564 1565 1566 1567 1568 1569 1570 1571 1572 1573 1574 1575 1576 1577 1578 1579 1580 1581 1582 1583 1584 1585 1586 1587 1588 1589 1590 1591 1592 1593 1594 1595 1596 1597 1598 1599 1600 1601 1602 1603 1604 1605 1606 1607 1608 1609 1610 1611 1612 1613 1614 1615 1616 1617 1618 1619 1620 1621 1622 1623 1624 1625 1626 1627 1628 1629 1630 1631 1632 1633 1634 1635 1636 1637 1638 1639 1640 1641 1642 1643 1644 1645 1646 1647 1648 1649 1650 1651 1652 1653 1654 1655 1656 1657 1658 1659 1660 1661 1662 1663 1664 1665 1666 1667 1668 1669 1670 1671 1672 1673 1674 1675 1676 1677 1678 1679 1680 1681 1682 1683 1684 1685 1686 1687 1688 1689 1690 1691 1692 1693 1694 1695 1696 1697 1698 1699 1700 1701 1702 1703 1704 1705 1706 1707 1708 1709 1710 1711 1712 1713 1714 1715 1716 1717 1718 1719 1720 1721 1722 1723 1724 1725 1726 1727 1728 1729 1730 1731 1732 1733 1734 1735 1736 1737 1738 1739 1740 1741 1742 1743 1744 1745 1746 1747 1748 1749 1750 1751 1752 1753 1754 1755 1756 1757 1758 1759 1760 1761 1762 1763 1764 1765 1766 1767 1768 1769 1770 1771 1772 1773 1774 1775 1776 1777 1778 1779 1780 1781 1782 1783 1784 1785 1786 1787 1788 1789 1790 1791 1792 1793 1794 1795 1796 1797 1798 1799 1800 1801 1802 1803 1804 1805 1806 1807 1808 1809 1810 1811 1812 1813 1814 1815 1816 1817 1818 1819 1820 1821 1822 1823 1824 1825 1826 1827 1828 1829 1830 1831 1832 1833 1834 1835 1836 1837 
1838 1839 1840 1841 1842 1843 1844 1845 1846 1847 1848 1849 1850 1851 1852 1853 1854 1855 1856 1857 1858 1859 1860 1861 1862 1863 1864 1865 1866 1867 1868 1869 1870 1871 1872 1873 1874 1875 1876 1877 1878 1879 1880 1881 1882 1883 1884 1885 1886 1887 1888 1889 1890 1891 1892 1893 1894 1895 1896 1897 1898 1899 1900 1901 1902 1903 1904 1905 1906 1907 1908 1909 1910 1911 1912 1913 1914 1915 1916 1917 1918 1919 1920 1921 1922 1923 1924 1925 1926 1927 1928 1929 1930 1931 1932 1933 1934 1935 1936 1937 1938 1939 1940 1941 1942 1943 1944 1945 1946 1947 1948 1949 1950 1951 1952 1953 1954 1955 1956 1957 1958 1959 1960 1961 1962 1963 1964 1965 1966 1967 1968 1969 1970 1971 1972 1973 1974 1975 1976 1977 1978 1979 1980 1981 1982 1983 1984 1985 1986 1987 1988 1989 1990 1991 1992 1993 1994 1995 1996 1997 1998 1999 2000 2001 2002 2003 2004 2005 2006 2007 2008 2009 2010 2011 2012 2013 2014 2015 2016 2017 2018 2019 2020 2021 2022 2023 2024 2025 2026 2027 2028 2029 2030 2031 2032 2033 2034 2035 2036 2037 2038 2039 2040 2041 2042 2043 2044 2045 2046 2047 2048 2049 2050 2051 2052 2053 2054 2055 2056 2057 2058 2059 2060 2061 2062 2063 2064 2065 2066 2067 2068 2069 2070 2071 2072 2073 2074 2075 2076 2077 2078 2079 2080 2081 2082 2083 2084 2085 2086 2087 2088 2089 2090 2091 2092 2093 2094 2095 2096 2097 2098 2099 2100 2101 2102 2103 2104 2105 2106 2107 2108 2109 2110 2111 2112 2113 2114 2115 2116 2117 2118 2119 2120 2121 2122 2123 2124 2125 2126 2127 2128 2129 2130 2131 2132 2133 2134 2135 2136 2137 2138 2139 2140 2141 2142 2143 2144 2145 2146 2147 2148 2149 2150 2151 2152 2153 2154 2155 2156 2157 2158 2159 2160 2161 2162 2163 2164 2165 2166 2167 2168 2169 2170 2171 2172 2173 2174 2175 2176 2177 2178 2179 2180 2181 2182 2183 2184 2185 2186 2187 2188 2189 2190 2191 2192 2193 2194 2195 2196 2197 2198 2199 2200 2201 2202 2203 2204 2205 2206 2207 2208 2209 2210 2211 2212 2213 2214 2215 2216 2217 2218 2219 2220 2221 2222 2223 2224 2225 2226 2227 2228 2229 2230 2231 2232 2233 2234 2235 2236 2237 2238 2239 2240 2241 2242 2243 2244 2245 2246 2247 2248 2249 2250 2251 2252 2253 2254 2255 2256 2257 2258 2259 2260 2261 2262 2263 2264 2265 2266 2267 2268 2269 2270 2271 2272 2273 2274 2275 2276 2277 2278 2279 2280 2281 2282 2283 2284 2285 2286 2287 2288 2289 2290 2291 2292 2293 2294 2295 2296 2297 2298 2299 2300 2301 2302 2303 2304 2305 2306 2307 2308 2309 2310 2311 2312 2313 2314 2315 2316 2317 2318 2319 2320 2321 2322 2323 2324 2325 2326 2327 2328 2329 2330 2331 2332 2333 2334 2335 2336 2337 2338 2339 2340 2341 2342 2343 2344 2345 2346 2347 2348 2349 2350 2351 2352 2353 2354 2355 2356 2357 2358 2359 2360 2361 2362 2363 2364 2365 2366 2367 2368 2369 2370 2371 2372 2373 2374 2375 2376 2377 2378 2379 2380 2381 2382 2383 2384 2385 2386 2387 2388 2389 2390 2391 2392 2393 2394 2395 2396 2397 2398 2399 2400 2401 2402 2403 2404 2405 2406 2407 2408 2409 2410 2411 2412 2413 2414 2415 2416 2417 2418 2419 2420 2421 2422
/* Expand builtin functions.
   Copyright (C) 1988, 92-98, 1999 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "machmode.h"
#include "rtl.h"
#include "tree.h"
#include "obstack.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "insn-config.h"
#include "expr.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"
#include "defaults.h"
#include "toplev.h"

#define CALLED_AS_BUILT_IN(NODE) \
   (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))

#define CEIL(x,y) (((x) + (y) - 1) / (y))
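/* For example, CEIL (13, 8) is 2: thirteen bytes occupy two 8-byte units.
   CALLED_AS_BUILT_IN is true for any decl whose name starts with
   "__builtin_", e.g. __builtin_alloca.  */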

/* Register mappings for target machines without register windows.  */
#ifndef INCOMING_REGNO
#define INCOMING_REGNO(OUT) (OUT)
#endif
#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(IN) (IN)
#endif

static int get_pointer_alignment	PROTO((tree, unsigned));
static tree c_strlen			PROTO((tree));
static rtx get_memory_rtx		PROTO((tree));
static int apply_args_size		PROTO((void));
static int apply_result_size		PROTO((void));
static rtx result_vector		PROTO((int, rtx));
static rtx expand_builtin_apply_args	PROTO((void));
static rtx expand_builtin_apply_args_1	PROTO((void));
static rtx expand_builtin_apply		PROTO((rtx, rtx, rtx));
static void expand_builtin_return	PROTO((rtx));
static rtx expand_builtin_classify_type	PROTO((tree));
static rtx expand_builtin_mathfn	PROTO((tree, rtx, rtx));
static rtx expand_builtin_constant_p	PROTO((tree));
static rtx expand_builtin_args_info	PROTO((tree));
static rtx expand_builtin_next_arg	PROTO((tree));
static rtx expand_builtin_va_start	PROTO((int, tree));
static rtx expand_builtin_va_end	PROTO((tree));
static rtx expand_builtin_va_copy	PROTO((tree));
#ifdef HAVE_cmpstrsi
static rtx expand_builtin_memcmp	PROTO((tree, tree, rtx));
static rtx expand_builtin_strcmp	PROTO((tree, rtx));
#endif
static rtx expand_builtin_memcpy	PROTO((tree));
static rtx expand_builtin_strcpy	PROTO((tree));
static rtx expand_builtin_memset	PROTO((tree));
static rtx expand_builtin_strlen	PROTO((tree, rtx, enum machine_mode));
static rtx expand_builtin_alloca	PROTO((tree, rtx));
static rtx expand_builtin_ffs		PROTO((tree, rtx, rtx));
static rtx expand_builtin_frame_address	PROTO((tree));

/* Return the alignment in bits of EXP, a pointer valued expression.
   But don't return more than MAX_ALIGN no matter what.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to (if it is not a POINTER_TYPE, 0 is returned).

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

static int
get_pointer_alignment (exp, max_align)
     tree exp;
     unsigned max_align;
{
  unsigned align, inner;

  if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
    return 0;

  align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
  align = MIN (align, max_align);

  while (1)
    {
      switch (TREE_CODE (exp))
	{
	case NOP_EXPR:
	case CONVERT_EXPR:
	case NON_LVALUE_EXPR:
	  exp = TREE_OPERAND (exp, 0);
	  if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
	    return align;
	  inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
	  align = MIN (inner, max_align);
	  break;

	case PLUS_EXPR:
	  /* If sum of pointer + int, restrict our maximum alignment to that
	     imposed by the integer.  If not, we can't do any better than
	     ALIGN.  */
	  if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
	    return align;

	  while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
		  & (max_align - 1))
		 != 0)
	    max_align >>= 1;

	  exp = TREE_OPERAND (exp, 0);
	  break;

	case ADDR_EXPR:
	  /* See what we are pointing at and look at its alignment.  */
	  exp = TREE_OPERAND (exp, 0);
	  if (TREE_CODE (exp) == FUNCTION_DECL)
	    align = FUNCTION_BOUNDARY;
	  else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
	    align = DECL_ALIGN (exp);
#ifdef CONSTANT_ALIGNMENT
	  else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
	    align = CONSTANT_ALIGNMENT (exp, align);
#endif
	  return MIN (align, max_align);

	default:
	  return align;
	}
    }
}

/* Compute the length of a C string.  TREE_STRING_LENGTH is not the right
   way, because it could contain a zero byte in the middle.
   TREE_STRING_LENGTH is the size of the character array, not the string.

   Unfortunately, string_constant can't access the values of const char
   arrays with initializers, so neither can we do so here.  */

static tree
c_strlen (src)
     tree src;
{
  tree offset_node;
  int offset, max;
  char *ptr;

  src = string_constant (src, &offset_node);
  if (src == 0)
    return 0;
  max = TREE_STRING_LENGTH (src);
  ptr = TREE_STRING_POINTER (src);
  if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
    {
      /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
	 compute the offset to the following null if we don't know where to
	 start searching for it.  */
      int i;
      for (i = 0; i < max; i++)
	if (ptr[i] == 0)
	  return 0;
      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  We can assume that the offset falls
	 within the bounds of the string; otherwise, the programmer deserves
	 what he gets.  Subtract the offset from the length of the string,
	 and return that.  */
      /* This would perhaps not be valid if we were dealing with named
         arrays in addition to literal string constants.  */
      return size_binop (MINUS_EXPR, size_int (max), offset_node);
    }

  /* We have a known offset into the string.  Start searching there for
     a null character.  */
  if (offset_node == 0)
    offset = 0;
  else
    {
      /* Did we get a long long offset?  If so, punt.  */
      if (TREE_INT_CST_HIGH (offset_node) != 0)
	return 0;
      offset = TREE_INT_CST_LOW (offset_node);
    }
  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (offset < 0 || offset > max)
    {
      warning ("offset outside bounds of constant string");
      return 0;
    }
  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since OFFSET is our starting index into the string, no further
     calculation is needed.  */
  return size_int (strlen (ptr + offset));
}

/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */
rtx
expand_builtin_return_addr (fndecl_code, count, tem)
     enum built_in_function fndecl_code;
     int count;
     rtx tem;
{
  int i;

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the sparc, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the sparc, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = copy_to_reg (gen_rtx_MEM (Pmode, tem));
    }

  /* For __builtin_frame_address, return what we've got.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return tem;

  /* For __builtin_return_address, get the return address from that
     frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (tem, GET_MODE_SIZE (Pmode)));
  tem = gen_rtx_MEM (Pmode, tem);
#endif
  return tem;
}

/* __builtin_setjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below (and for longjmp) is copied from the handling of
   non-local gotos.

   NOTE: This is intended for use by GNAT and the exception handling
   scheme in the compiler and will only work in the method used by
   them.  */
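/* A sketch of the intended source-level usage, given the five-word
   buffer described above (illustrative only, not code from this file):

	void *buf[5];
	if (__builtin_setjmp (buf) == 0)
	  ...		first-time-through path
	else
	  ...		reached again via __builtin_longjmp (buf, 1)  */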

rtx
expand_builtin_setjmp (buf_addr, target, first_label, next_label)
     rtx buf_addr;
     rtx target;
     rtx first_label, next_label;
{
  rtx lab1 = gen_label_rtx ();
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  enum machine_mode value_mode;
  rtx stack_save;

  value_mode = TYPE_MODE (integer_type_node);

#ifdef POINTERS_EXTEND_UNSIGNED
  buf_addr = convert_memory_address (Pmode, buf_addr);
#endif

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  if (target == 0 || GET_CODE (target) != REG
      || REGNO (target) < FIRST_PSEUDO_REGISTER)
    target = gen_reg_rtx (value_mode);

  emit_queue ();

  /* We store the frame pointer and the address of lab1 in the buffer
     and use the rest of it for the stack save area, which is
     machine-dependent.  */

#ifndef BUILTIN_SETJMP_FRAME_VALUE
#define BUILTIN_SETJMP_FRAME_VALUE virtual_stack_vars_rtx
#endif

  emit_move_insn (gen_rtx_MEM (Pmode, buf_addr),
		  BUILTIN_SETJMP_FRAME_VALUE);
  emit_move_insn (validize_mem
		  (gen_rtx_MEM (Pmode,
				plus_constant (buf_addr,
					       GET_MODE_SIZE (Pmode)))),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, lab1)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);

  /* If there is further processing to do, do it.  */
#ifdef HAVE_builtin_setjmp_setup
  if (HAVE_builtin_setjmp_setup)
    emit_insn (gen_builtin_setjmp_setup (buf_addr));
#endif

  /* Set TARGET to zero and branch to the first-time-through label.  */
  emit_move_insn (target, const0_rtx);
  emit_jump_insn (gen_jump (first_label));
  emit_barrier ();
  emit_label (lab1);

  /* Tell flow about the strange goings on.  Putting `lab1' on
     `nonlocal_goto_handler_labels' indicates that function
     calls may traverse the arc back to this label.  */

  current_function_has_nonlocal_label = 1;
  nonlocal_goto_handler_labels =
    gen_rtx_EXPR_LIST (VOIDmode, lab1, nonlocal_goto_handler_labels);

  /* Clobber the FP when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  The code below is from expand_end_bindings
     in stmt.c; see detailed documentation there.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      size_t i;
      static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == sizeof elim_regs / sizeof elim_regs [0])
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.
	     If there hasn't been space allocated for it yet, make
	     some now.  */
	  if (arg_pointer_save_area == 0)
	    arg_pointer_save_area
	      = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
	  emit_move_insn (virtual_incoming_args_rtx,
			  copy_to_reg (arg_pointer_save_area));
	}
    }
#endif

#ifdef HAVE_builtin_setjmp_receiver
  if (HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver (lab1));
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
    if (HAVE_nonlocal_goto_receiver)
      emit_insn (gen_nonlocal_goto_receiver ());
    else
#endif
      {
	; /* Nothing */
      }

  /* Set TARGET, and branch to the next-time-through label.  */
  emit_move_insn (target, const1_rtx);
  emit_jump_insn (gen_jump (next_label));
  emit_barrier ();

  return target;
}

/* __builtin_longjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below is copied from the handling of non-local gotos.

   NOTE: This is intended for use by GNAT and the exception handling
   scheme in the compiler and will only work in the method used by
   them.  */

void
expand_builtin_longjmp (buf_addr, value)
     rtx buf_addr, value;
{
  rtx fp, lab, stack;
  enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);

#ifdef POINTERS_EXTEND_UNSIGNED
  buf_addr = convert_memory_address (Pmode, buf_addr);
#endif
  buf_addr = force_reg (Pmode, buf_addr);

  /* We used to store value in static_chain_rtx, but that fails if pointers
     are smaller than integers.  We instead require that the user must pass
     a second argument of 1, because that is what builtin_setjmp will
     return.  This also makes EH slightly more efficient, since we are no
     longer copying around a value that we don't care about.  */
  if (value != const1_rtx)
    abort ();

#ifdef HAVE_builtin_longjmp
  if (HAVE_builtin_longjmp)
    emit_insn (gen_builtin_longjmp (buf_addr));
  else
#endif
    {
      fp = gen_rtx_MEM (Pmode, buf_addr);
      lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
					       GET_MODE_SIZE (Pmode)));

      stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
						   2 * GET_MODE_SIZE (Pmode)));

      /* Pick up FP, label, and SP from the block and jump.  This code is
	 from expand_goto in stmt.c; see there for detailed comments.  */
#if HAVE_nonlocal_goto
      if (HAVE_nonlocal_goto)
	/* We have to pass a value to the nonlocal_goto pattern that will
	   get copied into the static_chain pointer, but it does not matter
	   what that value is, because builtin_setjmp does not use it.  */
	emit_insn (gen_nonlocal_goto (value, fp, stack, lab));
      else
#endif
	{
	  lab = copy_to_reg (lab);

	  emit_move_insn (hard_frame_pointer_rtx, fp);
	  emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);

	  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
	  emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
	  emit_indirect_jump (lab);
	}
    }
}

/* Get a MEM rtx for expression EXP which can be used in a string instruction
   (cmpstrsi, movstrsi, ..).  */
static rtx
get_memory_rtx (exp)
     tree exp;
{
  rtx mem;
  int is_aggregate;

  mem = gen_rtx_MEM (BLKmode,
		     memory_address (BLKmode,
				     expand_expr (exp, NULL_RTX,
						  ptr_mode, EXPAND_SUM)));

  RTX_UNCHANGING_P (mem) = TREE_READONLY (exp);

  /* Figure out the type of the object pointed to.  Set MEM_IN_STRUCT_P
     if the value is the address of a structure or if the expression is
     cast to a pointer to structure type.  */
  is_aggregate = 0;

  while (TREE_CODE (exp) == NOP_EXPR)
    {
      tree cast_type = TREE_TYPE (exp);
      if (TREE_CODE (cast_type) == POINTER_TYPE
	  && AGGREGATE_TYPE_P (TREE_TYPE (cast_type)))
	{
	  is_aggregate = 1;
	  break;
	}
      exp = TREE_OPERAND (exp, 0);
    }

  if (is_aggregate == 0)
    {
      tree type;

      if (TREE_CODE (exp) == ADDR_EXPR)
	/* If this is the address of an object, check whether the
	   object is an array.  */
	type = TREE_TYPE (TREE_OPERAND (exp, 0));
      else
	type = TREE_TYPE (TREE_TYPE (exp));
      is_aggregate = AGGREGATE_TYPE_P (type);
    }

  MEM_SET_IN_STRUCT_P (mem, is_aggregate);
  return mem;
}

/* Built-in functions to perform an untyped call and return.  */

/* For each register that may be used for calling a function, this
   gives a mode used to copy the register's value.  VOIDmode indicates
   the register is not used for calling a function.  If the machine
   has register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.  */
static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];

/* For each register that may be used for returning values, this gives
   a mode used to copy the register's value.  VOIDmode indicates the
   register is not used for returning values.  If the machine has
   register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.  */
static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];

/* For each register that may be used for calling a function, this
   gives the offset of that register into the block returned by
   __builtin_apply_args.  0 indicates that the register is not
   used for calling a function.  */
static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];

/* Return the offset of register REGNO into the block returned by 
   __builtin_apply_args.  This is not declared static, since it is
   needed in objc-act.c.  */

int 
apply_args_register_offset (regno)
     int regno;
{
  apply_args_size ();

  /* Arguments are always put in outgoing registers (in the argument
     block) if such make sense.  */
#ifdef OUTGOING_REGNO
  regno = OUTGOING_REGNO(regno);
#endif
  return apply_args_reg_offset[regno];
}

/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size ()
{
  static int size = -1;
  int align, regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (struct_value_rtx)
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    /* Search for the proper mode for copying this register's
	       value.  I'm not sure this is right, but it works so far.  */
	    enum machine_mode best_mode = VOIDmode;

	    for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
		 mode != VOIDmode;
		 mode = GET_MODE_WIDER_MODE (mode))
	      if (HARD_REGNO_MODE_OK (regno, mode)
		  && HARD_REGNO_NREGS (regno, mode) == 1)
		best_mode = mode;

	    if (best_mode == VOIDmode)
	      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
		   mode != VOIDmode;
		   mode = GET_MODE_WIDER_MODE (mode))
		if (HARD_REGNO_MODE_OK (regno, mode)
		    && (mov_optab->handlers[(int) mode].insn_code
			!= CODE_FOR_nothing))
		  best_mode = mode;

	    mode = best_mode;
	    if (mode == VOIDmode)
	      abort ();

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    apply_args_reg_offset[regno] = size;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = VOIDmode;
	    apply_args_reg_offset[regno] = 0;
	  }
    }
  return size;
}

/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size ()
{
  static int size = -1;
  int align, regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_VALUE_REGNO_P (regno))
	  {
	    /* Search for the proper mode for copying this register's
	       value.  I'm not sure this is right, but it works so far.  */
	    enum machine_mode best_mode = VOIDmode;

	    for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
		 mode != TImode;
		 mode = GET_MODE_WIDER_MODE (mode))
	      if (HARD_REGNO_MODE_OK (regno, mode))
		best_mode = mode;

	    if (best_mode == VOIDmode)
	      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
		   mode != VOIDmode;
		   mode = GET_MODE_WIDER_MODE (mode))
		if (HARD_REGNO_MODE_OK (regno, mode)
		    && (mov_optab->handlers[(int) mode].insn_code
			!= CODE_FOR_nothing))
		  best_mode = mode;

	    mode = best_mode;
	    if (mode == VOIDmode)
	      abort ();

	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}

#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  */

static rtx
result_vector (savep, result)
     int savep;
     rtx result;
{
  int regno, size, align, nelts;
  enum machine_mode mode;
  rtx reg, mem;
  rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
  
  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
	mem = change_address (result, mode,
			      plus_constant (XEXP (result, 0), size));
	savevec[nelts++] = (savep
			    ? gen_rtx_SET (VOIDmode, mem, reg)
			    : gen_rtx_SET (VOIDmode, reg, mem));
	size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */

/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args_1 ()
{
  rtx registers;
  int size, align, regno;
  enum machine_mode mode;

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value_rtx)
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	rtx tem;

	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;

	tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

#ifdef STACK_REGS
        /* For reg-stack.c's stack register household.
	   Compare with a similar piece of code in function.c.  */

        emit_insn (gen_rtx_USE (mode, tem));
#endif

	emit_move_insn (change_address (registers, mode,
					plus_constant (XEXP (registers, 0),
						       size)),
			tem);
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
		  copy_to_reg (virtual_incoming_args_rtx));
  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_value_incoming_rtx)
    {
      emit_move_insn (change_address (registers, Pmode,
				      plus_constant (XEXP (registers, 0),
						     size)),
		      copy_to_reg (struct_value_incoming_rtx));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}

/* __builtin_apply_args returns a block of memory allocated on
   the stack into which is stored the arg pointer, structure
   value address, static chain, and all the registers that might
   possibly be used in performing a function call.  The code is
   moved to the start of the function so the incoming values are
   saved.  */
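/* Illustrative use of the untyped call builtins together (a sketch of
   assumed usage, not code from this file):

	void *args = __builtin_apply_args ();
	void *result = __builtin_apply (fn, args, 64);
	__builtin_return (result);

   where fn is a function pointer and 64 stands for the size of the
   argument block to copy.  */
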
static rtx
expand_builtin_apply_args ()
{
  /* Don't do __builtin_apply_args more than once in a function.
     Save the result of the first call and reuse it.  */
  if (apply_args_value != 0)
    return apply_args_value;
  {
    /* When this function is called, it means that registers must be
       saved on entry to this function.  So we migrate the
       call to the first insn of this function.  */
    rtx temp;
    rtx seq;

    start_sequence ();
    temp = expand_builtin_apply_args_1 ();
    seq = get_insns ();
    end_sequence ();

    apply_args_value = temp;

    /* Put the sequence after the NOTE that starts the function.
       If this is inside a SEQUENCE, make the outer-level insn
       chain current, so the code is placed at the start of the
       function.  */
    push_topmost_sequence ();
    emit_insns_before (seq, NEXT_INSN (get_insns ()));
    pop_topmost_sequence ();
    return temp;
  }
}

/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.  */

static rtx
expand_builtin_apply (function, arguments, argsize)
     rtx function, arguments, argsize;
{
  int size, align, regno;
  enum machine_mode mode;
  rtx incoming_args, result, reg, dest, call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* ??? The argsize value should be adjusted here.  */

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args,
		  gen_rtx_MEM (Pmode, arguments));
#ifndef STACK_GROWS_DOWNWARD
  incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
				incoming_args, 0, OPTAB_LIB_WIDEN);
#endif

  /* Perform postincrements before actually calling the function.  */
  emit_queue ();

  /* Push a new argument block and copy the arguments.  */
  do_pending_stack_adjust ();

  /* Save the stack with nonlocal if available */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
  else
#endif
    emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);

  /* Push a block of memory onto the stack to store the memory arguments.
     Save the address in a register, and copy the memory arguments.  ??? I
     haven't figured out how the calling convention macros affect this,
     but it's likely that the source and/or destination addresses in
     the block copy will need updating in machine specific ways.  */
  dest = allocate_dynamic_stack_space (argsize, 0, 0);
  emit_block_move (gen_rtx_MEM (BLKmode, dest),
		   gen_rtx_MEM (BLKmode, incoming_args),
		   argsize,
		   PARM_BOUNDARY / BITS_PER_UNIT);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value_rtx)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg,
			change_address (arguments, mode,
					plus_constant (XEXP (arguments, 0),
						       size)));

	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value_rtx)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value,
		      change_address (arguments, Pmode,
				      plus_constant (XEXP (arguments, 0),
						     size)));
      emit_move_insn (struct_value_rtx, value);
      if (GET_CODE (struct_value_rtx) == REG)
	  use_reg (&call_fusage, struct_value_rtx);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);

  /* Ensure the address is valid.  A SYMBOL_REF is already valid, so there
     is no need, and we don't want to load it into a register as an
     optimization, because prepare_call_address already did that if it
     should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
#ifdef HAVE_untyped_call
  if (HAVE_untyped_call)
    emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
				      result, result_vector (1, result)));
  else
#endif
#ifdef HAVE_call_value
  if (HAVE_call_value)
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    if (valreg)
	      abort (); /* HAVE_untyped_call required.  */
	    valreg = gen_rtx_REG (mode, regno);
	  }

      emit_call_insn (gen_call_value (valreg,
				      gen_rtx_MEM (FUNCTION_MODE, function),
				      const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (change_address (result, GET_MODE (valreg),
				      XEXP (result, 0)),
		      valreg);
    }
  else
#endif
    abort ();

  /* Find the CALL insn we just emitted.  */
  for (call_insn = get_last_insn ();
       call_insn && GET_CODE (call_insn) != CALL_INSN;
       call_insn = PREV_INSN (call_insn))
    ;

  if (! call_insn)
    abort ();

  /* Put the register usage information on the CALL.  If there is already
     some usage information, put ours at the end.  */
  if (CALL_INSN_FUNCTION_USAGE (call_insn))
    {
      rtx link;

      for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
	   link = XEXP (link, 1))
	;

      XEXP (link, 1) = call_fusage;
    }
  else
    CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;

  /* Restore the stack.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
  else
#endif
    emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);

  /* Return the address of the result block.  */
  return copy_addr_to_reg (XEXP (result, 0));
}

/* Perform an untyped return.  */
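/* Illustration (hypothetical user code): the result block produced by
   __builtin_apply above can be handed straight back to the caller with

       __builtin_return (__builtin_apply ((void (*) ()) target, args, 64));

   which is what expand_builtin_return implements.  */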

static void
expand_builtin_return (result)
     rtx result;
{
  int size, align, regno;
  enum machine_mode mode;
  rtx reg;
  rtx call_fusage = 0;

  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

#ifdef HAVE_untyped_return
  if (HAVE_untyped_return)
    {
      emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
      emit_barrier ();
      return;
    }
#endif

  /* Restore the return value and note that each value is used.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
	emit_move_insn (reg,
			change_address (result, mode,
					plus_constant (XEXP (result, 0),
						       size)));

	push_to_sequence (call_fusage);
	emit_insn (gen_rtx_USE (VOIDmode, reg));
	call_fusage = get_insns ();
	end_sequence ();
	size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insns (call_fusage);

  /* Return whatever values were restored by jumping directly to the end
     of the function.  */
  expand_null_return ();
}

/* Expand a call to __builtin_classify_type with arguments found in
   ARGLIST.  */
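/* For illustration (a consequence of the checks below, not a separate
   guarantee): `__builtin_classify_type (3)' folds to integer_type_class,
   `__builtin_classify_type (3.0)' to real_type_class, and
   `__builtin_classify_type ("x")' typically to pointer_type_class, since
   in C the argument normally undergoes array-to-pointer decay before this
   code ever sees its type.  */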
static rtx
expand_builtin_classify_type (arglist)
     tree arglist;
{
  if (arglist != 0)
    {
      tree type = TREE_TYPE (TREE_VALUE (arglist));
      enum tree_code code = TREE_CODE (type);
      if (code == VOID_TYPE)
	return GEN_INT (void_type_class);
      if (code == INTEGER_TYPE)
	return GEN_INT (integer_type_class);
      if (code == CHAR_TYPE)
	return GEN_INT (char_type_class);
      if (code == ENUMERAL_TYPE)
	return GEN_INT (enumeral_type_class);
      if (code == BOOLEAN_TYPE)
	return GEN_INT (boolean_type_class);
      if (code == POINTER_TYPE)
	return GEN_INT (pointer_type_class);
      if (code == REFERENCE_TYPE)
	return GEN_INT (reference_type_class);
      if (code == OFFSET_TYPE)
	return GEN_INT (offset_type_class);
      if (code == REAL_TYPE)
	return GEN_INT (real_type_class);
      if (code == COMPLEX_TYPE)
	return GEN_INT (complex_type_class);
      if (code == FUNCTION_TYPE)
	return GEN_INT (function_type_class);
      if (code == METHOD_TYPE)
	return GEN_INT (method_type_class);
      if (code == RECORD_TYPE)
	return GEN_INT (record_type_class);
      if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
	return GEN_INT (union_type_class);
      if (code == ARRAY_TYPE)
	{
	  if (TYPE_STRING_FLAG (type))
	    return GEN_INT (string_type_class);
	  else
	    return GEN_INT (array_type_class);
	}
      if (code == SET_TYPE)
	return GEN_INT (set_type_class);
      if (code == FILE_TYPE)
	return GEN_INT (file_type_class);
      if (code == LANG_TYPE)
	return GEN_INT (lang_type_class);
    }
  return GEN_INT (no_type_class);
}

/* Expand expression EXP, which is a call to __builtin_constant_p.  */
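/* Roughly (an illustration of the three branches below): a literal such
   as `__builtin_constant_p (4 * 1024)' yields 1 right away; an argument
   with side effects, or one of pointer or aggregate type that is not a
   literal, yields 0; anything else is wrapped in a CONSTANT_P_RTX so that
   CSE can later decide whether it turned out to be constant.  */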
static rtx
expand_builtin_constant_p (exp)
     tree exp;
{
  tree arglist = TREE_OPERAND (exp, 1);
  enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));

  if (arglist == 0)
    return const0_rtx;
  else
    {
      tree arg = TREE_VALUE (arglist);
      rtx tmp;

      /* We return 1 for a numeric type that's known to be a constant
	 value at compile-time or for an aggregate type that's a
	 literal constant.  */
      STRIP_NOPS (arg);

      /* If we know this is a constant, return the constant 1.  */
      if (TREE_CODE_CLASS (TREE_CODE (arg)) == 'c'
	  || (TREE_CODE (arg) == CONSTRUCTOR
	      && TREE_CONSTANT (arg))
	  || (TREE_CODE (arg) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST))
	return const1_rtx;

      /* If we aren't going to be running CSE or this expression
	 has side effects, show we don't know it to be a constant.
	 Likewise if it's a pointer or aggregate type, since in those
	 cases we only want literals; those are only optimized
	 when generating RTL, not later.  */
      if (TREE_SIDE_EFFECTS (arg) || cse_not_expected
	  || AGGREGATE_TYPE_P (TREE_TYPE (arg))
	  || POINTER_TYPE_P (TREE_TYPE (arg)))
	return const0_rtx;

      /* Otherwise, emit (constant_p_rtx (ARG)) and let CSE get a
	 chance to see if it can deduce whether ARG is constant.  */

      tmp = expand_expr (arg, NULL_RTX, VOIDmode, 0);
      tmp = gen_rtx_CONSTANT_P_RTX (value_mode, tmp);
      return tmp;
    }
}

/* Expand a call to one of the builtin math functions (sin, cos, or sqrt).
   Return 0 if a normal call should be emitted rather than expanding the
   function in-line.  EXP is the expression that is a call to the builtin
   function; if convenient, the result should be placed in TARGET.
   SUBTARGET may be used as the target for computing one of EXP's operands.  */
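/* Concretely (an illustration of the logic below, not a new guarantee):
   for `y = sqrt (x);' on a target that provides a sqrt pattern, the optab
   expansion emits the hardware instruction; unless -ffast-math is in
   effect, the result is then compared with itself and, if it is a NaN,
   errno is set to EDOM (or the library sqrt is called to do so).  */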
static rtx
expand_builtin_mathfn (exp, target, subtarget)
     tree exp;
     rtx target, subtarget;
{
  optab builtin_optab;  
  rtx op0, insns;
  tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
  tree arglist = TREE_OPERAND (exp, 1);

  if (arglist == 0
      /* Arg could be wrong type if user redeclared this fcn wrong.  */
      || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
    return 0;

  /* Stabilize and compute the argument.  */
  if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
      && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
    {
      exp = copy_node (exp);
      arglist = copy_node (arglist);
      TREE_OPERAND (exp, 1) = arglist;
      TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
    }
  op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);

  /* Make a suitable register to place result in.  */
  target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

  emit_queue ();
  start_sequence ();

  switch (DECL_FUNCTION_CODE (fndecl))
    {
     case BUILT_IN_SIN:
      builtin_optab = sin_optab; break;
     case BUILT_IN_COS:
      builtin_optab = cos_optab; break;
     case BUILT_IN_FSQRT:
      builtin_optab = sqrt_optab; break;
     default:
      abort ();
    }

  /* Compute into TARGET.
     Set TARGET to wherever the result comes back.  */
  target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
			builtin_optab, op0, target, 0);

  /* If we were unable to expand via the builtin, stop the
     sequence (without outputting the insns) and return 0, causing
     a call to the library function.  */
  if (target == 0)
    {
      end_sequence ();
      return 0;
    }

  /* Check the results by default.  But if flag_fast_math is turned on,
     then assume sqrt will always be called with valid arguments.  */

  if (flag_errno_math && ! flag_fast_math)
    {
      rtx lab1;

      /* Don't define the builtin FP instructions
	 if your machine is not IEEE.  */
      if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
	abort ();

      lab1 = gen_label_rtx ();

      /* Test the result; if it is NaN, set errno=EDOM because
	 the argument was not in the domain.  */
      emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
			       0, 0, lab1);

#ifdef TARGET_EDOM
	{
#ifdef GEN_ERRNO_RTX
	  rtx errno_rtx = GEN_ERRNO_RTX;
#else
	  rtx errno_rtx
	    = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
#endif

	  emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
	}
#else
      /* We can't set errno=EDOM directly; let the library call do it.
	 Pop the arguments right away in case the call gets deleted.  */
      NO_DEFER_POP;
      expand_call (exp, target, 0);
      OK_DEFER_POP;
#endif

      emit_label (lab1);
    }

  /* Output the entire sequence.  */
  insns = get_insns ();
  end_sequence ();
  emit_insns (insns);
 
  return target;
}

/* Expand expression EXP, which is a call to the strlen builtin.  Return 0
   if we failed and the caller should emit a normal call; otherwise try to
   get the result in TARGET, if convenient (and in mode MODE if that's
   convenient).  */
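/* For example (illustrative): `strlen ("hello")' has a length that
   c_strlen can compute, so it simply folds to the constant 5; for
   `strlen (p)' with an arbitrary char *p the expansion below is only used
   when the target provides a strlen pattern (strlen_optab) in a suitable
   mode, otherwise 0 is returned and a library call is made.  */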
static rtx
expand_builtin_strlen (exp, target, mode)
     tree exp;
     rtx target;
     enum machine_mode mode;
{
  tree arglist = TREE_OPERAND (exp, 1);
  enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));

  if (arglist == 0
      /* Arg could be non-pointer if user redeclared this fcn wrong.  */
      || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
    return 0;
  else
    {
      tree src = TREE_VALUE (arglist);
      tree len = c_strlen (src);

      int align
	= get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;

      rtx result, src_rtx, char_rtx;
      enum machine_mode insn_mode = value_mode, char_mode;
      enum insn_code icode;

      /* If the length is known, just return it.  */
      if (len != 0)
	return expand_expr (len, target, mode, EXPAND_MEMORY_USE_BAD);

      /* If SRC is not a pointer type, don't do this operation inline.  */
      if (align == 0)
	return 0;

      /* Call a function if we can't compute strlen in the right mode.  */

      while (insn_mode != VOIDmode)
	{
	  icode = strlen_optab->handlers[(int) insn_mode].insn_code;
	  if (icode != CODE_FOR_nothing)
	    break;

	  insn_mode = GET_MODE_WIDER_MODE (insn_mode);
	}
      if (insn_mode == VOIDmode)
	return 0;

      /* Make a place to write the result of the instruction.  */
      result = target;
      if (! (result != 0
	     && GET_CODE (result) == REG
	     && GET_MODE (result) == insn_mode
	     && REGNO (result) >= FIRST_PSEUDO_REGISTER))
	result = gen_reg_rtx (insn_mode);

      /* Make sure the operands are acceptable to the predicates.  */

      if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
	result = gen_reg_rtx (insn_mode);
      src_rtx = memory_address (BLKmode,
				expand_expr (src, NULL_RTX, ptr_mode,
					     EXPAND_NORMAL));

      if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
	src_rtx = copy_to_mode_reg (Pmode, src_rtx);

      /* Check the string is readable and has an end.  */
      if (current_function_check_memory_usage)
	emit_library_call (chkr_check_str_libfunc, 1, VOIDmode, 2,
			   src_rtx, Pmode,
			   GEN_INT (MEMORY_USE_RO),
			   TYPE_MODE (integer_type_node));

      char_rtx = const0_rtx;
      char_mode = insn_operand_mode[(int)icode][2];
      if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
	char_rtx = copy_to_mode_reg (char_mode, char_rtx);

      emit_insn (GEN_FCN (icode) (result,
				  gen_rtx_MEM (BLKmode, src_rtx),
				  char_rtx, GEN_INT (align)));

      /* Return the value in the proper mode for this function.  */
      if (GET_MODE (result) == value_mode)
	return result;
      else if (target != 0)
	{
	  convert_move (target, result, 0);
	  return target;
	}
      else
	return convert_to_mode (value_mode, result, 0);
    }
}

/* Expand a call to the memcpy builtin, with arguments in ARGLIST.  */
static rtx
expand_builtin_memcpy (arglist)
     tree arglist;
{
  if (arglist == 0
      /* Arg could be non-pointer if user redeclared this fcn wrong.  */
      || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
      || TREE_CHAIN (arglist) == 0
      || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
	  != POINTER_TYPE)
      || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
      || (TREE_CODE (TREE_TYPE (TREE_VALUE
				(TREE_CHAIN (TREE_CHAIN (arglist)))))
	  != INTEGER_TYPE))
    return 0;
  else
    {
      tree dest = TREE_VALUE (arglist);
      tree src = TREE_VALUE (TREE_CHAIN (arglist));
      tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));

      int src_align
	= get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
      int dest_align
	= get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
      rtx dest_mem, src_mem, dest_addr, len_rtx;

      /* If either SRC or DEST is not a pointer type, don't do
	 this operation in-line.  */
      if (src_align == 0 || dest_align == 0)
	return 0;

      dest_mem = get_memory_rtx (dest);
      src_mem = get_memory_rtx (src);
      len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);

      /* Just copy the rights of SRC to the rights of DEST.  */
      if (current_function_check_memory_usage)
	emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
			   XEXP (dest_mem, 0), Pmode,
			   XEXP (src_mem, 0), Pmode,
			   len_rtx, TYPE_MODE (sizetype));

      /* Copy word part most expediently.  */
      dest_addr
	= emit_block_move (dest_mem, src_mem, len_rtx,
			   MIN (src_align, dest_align));

      if (dest_addr == 0)
	dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);

      return dest_addr;
    }
}

/* Expand expression EXP, which is a call to the strcpy builtin.  Return 0
   if we failed; the caller should then emit a normal call.  */
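/* Illustration: since c_strlen of a literal is known, a call such as
   `strcpy (buf, "abc")' is handled here by appending the length 4
   (strlen plus 1 for the terminating nul) to the argument list and then
   expanding it exactly as `memcpy (buf, "abc", 4)' would be.  */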
static rtx
expand_builtin_strcpy (exp)
     tree exp;
{
  tree arglist = TREE_OPERAND (exp, 1);
  rtx result;

  if (arglist == 0
      /* Arg could be non-pointer if user redeclared this fcn wrong.  */
      || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
      || TREE_CHAIN (arglist) == 0
      || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
    return 0;
  else
    {
      tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));

      if (len == 0)
	return 0;

      len = size_binop (PLUS_EXPR, len, integer_one_node);

      chainon (arglist, build_tree_list (NULL_TREE, len));
    }
  result = expand_builtin_memcpy (arglist);
  if (! result)
    TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
  return result;
}

/* Expand expression EXP, which is a call to the memset builtin.  Return 0
   if we failed; the caller should then emit a normal call.  */
static rtx
expand_builtin_memset (exp)
     tree exp;
{
  tree arglist = TREE_OPERAND (exp, 1);

  if (arglist == 0
      /* Arg could be non-pointer if user redeclared this fcn wrong.  */
      || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
      || TREE_CHAIN (arglist) == 0
      || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
	  != INTEGER_TYPE)
      || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
      || (TREE_CODE (TREE_TYPE (TREE_VALUE
				(TREE_CHAIN (TREE_CHAIN (arglist)))))
	  != INTEGER_TYPE))
    return 0;
  else
    {
      tree dest = TREE_VALUE (arglist);
      tree val = TREE_VALUE (TREE_CHAIN (arglist));
      tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));

      int dest_align
	= get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
      rtx dest_mem, dest_addr, len_rtx;

      /* If DEST is not a pointer type, don't do this 
	 operation in-line.  */
      if (dest_align == 0)
	return 0;

      /* If the arguments have side-effects, then we can only evaluate
	 them at most once.  The following code evaluates them twice if
	 they are not constants because we break out to expand_call
	 in that case.  They can't be constants if they have side-effects
	 so we can check for that first.  Alternatively, we could call
	 save_expr to make multiple evaluation safe.  */
      if (TREE_SIDE_EFFECTS (val) || TREE_SIDE_EFFECTS (len))
	return 0;

      /* If VAL is not 0, don't do this operation in-line. */
      if (expand_expr (val, NULL_RTX, VOIDmode, 0) != const0_rtx)
	return 0;

      /* If LEN does not expand to a constant, don't do this
	 operation in-line.  */
      len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
      if (GET_CODE (len_rtx) != CONST_INT)
	return 0;

      dest_mem = get_memory_rtx (dest);
	   
      /* Just check DST is writable and mark it as readable.  */
      if (current_function_check_memory_usage)
	emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
			   XEXP (dest_mem, 0), Pmode,
			   len_rtx, TYPE_MODE (sizetype),
			   GEN_INT (MEMORY_USE_WO),
			   TYPE_MODE (integer_type_node));


      dest_addr = clear_storage (dest_mem, len_rtx, dest_align);

      if (dest_addr == 0)
	dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);

      return dest_addr;
    }
}

#ifdef HAVE_cmpstrsi
/* Expand expression EXP, which is a call to the memcmp or the strcmp builtin.
   ARGLIST is the argument list for this call.  Return 0 if we failed and the
   caller should emit a normal call, otherwise try to get the result in
   TARGET, if convenient.  */
static rtx
expand_builtin_memcmp (exp, arglist, target)
     tree exp;
     tree arglist;
     rtx target;
{
  /* If we need to check memory accesses, call the library function.  */
  if (current_function_check_memory_usage)
    return 0;

  if (arglist == 0
      /* Arg could be non-pointer if user redeclared this fcn wrong.  */
      || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
      || TREE_CHAIN (arglist) == 0
      || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
      || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
      || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
    return 0;
  else if (!HAVE_cmpstrsi)
    return 0;

  {
    enum machine_mode mode;
    tree arg1 = TREE_VALUE (arglist);
    tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
    tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
    rtx result;

    int arg1_align
      = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
    int arg2_align
      = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
    enum machine_mode insn_mode
      = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];

    /* If we don't have POINTER_TYPE, call the function.  */
    if (arg1_align == 0 || arg2_align == 0)
      return 0;

    /* Make a place to write the result of the instruction.  */
    result = target;
    if (! (result != 0
	   && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
	   && REGNO (result) >= FIRST_PSEUDO_REGISTER))
      result = gen_reg_rtx (insn_mode);

    emit_insn (gen_cmpstrsi (result, get_memory_rtx (arg1),
			     get_memory_rtx (arg2),
			     expand_expr (len, NULL_RTX, VOIDmode, 0),
			     GEN_INT (MIN (arg1_align, arg2_align))));

    /* Return the value in the proper mode for this function.  */
    mode = TYPE_MODE (TREE_TYPE (exp));
    if (GET_MODE (result) == mode)
      return result;
    else if (target != 0)
      {
	convert_move (target, result, 0);
	return target;
      }
    else
      return convert_to_mode (mode, result, 0);
  }
}

/* Expand expression EXP, which is a call to the strcmp builtin.  Return 0
   if we failed and the caller should emit a normal call; otherwise try to
   get the result in TARGET, if convenient.  */
static rtx
expand_builtin_strcmp (exp, target)
     tree exp;
     rtx target;
{
  tree arglist = TREE_OPERAND (exp, 1);

  /* If we need to check memory accesses, call the library function.  */
  if (current_function_check_memory_usage)
    return 0;

  if (arglist == 0
      /* Arg could be non-pointer if user redeclared this fcn wrong.  */
      || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
      || TREE_CHAIN (arglist) == 0
      || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
    return 0;
  else if (!HAVE_cmpstrsi)
    return 0;
  {
    tree arg1 = TREE_VALUE (arglist);
    tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
    tree len, len2;
    rtx result;
    len = c_strlen (arg1);
    if (len)
      len = size_binop (PLUS_EXPR, integer_one_node, len);
    len2 = c_strlen (arg2);
    if (len2)
      len2 = size_binop (PLUS_EXPR, integer_one_node, len2);

    /* If we don't have a constant length for the first, use the length
       of the second, if we know it.  We don't require a constant for
       this case; some cost analysis could be done if both are available
       but neither is constant.  For now, assume they're equally cheap.

       If both strings have constant lengths, use the smaller.  This
       could arise if optimization results in strcmp being called with
       two fixed strings, or if the code was machine-generated.  We should
       add some code to the `memcmp' handler above to deal with such
       situations, someday.  */
    if (!len || TREE_CODE (len) != INTEGER_CST)
      {
	if (len2)
	  len = len2;
	else if (len == 0)
	  return 0;
      }
    else if (len2 && TREE_CODE (len2) == INTEGER_CST)
      {
	if (tree_int_cst_lt (len2, len))
	  len = len2;
      }

    chainon (arglist, build_tree_list (NULL_TREE, len));
    result = expand_builtin_memcmp (exp, arglist, target);
    if (! result)
      TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
    return result;
  }
}
#endif

/* Expand a call to __builtin_saveregs, generating the result in TARGET,
   if that's convenient.  */
rtx
expand_builtin_saveregs ()
{
  rtx val, seq;

  /* Don't do __builtin_saveregs more than once in a function.
     Save the result of the first call and reuse it.  */
  if (saveregs_value != 0)
    return saveregs_value;

  /* When this function is called, it means that registers must be
     saved on entry to this function.  So we migrate the call to the
     first insn of this function.  */

  start_sequence ();

#ifdef EXPAND_BUILTIN_SAVEREGS
  /* Do whatever the machine needs done in this case.  */
  val = EXPAND_BUILTIN_SAVEREGS ();
#else
  /* ??? We used to try and build up a call to the out of line function,
     guessing about what registers needed saving etc.  This became much
     harder with __builtin_va_start, since we don't have a tree for a
     call to __builtin_saveregs to fall back on.  There was exactly one
     port (i860) that used this code, and I'm unconvinced it could actually
     handle the general case.  So we no longer try to handle anything
     weird and make the backend absorb the evil.  */

  error ("__builtin_saveregs not supported by this target");
  val = const0_rtx;
#endif

  seq = get_insns ();
  end_sequence ();

  saveregs_value = val;

  /* Put the sequence after the NOTE that starts the function.  If this
     is inside a SEQUENCE, make the outer-level insn chain current, so
     the code is placed at the start of the function.  */
  push_topmost_sequence ();
  emit_insns_after (seq, get_insns ());
  pop_topmost_sequence ();

  return val;
}

/* __builtin_args_info (N) returns word N of the arg space info
   for the current function.  The number and meanings of the words
   are controlled by the definition of CUMULATIVE_ARGS.  */
static rtx
expand_builtin_args_info (exp)
     tree exp;
{
  tree arglist = TREE_OPERAND (exp, 1);
  int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
  int *word_ptr = (int *) &current_function_args_info;
#if 0	
  /* These are used by the code below that is #if 0'ed away.  */
  int i;
  tree type, elts, result;
#endif

  if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
    fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
	   __FILE__, __LINE__);

  if (arglist != 0)
    {
      tree arg = TREE_VALUE (arglist);
      if (TREE_CODE (arg) != INTEGER_CST)
	error ("argument of `__builtin_args_info' must be constant");
      else
	{
	  int wordnum = TREE_INT_CST_LOW (arg);

	  if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
	    error ("argument of `__builtin_args_info' out of range");
	  else
	    return GEN_INT (word_ptr[wordnum]);
	}
    }
  else
    error ("missing argument in `__builtin_args_info'");

  return const0_rtx;

#if 0
  for (i = 0; i < nwords; i++)
    elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));

  type = build_array_type (integer_type_node,
			   build_index_type (build_int_2 (nwords, 0)));
  result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
  TREE_CONSTANT (result) = 1;
  TREE_STATIC (result) = 1;
  result = build1 (INDIRECT_REF, build_pointer_type (type), result);
  TREE_CONSTANT (result) = 1;
  return expand_expr (result, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD);
#endif
}

/* Expand ARGLIST, from a call to __builtin_next_arg.  */
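/* Illustration: for `va_start (ap, last)' the stdarg machinery ultimately
   arranges for __builtin_next_arg to be expanded; the value produced is
   simply the incoming argument pointer plus the offset of the anonymous
   arguments, i.e. the address just past the last named parameter.  */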
static rtx
expand_builtin_next_arg (arglist)
     tree arglist;
{
  tree fntype = TREE_TYPE (current_function_decl);

  if ((TYPE_ARG_TYPES (fntype) == 0
       || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
	   == void_type_node))
      && ! current_function_varargs)
    {
      error ("`va_start' used in function with fixed args");
      return const0_rtx;
    }

  if (arglist)
    {
      tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
      tree arg = TREE_VALUE (arglist);

      /* Strip off all nops for the sake of the comparison.  This
	 is not quite the same as STRIP_NOPS.  It does more.
	 We must also strip off INDIRECT_REF for C++ reference
	 parameters.  */
      while (TREE_CODE (arg) == NOP_EXPR
	     || TREE_CODE (arg) == CONVERT_EXPR
	     || TREE_CODE (arg) == NON_LVALUE_EXPR
	     || TREE_CODE (arg) == INDIRECT_REF)
	arg = TREE_OPERAND (arg, 0);
      if (arg != last_parm)
	warning ("second parameter of `va_start' not last named argument");
    }
  else if (! current_function_varargs)
    /* Evidently an out of date version of <stdarg.h>; can't validate
       va_start's second argument, but can still work as intended.  */
    warning ("`__builtin_next_arg' called without an argument");

  return expand_binop (Pmode, add_optab,
		       current_function_internal_arg_pointer,
		       current_function_arg_offset_rtx,
		       NULL_RTX, 0, OPTAB_LIB_WIDEN);
}

/* Make it easier for the backends by protecting the valist argument
   from multiple evaluations.  */

static tree
stabilize_va_list (valist, was_ptr)
     tree valist;
     int was_ptr;
{
  int is_array = TREE_CODE (va_list_type_node) == ARRAY_TYPE;

  if (was_ptr)
    {
      /* If stdarg.h took the address of an array-type valist that was passed
         as a parameter, we'll have taken the address of the parameter itself
         rather than the array as we'd intended.  Undo this mistake.  */
      if (is_array
	  && TREE_CODE (valist) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (TREE_OPERAND (valist, 0))) == POINTER_TYPE)
	{
	  valist = TREE_OPERAND (valist, 0);
	  if (TREE_SIDE_EFFECTS (valist))
	    valist = save_expr (valist);
	}
      else
	{
	  if (TREE_SIDE_EFFECTS (valist))
	    valist = save_expr (valist);
	  valist = fold (build1 (INDIRECT_REF, va_list_type_node, valist));
	}
    }
  else if (TREE_SIDE_EFFECTS (valist))
    {
      if (is_array)
	valist = save_expr (valist);
      else
	{
          valist = build1 (ADDR_EXPR, build_pointer_type (va_list_type_node),
			   valist);
	  TREE_SIDE_EFFECTS (valist) = 1;
	  valist = save_expr (valist);
	  valist = fold (build1 (INDIRECT_REF, va_list_type_node, valist));
	}
    }

  return valist;
}

/* The "standard" implementation of va_start: just assign `nextarg' to
   the variable.  */
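/* In effect (illustration): for a stdarg function this is just
   `ap = <address of the first anonymous argument>;'.  For an old-style
   varargs function the address is first backed up by UNITS_PER_WORD so
   that the dummy named argument is included as well.  */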
void
std_expand_builtin_va_start (stdarg_p, valist, nextarg)
     int stdarg_p ATTRIBUTE_UNUSED;
     tree valist;
     rtx nextarg;
{
  tree t;

  if (!stdarg_p)
    nextarg = plus_constant (nextarg, -UNITS_PER_WORD);

  t = build (MODIFY_EXPR, TREE_TYPE (valist), valist,
	     make_tree (ptr_type_node, nextarg));
  TREE_SIDE_EFFECTS (t) = 1;

  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
}

/* Expand ARGLIST, which is from a call to __builtin_stdarg_va_start or
   __builtin_varargs_va_start, depending on STDARG_P.  */
static rtx
expand_builtin_va_start (stdarg_p, arglist)
     int stdarg_p;
     tree arglist;
{
  rtx nextarg;
  tree chain = arglist, valist;

  if (stdarg_p)
    nextarg = expand_builtin_next_arg (chain = TREE_CHAIN (arglist));
  else
    nextarg = expand_builtin_next_arg (NULL_TREE);

  if (TREE_CHAIN (chain))
    error ("too many arguments to function `va_start'");

  valist = stabilize_va_list (TREE_VALUE (arglist), 1);

#ifdef EXPAND_BUILTIN_VA_START
  EXPAND_BUILTIN_VA_START (stdarg_p, valist, nextarg);
#else
  std_expand_builtin_va_start (stdarg_p, valist, nextarg);
#endif

  return const0_rtx;
}

/* Allocate an alias set for use in storing and reading from the varargs
   spill area.  */
int
get_varargs_alias_set ()
{
  static int set = -1;
  if (set == -1)
    set = new_alias_set ();
  return set;
}

/* The "standard" implementation of va_arg: read the value from the
   current (padded) address and increment by the (padded) size.  */
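/* Worked example (illustrative, assuming PARM_BOUNDARY == 32 on a
   big-endian target): for `va_arg (ap, short)' the type is 2 bytes and
   align is 4, so rounded_size is 4; the value is read from ap + 2 (small
   arguments are padded downward) and ap is then advanced by 4.  On a
   little-endian target the read is from ap + 0 with the same advance.  */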
rtx
std_expand_builtin_va_arg (valist, type)
     tree valist, type;
{
  tree addr_tree, t;
  HOST_WIDE_INT align;
  HOST_WIDE_INT rounded_size;
  rtx addr;

  /* Compute the rounded size of the type.  */
  align = PARM_BOUNDARY / BITS_PER_UNIT;
  rounded_size = (((int_size_in_bytes (type) + align - 1) / align) * align);

  /* Get AP.  */
  addr_tree = valist;
  if (BYTES_BIG_ENDIAN)
    {
      /* Small args are padded downward.  */

      HOST_WIDE_INT adj;
      adj = TREE_INT_CST_LOW (TYPE_SIZE (type)) / BITS_PER_UNIT;
      if (rounded_size > align)
	adj = rounded_size;

      addr_tree = build (PLUS_EXPR, TREE_TYPE (addr_tree), addr_tree,
			 build_int_2 (rounded_size - adj, 0));
    }

  addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);
  addr = copy_to_reg (addr);

  /* Compute new value for AP.  */
  t = build (MODIFY_EXPR, TREE_TYPE (valist), valist,
	     build (PLUS_EXPR, TREE_TYPE (valist), valist,
		    build_int_2 (rounded_size, 0)));
  TREE_SIDE_EFFECTS (t) = 1;
  expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);

  return addr;
}

/* Expand __builtin_va_arg, which is not really a builtin function, but
   a very special sort of operator.  */
rtx
expand_builtin_va_arg (valist, type)
     tree valist, type;
{
  rtx addr, result;

  if (TYPE_MAIN_VARIANT (TREE_TYPE (valist))
      != TYPE_MAIN_VARIANT (va_list_type_node))
    {
      error ("first argument to `__builtin_va_arg' not of type `va_list'");
      addr = const0_rtx;
    }
  else
    {
      /* Make it easier for the backends by protecting the valist argument
         from multiple evaluations.  */
      valist = stabilize_va_list (valist, 0);

#ifdef EXPAND_BUILTIN_VA_ARG
      addr = EXPAND_BUILTIN_VA_ARG (valist, type);
#else
      addr = std_expand_builtin_va_arg (valist, type);
#endif
    }

  result = gen_rtx_MEM (TYPE_MODE (type), addr);
  MEM_ALIAS_SET (result) = get_varargs_alias_set ();

  return result;
}

/* Expand ARGLIST, from a call to __builtin_va_end.  */
static rtx
expand_builtin_va_end (arglist)
     tree arglist;
{
  tree valist = TREE_VALUE (arglist);

#ifdef EXPAND_BUILTIN_VA_END
  valist = stabilize_va_list (valist, 0);
  EXPAND_BUILTIN_VA_END(arglist);
#else
  /* Evaluate for side effects, if needed.  I hate macros that don't
     do that.  */
  if (TREE_SIDE_EFFECTS (valist))
    expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
#endif

  return const0_rtx;
}

/* Expand ARGLIST, from a call to __builtin_va_copy.  We do this as a 
   builtin rather than just as an assignment in stdarg.h because of the
   nastiness of array-type va_list types.  */
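/* Illustration: where va_list is a plain pointer type, `va_copy (d, s)'
   reduces to the assignment `d = s;' below.  Where va_list is an array
   type (a one-element array of structures on some ABIs), the whole object
   is block-copied instead, which is why this cannot be a simple stdarg.h
   macro.  */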
static rtx
expand_builtin_va_copy (arglist)
     tree arglist;
{
  tree dst, src, t;

  dst = TREE_VALUE (arglist);
  src = TREE_VALUE (TREE_CHAIN (arglist));

  dst = stabilize_va_list (dst, 1);
  src = stabilize_va_list (src, 0);

  if (TREE_CODE (va_list_type_node) != ARRAY_TYPE)
    {
      t = build (MODIFY_EXPR, va_list_type_node, dst, src);
      TREE_SIDE_EFFECTS (t) = 1;
      expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
    }
  else
    {
      emit_block_move (expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL),
		       expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL),
		       expand_expr (TYPE_SIZE (va_list_type_node), NULL_RTX,
				    VOIDmode, EXPAND_NORMAL),