/* IA32 VxWorks target definitions for GNU compiler.
   Copyright (C) 2003-2022 Free Software Foundation, Inc.
   Updated by CodeSourcery, LLC.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* VxWorks 7, as of SR0600, uses the ELF ABI and an LLVM-based system
   environment.  Earlier versions have GNU-based environment components and
   use the same ABI as Solaris 2.  */

#if TARGET_VXWORKS7

#undef VXWORKS_PERSONALITY
#define VXWORKS_PERSONALITY "llvm"

#else

#undef ASM_OUTPUT_ALIGNED_BSS
#define ASM_OUTPUT_ALIGNED_BSS(FILE, DECL, NAME, SIZE, ALIGN) \
  asm_output_aligned_bss (FILE, DECL, NAME, SIZE, ALIGN)
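
/* A minimal sketch of the effect (assuming the generic asm_output_aligned_bss
   from varasm): the object is placed in the .bss section behind an alignment
   directive, with a .skip reserving its size, roughly:

       .section .bss
       .align  16
   buf:
       .skip   64

   The exact directives depend on the assembler dialect in use.  */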

#undef TARGET_SUBTARGET_DEFAULT
#define TARGET_SUBTARGET_DEFAULT \
	(MASK_80387 | MASK_IEEE_FP | MASK_FLOAT_RETURNS | MASK_VECT8_RETURNS)

#undef PTRDIFF_TYPE
#define PTRDIFF_TYPE (TARGET_LP64 ? "long int" : "int")

#undef SIZE_TYPE
#define SIZE_TYPE (TARGET_LP64 ? "long unsigned int" : "unsigned int")
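
/* A sketch of the effect of the two defines above: with -m64, TARGET_LP64
   holds, so size_t is "long unsigned int" and ptrdiff_t is "long int"
   (both 8 bytes); with -m32 or -mx32, both fall back to 32-bit "int"
   types.  A hypothetical compile-time check in user code:

     _Static_assert (sizeof (size_t) == (sizeof (void *) == 8 ? 8 : 4),
                     "size_t follows the LP64/ILP32 data model");
*/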

/* We cannot use PC-relative accesses for VxWorks PIC because there is no
   fixed gap between segments.  */
#undef ASM_PREFERRED_EH_DATA_FORMAT

#if TARGET_64BIT_DEFAULT
#undef VXWORKS_SYSCALL_LIBS_RTP
#define VXWORKS_SYSCALL_LIBS_RTP "-lsyscall"
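
/* The intended effect (assuming the common vxworks.h folds
   VXWORKS_SYSCALL_LIBS_RTP into the -mrtp link spec): a 64-bit RTP link
   additionally pulls in the system call stubs, roughly as if -lsyscall had
   been given explicitly on the link line.  */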
#endif

#endif

/* Provide our target-specific DBX_REGISTER_NUMBER.  VxWorks relies on
   the SVR4 numbering.  */

#undef DBX_REGISTER_NUMBER
#define DBX_REGISTER_NUMBER(n) \
  (TARGET_64BIT ? dbx64_register_map[n] : svr4_dbx_register_map[n])
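
/* For illustration (assuming the usual i386 SVR4/DWARF numbering that
   svr4_dbx_register_map encodes): GCC's internal hard register 6 (%ebp)
   maps to debug register 5 and internal register 7 (%esp) to 4, so
   DBX_REGISTER_NUMBER (6) yields 5 in 32-bit mode.  */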

/* CPU macro definitions, ordered to account for the fact that, as of
   SR0650, VxWorks 7 no longer supports CPUs older than PENTIUM4.  */

#define VX_CPUDEF(CPU) builtin_define(VX_CPU_PREFIX "CPU=" #CPU)
#define VX_CPUVDEF(CPU) builtin_define(VX_CPU_PREFIX "CPU_VARIANT=" #CPU)
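
/* For example, VX_CPUDEF (PENTIUM4) expands to
   builtin_define (VX_CPU_PREFIX "CPU=" "PENTIUM4"), so it predefines a
   CPU=PENTIUM4 macro carrying whatever prefix the common vxworks.h
   selected (assuming that prefix is "_VX_" on VxWorks 7 and empty
   otherwise, this yields _VX_CPU=PENTIUM4 or CPU=PENTIUM4), matching the
   CPU=<name> convention of the VxWorks system headers.  */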

#define TARGET_OS_CPP_BUILTINS()			\
  do							\
    {							\
      VXWORKS_OS_CPP_BUILTINS ();			\
      if (TARGET_64BIT)					\
	VX_CPUDEF (X86_64);				\
      else if (TARGET_CPU_P (PENTIUM4))			\
	{						\
	  VX_CPUDEF (PENTIUM4);				\
	  VX_CPUVDEF (PENTIUM4);			\
	}						\
      else if (TARGET_CPU_P (CORE2))			\
	VX_CPUDEF (CORE2);				\
      else if (TARGET_CPU_P (NEHALEM))			\
	VX_CPUDEF (NEHALEM);				\
      else if (TARGET_CPU_P (SANDYBRIDGE))		\
	VX_CPUDEF (SANDYBRIDGE);			\
      else if (TARGET_CPU_P (HASWELL))			\
	VX_CPUDEF (HASWELL);				\
      else if (TARGET_CPU_P (SILVERMONT))		\
	VX_CPUDEF (SILVERMONT);				\
      else if (TARGET_CPU_P (SKYLAKE) || TARGET_CPU_P (SKYLAKE_AVX512)) \
	VX_CPUDEF (SKYLAKE);				\
      else if (TARGET_CPU_P (GOLDMONT))			\
	VX_CPUDEF (GOLDMONT);				\
      else if (TARGET_VXWORKS7)				\
	VX_CPUDEF (PENTIUM4);				\
      else if (TARGET_CPU_P (I386))			\
	VX_CPUDEF (I80386);				\
      else if (TARGET_CPU_P (I486))			\
	VX_CPUDEF (I80486);				\
      else if (TARGET_CPU_P (PENTIUM))			\
	{						\
	  VX_CPUDEF (PENTIUM);				\
	  VX_CPUVDEF (PENTIUM);				\
	}						\
      else if (TARGET_CPU_P (PENTIUMPRO))		\
	{						\
	  VX_CPUDEF (PENTIUM2);				\
	  VX_CPUVDEF (PENTIUMPRO);			\
	}						\
      else						\
	VX_CPUDEF (I80386);				\
    }							\
  while (0)
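
/* An illustrative outcome of the chain above (a sketch; the exact macro
   name depends on VX_CPU_PREFIX): compiling with -march=core2 for a
   VxWorks 7 target predefines _VX_CPU=CORE2, while -march=pentium on a
   pre-7 VxWorks predefines both CPU=PENTIUM and CPU_VARIANT=PENTIUM.  */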

#undef  CPP_SPEC
#define CPP_SPEC VXWORKS_ADDITIONAL_CPP_SPEC
#undef  CC1_SPEC
#define CC1_SPEC VXWORKS_CC1_SPEC
#undef  LIB_SPEC
#define LIB_SPEC VXWORKS_LIB_SPEC
#undef  STARTFILE_SPEC
#define STARTFILE_SPEC VXWORKS_STARTFILE_SPEC
#undef  ENDFILE_SPEC
#define ENDFILE_SPEC VXWORKS_ENDFILE_SPEC
#undef  LINK_SPEC
#define LINK_SPEC VXWORKS_LINK_SPEC

#undef  SUBTARGET_SWITCHES
#define SUBTARGET_SWITCHES EXTRA_SUBTARGET_SWITCHES

#undef SUBTARGET_OVERRIDE_OPTIONS
#define SUBTARGET_OVERRIDE_OPTIONS VXWORKS_OVERRIDE_OPTIONS

/* No _mcount profiling on VxWorks.  */
#undef FUNCTION_PROFILER
#define FUNCTION_PROFILER(FILE,LABELNO) VXWORKS_FUNCTION_PROFILER(FILE,LABELNO)

/* Define this to be nonzero if static stack checking is supported.  */
#define STACK_CHECK_STATIC_BUILTIN 1

/* This platform supports the probing method of stack checking (RTP mode).
   8K is reserved in the stack to propagate exceptions in case of overflow.
   On 64-bit targets, we double that size.  */

#define STACK_CHECK_PROTECT (TARGET_64BIT_DEFAULT ? 16 * 1024 : 8 * 1024)
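
/* A usage sketch: building an RTP with -fstack-check makes prologues probe
   the stack pages they are about to use, keeping the final
   STACK_CHECK_PROTECT bytes (8 KiB, or 16 KiB when the toolchain defaults
   to 64-bit) in reserve so that an overflow exception can still be
   propagated.  */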