about summary refs log tree commit diff
path: root/llvm/utils/UpdateTestChecks/asm.py
diff options
context:
space:
mode:
author    Simon Pilgrim <llvm-dev@redking.me.uk>    2022-10-12 15:57:45 +0100
committer Simon Pilgrim <llvm-dev@redking.me.uk>    2022-10-12 15:57:52 +0100
commit8727248b7989a81fb73ea187831f55c61a7e2c8d (patch)
tree34420ffa6b72d6421ac865902594a0b07de2b927 /llvm/utils/UpdateTestChecks/asm.py
parent3fe2db8cf90cfed3d93456b6d44de03d677f2b57 (diff)
downloadllvm-8727248b7989a81fb73ea187831f55c61a7e2c8d.zip
llvm-8727248b7989a81fb73ea187831f55c61a7e2c8d.tar.gz
llvm-8727248b7989a81fb73ea187831f55c61a7e2c8d.tar.bz2
[UpdateTestChecks] Add basic BPF triple handling
Working on Issue #57872 - it's really useful to be able to autogenerate checks.
Diffstat (limited to 'llvm/utils/UpdateTestChecks/asm.py')
-rw-r--r--llvm/utils/UpdateTestChecks/asm.py20
1 file changed, 20 insertions, 0 deletions
diff --git a/llvm/utils/UpdateTestChecks/asm.py b/llvm/utils/UpdateTestChecks/asm.py
index 3bf1244..d420d96 100644
--- a/llvm/utils/UpdateTestChecks/asm.py
+++ b/llvm/utils/UpdateTestChecks/asm.py
@@ -47,6 +47,13 @@ ASM_FUNCTION_AMDGPU_RE = re.compile(
r'^\s*(\.Lfunc_end[0-9]+:\n|\.section)',
flags=(re.M | re.S))
# Matches one BPF assembly function as emitted by llc: the function label
# echoed in a '#' comment (BPF asm uses '#' comments), an optional CFI/SEH
# prologue line, the body, and the terminating ".Lfunc_end<N>:" label.
# Named groups: 'func' (function name), 'body' (function body text).
ASM_FUNCTION_BPF_RE = re.compile(
    # Label line, optionally underscore-prefixed; name repeated after '@'.
    r'^_?(?P<func>[^:]+):[ \t]*#+[ \t]*@"?(?P=func)"?\n'
    # Optional prologue directive.  Dots are escaped so '.cfi_startproc' /
    # '.seh_proc' match literally instead of "any char + cfi_startproc".
    r'(?:[ \t]+\.cfi_startproc\n|\.seh_proc[^\n]+\n)?'  # drop optional cfi
    r'(?P<body>.*?)\s*'
    # Literal ".Lfunc_end<N>:" terminator (escaped dot, matching the
    # convention used by the other ASM_FUNCTION_*_RE patterns in this file).
    r'\.Lfunc_end[0-9]+:\n',
    flags=(re.M | re.S))
+
ASM_FUNCTION_HEXAGON_RE = re.compile(
r'^_?(?P<func>[^:]+):[ \t]*//[ \t]*@"?(?P=func)"?\n[^:]*?'
r'(?P<body>.*?)\n' # (body of the function)
@@ -292,6 +299,16 @@ def scrub_asm_arm_eabi(asm, args):
asm = common.SCRUB_TRAILING_WHITESPACE_RE.sub(r'', asm)
return asm
def scrub_asm_bpf(asm, args):
  """Normalize BPF assembly output before generating CHECK lines.

  Collapses interior runs of whitespace to a single space (keeping leading
  indentation), expands tabs to a 2-space stop, and strips trailing
  whitespace.  'args' is accepted for signature parity with the other
  scrub_asm_* handlers and is unused here.
  """
  # Scrub runs of whitespace out of the assembly, but leave the leading
  # whitespace in place.
  asm = common.SCRUB_WHITESPACE_RE.sub(r' ', asm)
  # Expand the tabs used for indentation.  Use the str method: the Python 3
  # 'string' module has no expandtabs() function, so the former
  # string.expandtabs(asm, 2) call would raise AttributeError at runtime.
  asm = asm.expandtabs(2)
  # Strip trailing whitespace.
  asm = common.SCRUB_TRAILING_WHITESPACE_RE.sub(r'', asm)
  return asm
+
def scrub_asm_hexagon(asm, args):
# Scrub runs of whitespace out of the assembly, but leave the leading
# whitespace in place.
@@ -461,6 +478,9 @@ def get_run_handler(triple):
'aarch64': (scrub_asm_arm_eabi, ASM_FUNCTION_AARCH64_RE),
'aarch64-apple-darwin': (scrub_asm_arm_eabi, ASM_FUNCTION_AARCH64_DARWIN_RE),
'aarch64-apple-ios': (scrub_asm_arm_eabi, ASM_FUNCTION_AARCH64_DARWIN_RE),
+ 'bpf': (scrub_asm_bpf, ASM_FUNCTION_BPF_RE),
+ 'bpfel': (scrub_asm_bpf, ASM_FUNCTION_BPF_RE),
+ 'bpfeb': (scrub_asm_bpf, ASM_FUNCTION_BPF_RE),
'hexagon': (scrub_asm_hexagon, ASM_FUNCTION_HEXAGON_RE),
'r600': (scrub_asm_amdgpu, ASM_FUNCTION_AMDGPU_RE),
'amdgcn': (scrub_asm_amdgpu, ASM_FUNCTION_AMDGPU_RE),