path: root/tcg/tcg-op.h
author    Richard Henderson <rth@twiddle.net>      2012-09-21 17:18:10 -0700
committer Aurelien Jarno <aurelien@aurel32.net>    2012-09-26 00:31:16 +0200
commit    42ce3e2015b26df591e1d4c5aa1814fb8c45c36c (patch)
tree      35ef901409ddc1b08dbb0b33d568540e48203094 /tcg/tcg-op.h
parent    5a696f6ac0641f200cdd2dfe7a6fd397d48ea7bd (diff)
tcg: Emit ANDI as EXTU for appropriate constants
Note that andi_i64 failed to perform even the minimal optimizations promised by the README.

Signed-off-by: Richard Henderson <rth@twiddle.net>
Signed-off-by: Aurelien Jarno <aurelien@aurel32.net>
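What the patch does, in brief: instead of always materializing the AND mask into a temporary and emitting a full and op, tcg_gen_andi_i32/tcg_gen_andi_i64 now recognize the masks 0, all-ones, 0xff, 0xffff (and 0xffffffff in the 64-bit variant) and emit a movi, mov, or unsigned-extension op instead, provided the backend advertises the extension. Below is a minimal, standalone sketch of that dispatch for the 32-bit case; classify_andi_i32 and the has_ext* flags are illustrative stand-ins, not QEMU API, and the real helpers emit TCG ops rather than returning strings.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Illustrative model of the switch added to tcg_gen_andi_i32: report which
 * kind of op a given 32-bit AND mask would produce, with the backend's
 * ext8u/ext16u support passed in as plain booleans. */
static const char *classify_andi_i32(uint32_t mask, bool has_ext8u, bool has_ext16u)
{
    switch (mask) {
    case 0:
        return "movi 0";            /* x & 0 is always 0 */
    case 0xffffffffu:
        return "mov";               /* x & ~0 is x */
    case 0xffu:
        if (has_ext8u) {
            return "ext8u_i32";     /* zero-extend the low byte */
        }
        break;
    case 0xffffu:
        if (has_ext16u) {
            return "ext16u_i32";    /* zero-extend the low 16 bits */
        }
        break;
    }
    return "const + and";           /* generic fallback, as before the patch */
}

int main(void)
{
    const uint32_t masks[] = { 0, 0xffu, 0xffffu, 0xffffffffu, 0xf0f0u };
    for (unsigned i = 0; i < sizeof(masks) / sizeof(masks[0]); i++) {
        printf("0x%08x -> %s\n", masks[i], classify_andi_i32(masks[i], true, true));
    }
    return 0;
}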
Diffstat (limited to 'tcg/tcg-op.h')
-rw-r--r--  tcg/tcg-op.h | 67
1 file changed, 56 insertions(+), 11 deletions(-)
diff --git a/tcg/tcg-op.h b/tcg/tcg-op.h
index 6d28f82..c8633ff 100644
--- a/tcg/tcg-op.h
+++ b/tcg/tcg-op.h
@@ -518,18 +518,34 @@ static inline void tcg_gen_and_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
     }
 }
 
-static inline void tcg_gen_andi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
+static inline void tcg_gen_andi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
 {
-    /* some cases can be optimized here */
-    if (arg2 == 0) {
+    TCGv_i32 t0;
+    /* Some cases can be optimized here. */
+    switch (arg2) {
+    case 0:
         tcg_gen_movi_i32(ret, 0);
-    } else if (arg2 == 0xffffffff) {
+        return;
+    case 0xffffffffu:
         tcg_gen_mov_i32(ret, arg1);
-    } else {
-        TCGv_i32 t0 = tcg_const_i32(arg2);
-        tcg_gen_and_i32(ret, arg1, t0);
-        tcg_temp_free_i32(t0);
-    }
+        return;
+    case 0xffu:
+        /* Don't recurse with tcg_gen_ext8u_i32. */
+        if (TCG_TARGET_HAS_ext8u_i32) {
+            tcg_gen_op2_i32(INDEX_op_ext8u_i32, ret, arg1);
+            return;
+        }
+        break;
+    case 0xffffu:
+        if (TCG_TARGET_HAS_ext16u_i32) {
+            tcg_gen_op2_i32(INDEX_op_ext16u_i32, ret, arg1);
+            return;
+        }
+        break;
+    }
+    t0 = tcg_const_i32(arg2);
+    tcg_gen_and_i32(ret, arg1, t0);
+    tcg_temp_free_i32(t0);
 }
 
 static inline void tcg_gen_or_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
@@ -1120,9 +1136,38 @@ static inline void tcg_gen_and_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
     }
 }
 
-static inline void tcg_gen_andi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
+static inline void tcg_gen_andi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
 {
-    TCGv_i64 t0 = tcg_const_i64(arg2);
+    TCGv_i64 t0;
+    /* Some cases can be optimized here. */
+    switch (arg2) {
+    case 0:
+        tcg_gen_movi_i64(ret, 0);
+        return;
+    case 0xffffffffffffffffull:
+        tcg_gen_mov_i64(ret, arg1);
+        return;
+    case 0xffull:
+        /* Don't recurse with tcg_gen_ext8u_i64. */
+        if (TCG_TARGET_HAS_ext8u_i64) {
+            tcg_gen_op2_i64(INDEX_op_ext8u_i64, ret, arg1);
+            return;
+        }
+        break;
+    case 0xffffu:
+        if (TCG_TARGET_HAS_ext16u_i64) {
+            tcg_gen_op2_i64(INDEX_op_ext16u_i64, ret, arg1);
+            return;
+        }
+        break;
+    case 0xffffffffull:
+        if (TCG_TARGET_HAS_ext32u_i64) {
+            tcg_gen_op2_i64(INDEX_op_ext32u_i64, ret, arg1);
+            return;
+        }
+        break;
+    }
+    t0 = tcg_const_i64(arg2);
     tcg_gen_and_i64(ret, arg1, t0);
     tcg_temp_free_i64(t0);
 }
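A note on the shape of the new code: every case that actually emits something returns immediately, while the 0xff / 0xffff / 0xffffffff cases break out of the switch when the backend does not advertise the matching TCG_TARGET_HAS_ext*u op and fall through to the generic constant-plus-and path at the bottom. That fallback is all the old andi_i64 body ever did, which is the behaviour the commit message flags as missing even the minimal optimizations the README asks for.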