version 1.1.1.3, 2012/10/09 09:19:18
version 1.1.1.4, 2013/07/22 08:25:57

Line 24
 * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

SLJIT_API_FUNC_ATTRIBUTE SLJIT_CONST char* sljit_get_platform_name() | SLJIT_API_FUNC_ATTRIBUTE SLJIT_CONST char* sljit_get_platform_name(void)
{
	return "x86" SLJIT_CPUINFO;
}

Line 67 SLJIT_API_FUNC_ATTRIBUTE SLJIT_CONST char* sljit_get_p

#define TMP_REGISTER (SLJIT_NO_REGISTERS + 1)

static SLJIT_CONST sljit_ub reg_map[SLJIT_NO_REGISTERS + 2] = {
	0, 0, 2, 1, 0, 0, 3, 6, 7, 0, 0, 4, 5
};

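/* The SLJIT_TEMPORARY_EREG* and SLJIT_SAVED_EREG* "extra" registers have no x86-32
   machine register (their reg_map entries are 0); the macro below rewrites operands
   naming them into stack slots addressed through SLJIT_LOCALS_REG. */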
#define CHECK_EXTRA_REGS(p, w, do) \
	if (p >= SLJIT_TEMPORARY_EREG1 && p <= SLJIT_TEMPORARY_EREG2) { \
		w = compiler->temporaries_start + (p - SLJIT_TEMPORARY_EREG1) * sizeof(sljit_w); \ | w = compiler->scratches_start + (p - SLJIT_TEMPORARY_EREG1) * sizeof(sljit_sw); \
		p = SLJIT_MEM1(SLJIT_LOCALS_REG); \
		do; \
	} \
	else if (p >= SLJIT_SAVED_EREG1 && p <= SLJIT_SAVED_EREG2) { \
		w = compiler->saveds_start + (p - SLJIT_SAVED_EREG1) * sizeof(sljit_w); \ | w = compiler->saveds_start + (p - SLJIT_SAVED_EREG1) * sizeof(sljit_sw); \
		p = SLJIT_MEM1(SLJIT_LOCALS_REG); \
		do; \
	}

Line 95 static SLJIT_CONST sljit_ub reg_map[SLJIT_NO_REGISTERS

#ifndef _WIN64
/* 1st passed in rdi, 2nd argument passed in rsi, 3rd in rdx. */
static SLJIT_CONST sljit_ub reg_map[SLJIT_NO_REGISTERS + 4] = {
	0, 0, 6, 1, 8, 11, 3, 15, 14, 13, 12, 4, 2, 7, 9
};
/* low-map. reg_map & 0x7. */
static SLJIT_CONST sljit_ub reg_lmap[SLJIT_NO_REGISTERS + 4] = {
	0, 0, 6, 1, 0, 3, 3, 7, 6, 5, 4, 4, 2, 7, 1
};
#else
/* 1st passed in rcx, 2nd argument passed in rdx, 3rd in r8. */
static SLJIT_CONST sljit_ub reg_map[SLJIT_NO_REGISTERS + 4] = {
	0, 0, 2, 1, 11, 13, 3, 6, 7, 14, 15, 4, 10, 8, 9
};
/* low-map. reg_map & 0x7. */
static SLJIT_CONST sljit_ub reg_lmap[SLJIT_NO_REGISTERS + 4] = {
	0, 0, 2, 1, 3, 5, 3, 6, 7, 6, 7, 4, 2, 0, 1
};
#endif

Line 118 static SLJIT_CONST sljit_ub reg_lmap[SLJIT_NO_REGISTER

#define REX_B 0x41
#define REX 0x40

typedef unsigned int sljit_uhw; |
typedef int sljit_hw; |

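/* Both macros test whether a 64-bit value fits into a sign-extended 32-bit immediate. */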
#define IS_HALFWORD(x) ((x) <= 0x7fffffffll && (x) >= -0x80000000ll)
#define NOT_HALFWORD(x) ((x) > 0x7fffffffll || (x) < -0x80000000ll)

Line 129 / Line 126 typedef int sljit_hw;

#endif /* SLJIT_CONFIG_X86_32 */

#if (defined SLJIT_SSE2 && SLJIT_SSE2)
#define TMP_FREG (SLJIT_FLOAT_REG4 + 1) | #define TMP_FREG (0)
#endif

/* Size flags for emit_x86_instruction: */

Line 142 / Line 139 typedef int sljit_hw;

#define EX86_PREF_66	0x0400

#if (defined SLJIT_SSE2 && SLJIT_SSE2)
#define EX86_PREF_F2 0x0800 | #define EX86_SSE2 0x0800
#define EX86_SSE2 0x1000 | #define EX86_PREF_F2 0x1000
| #define EX86_PREF_F3 0x2000
#endif

#define INC_SIZE(s) (*buf++ = (s), compiler->size += (s)) | /* --------------------------------------------------------------------- */
#define INC_CSIZE(s) (*code++ = (s), compiler->size += (s)) | /*  Instruction forms                                                    */
| /* --------------------------------------------------------------------- */

#define PUSH_REG(r) (*buf++ = (0x50 + (r))) | #define ADD (/* BINARY */ 0 << 3) |
#define POP_REG(r) (*buf++ = (0x58 + (r))) | #define ADD_EAX_i32 0x05 |
#define RET() (*buf++ = (0xc3)) | #define ADD_r_rm 0x03 |
#define RETN(n) (*buf++ = (0xc2), *buf++ = n, *buf++ = 0) | #define ADD_rm_r 0x01 |
| #define ADDSD_x_xm 0x58 |
| #define ADC (/* BINARY */ 2 << 3) |
| #define ADC_EAX_i32 0x15 |
| #define ADC_r_rm 0x13 |
| #define ADC_rm_r 0x11 |
| #define AND (/* BINARY */ 4 << 3) |
| #define AND_EAX_i32 0x25 |
| #define AND_r_rm 0x23 |
| #define AND_rm_r 0x21 |
| #define ANDPD_x_xm 0x54 |
| #define BSR_r_rm (/* GROUP_0F */ 0xbd) |
| #define CALL_i32 0xe8 |
| #define CALL_rm (/* GROUP_FF */ 2 << 3) |
| #define CDQ 0x99 |
| #define CMOVNE_r_rm (/* GROUP_0F */ 0x45) |
| #define CMP (/* BINARY */ 7 << 3) |
| #define CMP_EAX_i32 0x3d |
| #define CMP_r_rm 0x3b |
| #define CMP_rm_r 0x39 |
| #define DIV (/* GROUP_F7 */ 6 << 3) |
| #define DIVSD_x_xm 0x5e |
| #define INT3 0xcc |
| #define IDIV (/* GROUP_F7 */ 7 << 3) |
| #define IMUL (/* GROUP_F7 */ 5 << 3) |
| #define IMUL_r_rm (/* GROUP_0F */ 0xaf) |
| #define IMUL_r_rm_i8 0x6b |
| #define IMUL_r_rm_i32 0x69 |
| #define JE_i8 0x74 |
| #define JMP_i8 0xeb |
| #define JMP_i32 0xe9 |
| #define JMP_rm (/* GROUP_FF */ 4 << 3) |
| #define LEA_r_m 0x8d |
| #define MOV_r_rm 0x8b |
| #define MOV_r_i32 0xb8 |
| #define MOV_rm_r 0x89 |
| #define MOV_rm_i32 0xc7 |
| #define MOV_rm8_i8 0xc6 |
| #define MOV_rm8_r8 0x88 |
| #define MOVSD_x_xm 0x10 |
| #define MOVSD_xm_x 0x11 |
| #define MOVSXD_r_rm 0x63 |
| #define MOVSX_r_rm8 (/* GROUP_0F */ 0xbe) |
| #define MOVSX_r_rm16 (/* GROUP_0F */ 0xbf) |
| #define MOVZX_r_rm8 (/* GROUP_0F */ 0xb6) |
| #define MOVZX_r_rm16 (/* GROUP_0F */ 0xb7) |
| #define MUL (/* GROUP_F7 */ 4 << 3) |
| #define MULSD_x_xm 0x59 |
| #define NEG_rm (/* GROUP_F7 */ 3 << 3) |
| #define NOP 0x90 |
| #define NOT_rm (/* GROUP_F7 */ 2 << 3) |
| #define OR (/* BINARY */ 1 << 3) |
| #define OR_r_rm 0x0b |
| #define OR_EAX_i32 0x0d |
| #define OR_rm_r 0x09 |
| #define OR_rm8_r8 0x08 |
| #define POP_r 0x58 |
| #define POP_rm 0x8f |
| #define POPF 0x9d |
| #define PUSH_i32 0x68 |
| #define PUSH_r 0x50 |
| #define PUSH_rm (/* GROUP_FF */ 6 << 3) |
| #define PUSHF 0x9c |
| #define RET_near 0xc3 |
| #define RET_i16 0xc2 |
| #define SBB (/* BINARY */ 3 << 3) |
| #define SBB_EAX_i32 0x1d |
| #define SBB_r_rm 0x1b |
| #define SBB_rm_r 0x19 |
| #define SAR (/* SHIFT */ 7 << 3) |
| #define SHL (/* SHIFT */ 4 << 3) |
| #define SHR (/* SHIFT */ 5 << 3) |
| #define SUB (/* BINARY */ 5 << 3) |
| #define SUB_EAX_i32 0x2d |
| #define SUB_r_rm 0x2b |
| #define SUB_rm_r 0x29 |
| #define SUBSD_x_xm 0x5c |
| #define TEST_EAX_i32 0xa9 |
| #define TEST_rm_r 0x85 |
| #define UCOMISD_x_xm 0x2e |
| #define XCHG_EAX_r 0x90 |
| #define XCHG_r_rm 0x87 |
| #define XOR (/* BINARY */ 6 << 3) |
| #define XOR_EAX_i32 0x35 |
| #define XOR_r_rm 0x33 |
| #define XOR_rm_r 0x31 |
| #define XORPD_x_xm 0x57 |
| |
| #define GROUP_0F 0x0f |
| #define GROUP_F7 0xf7 |
| #define GROUP_FF 0xff |
| #define GROUP_BINARY_81 0x81 |
| #define GROUP_BINARY_83 0x83 |
| #define GROUP_SHIFT_1 0xd1 |
| #define GROUP_SHIFT_N 0xc1 |
| #define GROUP_SHIFT_CL 0xd3 |
| |
| #define MOD_REG 0xc0 |
| #define MOD_DISP8 0x40 |
| |
| #define INC_SIZE(s) (*inst++ = (s), compiler->size += (s)) |
| |
| #define PUSH_REG(r) (*inst++ = (PUSH_r + (r))) |
| #define POP_REG(r) (*inst++ = (POP_r + (r))) |
| #define RET() (*inst++ = (RET_near)) |
| #define RET_I16(n) (*inst++ = (RET_i16), *inst++ = n, *inst++ = 0) |
/* r32, r/m32 */
#define MOV_RM(mod, reg, rm) (*buf++ = (0x8b), *buf++ = (mod) << 6 | (reg) << 3 | (rm)) | #define MOV_RM(mod, reg, rm) (*inst++ = (MOV_r_rm), *inst++ = (mod) << 6 | (reg) << 3 | (rm))

static sljit_ub get_jump_code(int type) | /* Multithreading does not affect these static variables, since they store
   built-in CPU features. Therefore they can be overwritten by different threads
   if they detect the CPU features at the same time. */
#if (defined SLJIT_SSE2 && SLJIT_SSE2) && (defined SLJIT_DETECT_SSE2 && SLJIT_DETECT_SSE2)
static sljit_si cpu_has_sse2 = -1;
#endif
static sljit_si cpu_has_cmov = -1;

#if defined(_MSC_VER) && _MSC_VER >= 1400
#include <intrin.h>
#endif

static void get_cpu_features(void)
{
	sljit_ui features;

#if defined(_MSC_VER) && _MSC_VER >= 1400

	int CPUInfo[4];
	__cpuid(CPUInfo, 1);
	features = (sljit_ui)CPUInfo[3];

#elif defined(__GNUC__) || defined(__INTEL_COMPILER) || defined(__SUNPRO_C)

	/* AT&T syntax. */
	__asm__ (
		"movl $0x1, %%eax\n"
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
		/* On x86-32, there is no red zone, so this
		   should work (no need for a local variable). */
		"push %%ebx\n"
#endif
		"cpuid\n"
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
		"pop %%ebx\n"
#endif
		"movl %%edx, %0\n"
		: "=g" (features)
		:
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
		: "%eax", "%ecx", "%edx"
#else
		: "%rax", "%rbx", "%rcx", "%rdx"
#endif
	);

#else /* _MSC_VER && _MSC_VER >= 1400 */

	/* Intel syntax. */
	__asm {
		mov eax, 1
		cpuid
		mov features, edx
	}

#endif /* _MSC_VER && _MSC_VER >= 1400 */

#if (defined SLJIT_SSE2 && SLJIT_SSE2) && (defined SLJIT_DETECT_SSE2 && SLJIT_DETECT_SSE2)
	cpu_has_sse2 = (features >> 26) & 0x1;
#endif
	cpu_has_cmov = (features >> 15) & 0x1;
}

static sljit_ub get_jump_code(sljit_si type)
{
	switch (type) {
	case SLJIT_C_EQUAL:
	case SLJIT_C_FLOAT_EQUAL:
		return 0x84; | return 0x84 /* je */;

	case SLJIT_C_NOT_EQUAL:
	case SLJIT_C_FLOAT_NOT_EQUAL:
		return 0x85; | return 0x85 /* jne */;

	case SLJIT_C_LESS:
	case SLJIT_C_FLOAT_LESS:
		return 0x82; | return 0x82 /* jc */;

	case SLJIT_C_GREATER_EQUAL:
	case SLJIT_C_FLOAT_GREATER_EQUAL:
		return 0x83; | return 0x83 /* jae */;

	case SLJIT_C_GREATER:
	case SLJIT_C_FLOAT_GREATER:
		return 0x87; | return 0x87 /* jnbe */;

	case SLJIT_C_LESS_EQUAL:
	case SLJIT_C_FLOAT_LESS_EQUAL:
		return 0x86; | return 0x86 /* jbe */;

	case SLJIT_C_SIG_LESS:
		return 0x8c; | return 0x8c /* jl */;

	case SLJIT_C_SIG_GREATER_EQUAL:
		return 0x8d; | return 0x8d /* jnl */;

	case SLJIT_C_SIG_GREATER:
		return 0x8f; | return 0x8f /* jnle */;

	case SLJIT_C_SIG_LESS_EQUAL:
		return 0x8e; | return 0x8e /* jle */;

	case SLJIT_C_OVERFLOW:
	case SLJIT_C_MUL_OVERFLOW:
		return 0x80; | return 0x80 /* jo */;

	case SLJIT_C_NOT_OVERFLOW:
	case SLJIT_C_MUL_NOT_OVERFLOW:
		return 0x81; | return 0x81 /* jno */;

	case SLJIT_C_FLOAT_NAN: | case SLJIT_C_FLOAT_UNORDERED:
		return 0x8a; | return 0x8a /* jp */;

	case SLJIT_C_FLOAT_NOT_NAN: | case SLJIT_C_FLOAT_ORDERED:
		return 0x8b; | return 0x8b /* jpo */;
	}
	return 0;
}

static sljit_ub* generate_far_jump_code(struct sljit_jump *jump, sljit_ub *code_ptr, int type); | static sljit_ub* generate_far_jump_code(struct sljit_jump *jump, sljit_ub *code_ptr, sljit_si type);

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
static sljit_ub* generate_fixed_jump(sljit_ub *code_ptr, sljit_w addr, int type); | static sljit_ub* generate_fixed_jump(sljit_ub *code_ptr, sljit_sw addr, sljit_si type);
#endif

static sljit_ub* generate_near_jump_code(struct sljit_jump *jump, sljit_ub *code_ptr, sljit_ub *code, int type) | static sljit_ub* generate_near_jump_code(struct sljit_jump *jump, sljit_ub *code_ptr, sljit_ub *code, sljit_si type)
{
	int short_jump; | sljit_si short_jump;
	sljit_uw label_addr;

	if (jump->flags & JUMP_LABEL)
		label_addr = (sljit_uw)(code + jump->u.label->size);
	else
		label_addr = jump->u.target;
	short_jump = (sljit_w)(label_addr - (jump->addr + 2)) >= -128 && (sljit_w)(label_addr - (jump->addr + 2)) <= 127; | short_jump = (sljit_sw)(label_addr - (jump->addr + 2)) >= -128 && (sljit_sw)(label_addr - (jump->addr + 2)) <= 127;

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	if ((sljit_w)(label_addr - (jump->addr + 1)) > 0x7fffffffll || (sljit_w)(label_addr - (jump->addr + 1)) < -0x80000000ll) | if ((sljit_sw)(label_addr - (jump->addr + 1)) > 0x7fffffffll || (sljit_sw)(label_addr - (jump->addr + 1)) < -0x80000000ll)
		return generate_far_jump_code(jump, code_ptr, type);
#endif

	if (type == SLJIT_JUMP) {
		if (short_jump)
			*code_ptr++ = 0xeb; | *code_ptr++ = JMP_i8;
		else
			*code_ptr++ = 0xe9; | *code_ptr++ = JMP_i32;
		jump->addr++;
	}
	else if (type >= SLJIT_FAST_CALL) {
		short_jump = 0;
		*code_ptr++ = 0xe8; | *code_ptr++ = CALL_i32;
		jump->addr++;
	}
	else if (short_jump) {

Line 251 / Line 418 static sljit_ub* generate_near_jump_code(struct sljit_

		jump->addr++;
	}
	else {
		*code_ptr++ = 0x0f; | *code_ptr++ = GROUP_0F;
		*code_ptr++ = get_jump_code(type);
		jump->addr += 2;
	}

	if (short_jump) {
		jump->flags |= PATCH_MB;
		code_ptr += sizeof(sljit_b); | code_ptr += sizeof(sljit_sb);
	} else {
		jump->flags |= PATCH_MW;
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
		code_ptr += sizeof(sljit_w); | code_ptr += sizeof(sljit_sw);
#else
		code_ptr += sizeof(sljit_hw); | code_ptr += sizeof(sljit_si);
#endif
	}

Line 323 / Line 490 SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(str

			label = label->next;
		}
		else if (*buf_ptr == 1) {
			const_->addr = ((sljit_uw)code_ptr) - sizeof(sljit_w); | const_->addr = ((sljit_uw)code_ptr) - sizeof(sljit_sw);
			const_ = const_->next;
		}
		else {
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
			*code_ptr++ = (*buf_ptr == 2) ? 0xe8 /* call */ : 0xe9 /* jmp */; | *code_ptr++ = (*buf_ptr == 2) ? CALL_i32 : JMP_i32;
			buf_ptr++;
			*(sljit_w*)code_ptr = *(sljit_w*)buf_ptr - ((sljit_w)code_ptr + sizeof(sljit_w)); | *(sljit_sw*)code_ptr = *(sljit_sw*)buf_ptr - ((sljit_sw)code_ptr + sizeof(sljit_sw));
			code_ptr += sizeof(sljit_w); | code_ptr += sizeof(sljit_sw);
			buf_ptr += sizeof(sljit_w) - 1; | buf_ptr += sizeof(sljit_sw) - 1;
#else
			code_ptr = generate_fixed_jump(code_ptr, *(sljit_w*)(buf_ptr + 1), *buf_ptr); | code_ptr = generate_fixed_jump(code_ptr, *(sljit_sw*)(buf_ptr + 1), *buf_ptr);
			buf_ptr += sizeof(sljit_w); | buf_ptr += sizeof(sljit_sw);
#endif
		}
		buf_ptr++;

Line 352 / Line 519 SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(str

	jump = compiler->jumps;
	while (jump) {
		if (jump->flags & PATCH_MB) {
			SLJIT_ASSERT((sljit_w)(jump->u.label->addr - (jump->addr + sizeof(sljit_b))) >= -128 && (sljit_w)(jump->u.label->addr - (jump->addr + sizeof(sljit_b))) <= 127); | SLJIT_ASSERT((sljit_sw)(jump->u.label->addr - (jump->addr + sizeof(sljit_sb))) >= -128 && (sljit_sw)(jump->u.label->addr - (jump->addr + sizeof(sljit_sb))) <= 127);
			*(sljit_ub*)jump->addr = (sljit_ub)(jump->u.label->addr - (jump->addr + sizeof(sljit_b))); | *(sljit_ub*)jump->addr = (sljit_ub)(jump->u.label->addr - (jump->addr + sizeof(sljit_sb)));
		} else if (jump->flags & PATCH_MW) {
			if (jump->flags & JUMP_LABEL) {
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
				*(sljit_w*)jump->addr = (sljit_w)(jump->u.label->addr - (jump->addr + sizeof(sljit_w))); | *(sljit_sw*)jump->addr = (sljit_sw)(jump->u.label->addr - (jump->addr + sizeof(sljit_sw)));
#else
				SLJIT_ASSERT((sljit_w)(jump->u.label->addr - (jump->addr + sizeof(sljit_hw))) >= -0x80000000ll && (sljit_w)(jump->u.label->addr - (jump->addr + sizeof(sljit_hw))) <= 0x7fffffffll); | SLJIT_ASSERT((sljit_sw)(jump->u.label->addr - (jump->addr + sizeof(sljit_si))) >= -0x80000000ll && (sljit_sw)(jump->u.label->addr - (jump->addr + sizeof(sljit_si))) <= 0x7fffffffll);
				*(sljit_hw*)jump->addr = (sljit_hw)(jump->u.label->addr - (jump->addr + sizeof(sljit_hw))); | *(sljit_si*)jump->addr = (sljit_si)(jump->u.label->addr - (jump->addr + sizeof(sljit_si)));
#endif
			}
			else {
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
				*(sljit_w*)jump->addr = (sljit_w)(jump->u.target - (jump->addr + sizeof(sljit_w))); | *(sljit_sw*)jump->addr = (sljit_sw)(jump->u.target - (jump->addr + sizeof(sljit_sw)));
#else
				SLJIT_ASSERT((sljit_w)(jump->u.target - (jump->addr + sizeof(sljit_hw))) >= -0x80000000ll && (sljit_w)(jump->u.target - (jump->addr + sizeof(sljit_hw))) <= 0x7fffffffll); | SLJIT_ASSERT((sljit_sw)(jump->u.target - (jump->addr + sizeof(sljit_si))) >= -0x80000000ll && (sljit_sw)(jump->u.target - (jump->addr + sizeof(sljit_si))) <= 0x7fffffffll);
				*(sljit_hw*)jump->addr = (sljit_hw)(jump->u.target - (jump->addr + sizeof(sljit_hw))); | *(sljit_si*)jump->addr = (sljit_si)(jump->u.target - (jump->addr + sizeof(sljit_si)));
#endif
			}
		}
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
		else if (jump->flags & PATCH_MD)
			*(sljit_w*)jump->addr = jump->u.label->addr; | *(sljit_sw*)jump->addr = jump->u.label->addr;
#endif

		jump = jump->next;

Line 383 / Line 550 SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(str

	/* Maybe we waste some space because of short jumps. */
	SLJIT_ASSERT(code_ptr <= code + compiler->size);
	compiler->error = SLJIT_ERR_COMPILED;
	compiler->executable_size = compiler->size; | compiler->executable_size = code_ptr - code;
	return (void*)code;
}

Line 391 / Line 558 SLJIT_API_FUNC_ATTRIBUTE void* sljit_generate_code(str

/* Operators */
/* --------------------------------------------------------------------- */

static int emit_cum_binary(struct sljit_compiler *compiler, | static sljit_si emit_cum_binary(struct sljit_compiler *compiler,
	sljit_ub op_rm, sljit_ub op_mr, sljit_ub op_imm, sljit_ub op_eax_imm,
	int dst, sljit_w dstw, | sljit_si dst, sljit_sw dstw,
	int src1, sljit_w src1w, | sljit_si src1, sljit_sw src1w,
	int src2, sljit_w src2w); | sljit_si src2, sljit_sw src2w);

static int emit_non_cum_binary(struct sljit_compiler *compiler, | static sljit_si emit_non_cum_binary(struct sljit_compiler *compiler,
	sljit_ub op_rm, sljit_ub op_mr, sljit_ub op_imm, sljit_ub op_eax_imm,
	int dst, sljit_w dstw, | sljit_si dst, sljit_sw dstw,
	int src1, sljit_w src1w, | sljit_si src1, sljit_sw src1w,
	int src2, sljit_w src2w); | sljit_si src2, sljit_sw src2w);

static int emit_mov(struct sljit_compiler *compiler, | static sljit_si emit_mov(struct sljit_compiler *compiler,
	int dst, sljit_w dstw, | sljit_si dst, sljit_sw dstw,
	int src, sljit_w srcw); | sljit_si src, sljit_sw srcw);

static SLJIT_INLINE int emit_save_flags(struct sljit_compiler *compiler) | static SLJIT_INLINE sljit_si emit_save_flags(struct sljit_compiler *compiler)
{
	sljit_ub *buf; | sljit_ub *inst;

#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
	buf = (sljit_ub*)ensure_buf(compiler, 1 + 5); | inst = (sljit_ub*)ensure_buf(compiler, 1 + 5);
	FAIL_IF(!buf); | FAIL_IF(!inst);
	INC_SIZE(5);
#else
	buf = (sljit_ub*)ensure_buf(compiler, 1 + 6); | inst = (sljit_ub*)ensure_buf(compiler, 1 + 6);
	FAIL_IF(!buf); | FAIL_IF(!inst);
	INC_SIZE(6);
	*buf++ = REX_W; | *inst++ = REX_W;
#endif
	*buf++ = 0x8d; /* lea esp/rsp, [esp/rsp + sizeof(sljit_w)] */ | *inst++ = LEA_r_m; /* lea esp/rsp, [esp/rsp + sizeof(sljit_sw)] */
	*buf++ = 0x64; | *inst++ = 0x64;
	*buf++ = 0x24; | *inst++ = 0x24;
	*buf++ = (sljit_ub)sizeof(sljit_w); | *inst++ = (sljit_ub)sizeof(sljit_sw);
	*buf++ = 0x9c; /* pushfd / pushfq */ | *inst++ = PUSHF;
	compiler->flags_saved = 1;
	return SLJIT_SUCCESS;
}

static SLJIT_INLINE int emit_restore_flags(struct sljit_compiler *compiler, int keep_flags) | static SLJIT_INLINE sljit_si emit_restore_flags(struct sljit_compiler *compiler, sljit_si keep_flags)
{
	sljit_ub *buf; | sljit_ub *inst;

#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
	buf = (sljit_ub*)ensure_buf(compiler, 1 + 5); | inst = (sljit_ub*)ensure_buf(compiler, 1 + 5);
	FAIL_IF(!buf); | FAIL_IF(!inst);
	INC_SIZE(5);
	*buf++ = 0x9d; /* popfd */ | *inst++ = POPF;
#else
	buf = (sljit_ub*)ensure_buf(compiler, 1 + 6); | inst = (sljit_ub*)ensure_buf(compiler, 1 + 6);
	FAIL_IF(!buf); | FAIL_IF(!inst);
	INC_SIZE(6);
	*buf++ = 0x9d; /* popfq */ | *inst++ = POPF;
	*buf++ = REX_W; | *inst++ = REX_W;
#endif
	*buf++ = 0x8d; /* lea esp/rsp, [esp/rsp - sizeof(sljit_w)] */ | *inst++ = LEA_r_m; /* lea esp/rsp, [esp/rsp - sizeof(sljit_sw)] */
	*buf++ = 0x64; | *inst++ = 0x64;
	*buf++ = 0x24; | *inst++ = 0x24;
	*buf++ = (sljit_ub)-(int)sizeof(sljit_w); | *inst++ = (sljit_ub)-(sljit_sb)sizeof(sljit_sw);
	compiler->flags_saved = keep_flags;
	return SLJIT_SUCCESS;
}

Line 457 / Line 624 static SLJIT_INLINE int emit_restore_flags(struct slji

#ifdef _WIN32
#include <malloc.h>

static void SLJIT_CALL sljit_grow_stack(sljit_w local_size) | static void SLJIT_CALL sljit_grow_stack(sljit_sw local_size)
{
	/* Workaround for calling the internal _chkstk() function on Windows.
	This function touches all 4k pages that belong to the requested stack space,
	whose size is passed in local_size. This is necessary on Windows, where
	the stack can only grow in 4k steps. However, this function just burns
	CPU cycles if the stack is large enough, but you don't know it in advance. | CPU cycles if the stack is large enough. However, you don't know it in
	I think this is a bad design even if it has some reasons. */ | advance, so it must always be called. I think this is a bad design in
	alloca(local_size); | general even if it has some reasons. */
| *(sljit_si*)alloca(local_size) = 0;
}

#endif

Line 476 / Line 644 static void SLJIT_CALL sljit_grow_stack(sljit_w local_

#include "sljitNativeX86_64.c" |
#include "sljitNativeX86_64.c" |
#endif |
#endif |
|
|
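/* Word-sized move between any combination of register, memory and immediate operands;
   a memory-to-memory move is split into two instructions through TMP_REGISTER. */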
static int emit_mov(struct sljit_compiler *compiler, | static sljit_si emit_mov(struct sljit_compiler *compiler,
	int dst, sljit_w dstw, | sljit_si dst, sljit_sw dstw,
	int src, sljit_w srcw) | sljit_si src, sljit_sw srcw)
{
	sljit_ub* code; | sljit_ub* inst;

	if (dst == SLJIT_UNUSED) {
		/* No destination, doesn't need to setup flags. */
		if (src & SLJIT_MEM) {
			code = emit_x86_instruction(compiler, 1, TMP_REGISTER, 0, src, srcw); | inst = emit_x86_instruction(compiler, 1, TMP_REGISTER, 0, src, srcw);
			FAIL_IF(!code); | FAIL_IF(!inst);
			*code = 0x8b; | *inst = MOV_r_rm;
		}
		return SLJIT_SUCCESS;
	}
	if (src >= SLJIT_TEMPORARY_REG1 && src <= TMP_REGISTER) { | if (src <= TMP_REGISTER) {
		code = emit_x86_instruction(compiler, 1, src, 0, dst, dstw); | inst = emit_x86_instruction(compiler, 1, src, 0, dst, dstw);
		FAIL_IF(!code); | FAIL_IF(!inst);
		*code = 0x89; | *inst = MOV_rm_r;
		return SLJIT_SUCCESS;
	}
	if (src & SLJIT_IMM) {
		if (dst >= SLJIT_TEMPORARY_REG1 && dst <= TMP_REGISTER) { | if (dst <= TMP_REGISTER) {
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
			return emit_do_imm(compiler, 0xb8 + reg_map[dst], srcw); | return emit_do_imm(compiler, MOV_r_i32 + reg_map[dst], srcw);
#else
			if (!compiler->mode32) {
				if (NOT_HALFWORD(srcw))
					return emit_load_imm64(compiler, dst, srcw);
			}
			else
				return emit_do_imm32(compiler, (reg_map[dst] >= 8) ? REX_B : 0, 0xb8 + reg_lmap[dst], srcw); | return emit_do_imm32(compiler, (reg_map[dst] >= 8) ? REX_B : 0, MOV_r_i32 + reg_lmap[dst], srcw);
#endif
		}
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
		if (!compiler->mode32 && NOT_HALFWORD(srcw)) {
			FAIL_IF(emit_load_imm64(compiler, TMP_REG2, srcw));
			code = emit_x86_instruction(compiler, 1, TMP_REG2, 0, dst, dstw); | inst = emit_x86_instruction(compiler, 1, TMP_REG2, 0, dst, dstw);
			FAIL_IF(!code); | FAIL_IF(!inst);
			*code = 0x89; | *inst = MOV_rm_r;
			return SLJIT_SUCCESS;
		}
#endif
		code = emit_x86_instruction(compiler, 1, SLJIT_IMM, srcw, dst, dstw); | inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, srcw, dst, dstw);
		FAIL_IF(!code); | FAIL_IF(!inst);
		*code = 0xc7; | *inst = MOV_rm_i32;
		return SLJIT_SUCCESS;
	}
	if (dst >= SLJIT_TEMPORARY_REG1 && dst <= TMP_REGISTER) { | if (dst <= TMP_REGISTER) {
		code = emit_x86_instruction(compiler, 1, dst, 0, src, srcw); | inst = emit_x86_instruction(compiler, 1, dst, 0, src, srcw);
		FAIL_IF(!code); | FAIL_IF(!inst);
		*code = 0x8b; | *inst = MOV_r_rm;
		return SLJIT_SUCCESS;
	}

	/* Memory to memory move. Requires two instructions. */
	code = emit_x86_instruction(compiler, 1, TMP_REGISTER, 0, src, srcw); | inst = emit_x86_instruction(compiler, 1, TMP_REGISTER, 0, src, srcw);
	FAIL_IF(!code); | FAIL_IF(!inst);
	*code = 0x8b; | *inst = MOV_r_rm;
	code = emit_x86_instruction(compiler, 1, TMP_REGISTER, 0, dst, dstw); | inst = emit_x86_instruction(compiler, 1, TMP_REGISTER, 0, dst, dstw);
	FAIL_IF(!code); | FAIL_IF(!inst);
	*code = 0x89; | *inst = MOV_rm_r;
	return SLJIT_SUCCESS;
}

#define EMIT_MOV(compiler, dst, dstw, src, srcw) \
	FAIL_IF(emit_mov(compiler, dst, dstw, src, srcw));

SLJIT_API_FUNC_ATTRIBUTE int sljit_emit_op0(struct sljit_compiler *compiler, int op) | SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op0(struct sljit_compiler *compiler, sljit_si op)
{
	sljit_ub *buf; | sljit_ub *inst;
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	int size; | sljit_si size;
#endif

	CHECK_ERROR();

Line 556 / Line 724 SLJIT_API_FUNC_ATTRIBUTE int sljit_emit_op0(struct slj

	switch (GET_OPCODE(op)) {
	case SLJIT_BREAKPOINT:
		buf = (sljit_ub*)ensure_buf(compiler, 1 + 1); | inst = (sljit_ub*)ensure_buf(compiler, 1 + 1);
		FAIL_IF(!buf); | FAIL_IF(!inst);
		INC_SIZE(1);
		*buf = 0xcc; | *inst = INT3;
		break;
	case SLJIT_NOP:
		buf = (sljit_ub*)ensure_buf(compiler, 1 + 1); | inst = (sljit_ub*)ensure_buf(compiler, 1 + 1);
		FAIL_IF(!buf); | FAIL_IF(!inst);
		INC_SIZE(1);
		*buf = 0x90; | *inst = NOP;
		break;
	case SLJIT_UMUL:
	case SLJIT_SMUL:

Line 575 / Line 743 SLJIT_API_FUNC_ATTRIBUTE int sljit_emit_op0(struct slj

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
#ifdef _WIN64
		SLJIT_COMPILE_ASSERT(
			reg_map[SLJIT_TEMPORARY_REG1] == 0 | reg_map[SLJIT_SCRATCH_REG1] == 0
			&& reg_map[SLJIT_TEMPORARY_REG2] == 2 | && reg_map[SLJIT_SCRATCH_REG2] == 2
			&& reg_map[TMP_REGISTER] > 7,
			invalid_register_assignment_for_div_mul);
#else
		SLJIT_COMPILE_ASSERT(
			reg_map[SLJIT_TEMPORARY_REG1] == 0 | reg_map[SLJIT_SCRATCH_REG1] == 0
			&& reg_map[SLJIT_TEMPORARY_REG2] < 7 | && reg_map[SLJIT_SCRATCH_REG2] < 7
			&& reg_map[TMP_REGISTER] == 2,
			invalid_register_assignment_for_div_mul);
#endif

Line 592 / Line 760 SLJIT_API_FUNC_ATTRIBUTE int sljit_emit_op0(struct slj

		op = GET_OPCODE(op);
		if (op == SLJIT_UDIV) {
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32) || defined(_WIN64)
			EMIT_MOV(compiler, TMP_REGISTER, 0, SLJIT_TEMPORARY_REG2, 0); | EMIT_MOV(compiler, TMP_REGISTER, 0, SLJIT_SCRATCH_REG2, 0);
			buf = emit_x86_instruction(compiler, 1, SLJIT_TEMPORARY_REG2, 0, SLJIT_TEMPORARY_REG2, 0); | inst = emit_x86_instruction(compiler, 1, SLJIT_SCRATCH_REG2, 0, SLJIT_SCRATCH_REG2, 0);
#else
			buf = emit_x86_instruction(compiler, 1, TMP_REGISTER, 0, TMP_REGISTER, 0); | inst = emit_x86_instruction(compiler, 1, TMP_REGISTER, 0, TMP_REGISTER, 0);
#endif
			FAIL_IF(!buf); | FAIL_IF(!inst);
			*buf = 0x33; | *inst = XOR_r_rm;
		}

		if (op == SLJIT_SDIV) {
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32) || defined(_WIN64)
			EMIT_MOV(compiler, TMP_REGISTER, 0, SLJIT_TEMPORARY_REG2, 0); | EMIT_MOV(compiler, TMP_REGISTER, 0, SLJIT_SCRATCH_REG2, 0);
#endif

			/* CDQ instruction */ |
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
			buf = (sljit_ub*)ensure_buf(compiler, 1 + 1); | inst = (sljit_ub*)ensure_buf(compiler, 1 + 1);
			FAIL_IF(!buf); | FAIL_IF(!inst);
			INC_SIZE(1);
			*buf = 0x99; | *inst = CDQ;
#else
			if (compiler->mode32) {
				buf = (sljit_ub*)ensure_buf(compiler, 1 + 1); | inst = (sljit_ub*)ensure_buf(compiler, 1 + 1);
				FAIL_IF(!buf); | FAIL_IF(!inst);
				INC_SIZE(1);
				*buf = 0x99; | *inst = CDQ;
			} else {
				buf = (sljit_ub*)ensure_buf(compiler, 1 + 2); | inst = (sljit_ub*)ensure_buf(compiler, 1 + 2);
				FAIL_IF(!buf); | FAIL_IF(!inst);
				INC_SIZE(2);
				*buf++ = REX_W; | *inst++ = REX_W;
				*buf = 0x99; | *inst = CDQ;
			}
#endif
		}

#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
		buf = (sljit_ub*)ensure_buf(compiler, 1 + 2); | inst = (sljit_ub*)ensure_buf(compiler, 1 + 2);
		FAIL_IF(!buf); | FAIL_IF(!inst);
		INC_SIZE(2);
		*buf++ = 0xf7; | *inst++ = GROUP_F7;
		*buf = 0xc0 | ((op >= SLJIT_UDIV) ? reg_map[TMP_REGISTER] : reg_map[SLJIT_TEMPORARY_REG2]); | *inst = MOD_REG | ((op >= SLJIT_UDIV) ? reg_map[TMP_REGISTER] : reg_map[SLJIT_SCRATCH_REG2]);
#else
#ifdef _WIN64
		size = (!compiler->mode32 || op >= SLJIT_UDIV) ? 3 : 2;
#else
		size = (!compiler->mode32) ? 3 : 2;
#endif
		buf = (sljit_ub*)ensure_buf(compiler, 1 + size); | inst = (sljit_ub*)ensure_buf(compiler, 1 + size);
		FAIL_IF(!buf); | FAIL_IF(!inst);
		INC_SIZE(size);
#ifdef _WIN64
		if (!compiler->mode32)
			*buf++ = REX_W | ((op >= SLJIT_UDIV) ? REX_B : 0); | *inst++ = REX_W | ((op >= SLJIT_UDIV) ? REX_B : 0);
		else if (op >= SLJIT_UDIV)
			*buf++ = REX_B; | *inst++ = REX_B;
		*buf++ = 0xf7; | *inst++ = GROUP_F7;
		*buf = 0xc0 | ((op >= SLJIT_UDIV) ? reg_lmap[TMP_REGISTER] : reg_lmap[SLJIT_TEMPORARY_REG2]); | *inst = MOD_REG | ((op >= SLJIT_UDIV) ? reg_lmap[TMP_REGISTER] : reg_lmap[SLJIT_SCRATCH_REG2]);
#else
		if (!compiler->mode32)
			*buf++ = REX_W; | *inst++ = REX_W;
		*buf++ = 0xf7; | *inst++ = GROUP_F7;
		*buf = 0xc0 | reg_map[SLJIT_TEMPORARY_REG2]; | *inst = MOD_REG | reg_map[SLJIT_SCRATCH_REG2];
#endif
#endif
		switch (op) {
		case SLJIT_UMUL:
			*buf |= 4 << 3; | *inst |= MUL;
			break;
		case SLJIT_SMUL:
			*buf |= 5 << 3; | *inst |= IMUL;
			break;
		case SLJIT_UDIV:
			*buf |= 6 << 3; | *inst |= DIV;
			break;
		case SLJIT_SDIV:
			*buf |= 7 << 3; | *inst |= IDIV;
			break;
		}
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64) && !defined(_WIN64)
		EMIT_MOV(compiler, SLJIT_TEMPORARY_REG2, 0, TMP_REGISTER, 0); | EMIT_MOV(compiler, SLJIT_SCRATCH_REG2, 0, TMP_REGISTER, 0);
#endif
		break;
	}

Line 682 / Line 849 SLJIT_API_FUNC_ATTRIBUTE int sljit_emit_op0(struct slj

#define ENCODE_PREFIX(prefix) \
	do { \
		code = (sljit_ub*)ensure_buf(compiler, 1 + 1); \ | inst = (sljit_ub*)ensure_buf(compiler, 1 + 1); \
		FAIL_IF(!code); \ | FAIL_IF(!inst); \
		INC_CSIZE(1); \ | INC_SIZE(1); \
		*code = (prefix); \ | *inst = (prefix); \
	} while (0)

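/* 8-bit moves: on x86-32 only eax, ecx, edx and ebx (reg_map value < 4) have directly
   addressable low-byte forms, hence the extra register shuffling in the code below. */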
static int emit_mov_byte(struct sljit_compiler *compiler, int sign, | static sljit_si emit_mov_byte(struct sljit_compiler *compiler, sljit_si sign,
	int dst, sljit_w dstw, | sljit_si dst, sljit_sw dstw,
	int src, sljit_w srcw) | sljit_si src, sljit_sw srcw)
{
	sljit_ub* code; | sljit_ub* inst;
	int dst_r; | sljit_si dst_r;
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
	int work_r; | sljit_si work_r;
#endif

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)

Line 706 / Line 873 static int emit_mov_byte(struct sljit_compiler *compil

		return SLJIT_SUCCESS; /* Empty instruction. */

	if (src & SLJIT_IMM) {
		if (dst >= SLJIT_TEMPORARY_REG1 && dst <= TMP_REGISTER) { | if (dst <= TMP_REGISTER) {
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
			return emit_do_imm(compiler, 0xb8 + reg_map[dst], srcw); | return emit_do_imm(compiler, MOV_r_i32 + reg_map[dst], srcw);
#else
			return emit_load_imm64(compiler, dst, srcw); | inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, srcw, dst, 0);
| FAIL_IF(!inst);
| *inst = MOV_rm_i32;
| return SLJIT_SUCCESS;
#endif
		}
		code = emit_x86_instruction(compiler, 1 | EX86_BYTE_ARG | EX86_NO_REXW, SLJIT_IMM, srcw, dst, dstw); | inst = emit_x86_instruction(compiler, 1 | EX86_BYTE_ARG | EX86_NO_REXW, SLJIT_IMM, srcw, dst, dstw);
		FAIL_IF(!code); | FAIL_IF(!inst);
		*code = 0xc6; | *inst = MOV_rm8_i8;
		return SLJIT_SUCCESS;
	}

	dst_r = (dst >= SLJIT_TEMPORARY_REG1 && dst <= TMP_REGISTER) ? dst : TMP_REGISTER; | dst_r = (dst <= TMP_REGISTER) ? dst : TMP_REGISTER;

	if ((dst & SLJIT_MEM) && src >= SLJIT_TEMPORARY_REG1 && src <= SLJIT_NO_REGISTERS) { | if ((dst & SLJIT_MEM) && src <= TMP_REGISTER) {
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
		if (reg_map[src] >= 4) {
			SLJIT_ASSERT(dst_r == TMP_REGISTER);

Line 733 / Line 903 static int emit_mov_byte(struct sljit_compiler *compil

#endif
	}
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
	else if (src >= SLJIT_TEMPORARY_REG1 && src <= SLJIT_NO_REGISTERS && reg_map[src] >= 4) { | else if (src <= TMP_REGISTER && reg_map[src] >= 4) {
		/* src, dst are registers. */
		SLJIT_ASSERT(dst >= SLJIT_TEMPORARY_REG1 && dst <= TMP_REGISTER); | SLJIT_ASSERT(dst >= SLJIT_SCRATCH_REG1 && dst <= TMP_REGISTER);
		if (reg_map[dst] < 4) {
			if (dst != src)
				EMIT_MOV(compiler, dst, 0, src, 0);
			code = emit_x86_instruction(compiler, 2, dst, 0, dst, 0); | inst = emit_x86_instruction(compiler, 2, dst, 0, dst, 0);
			FAIL_IF(!code); | FAIL_IF(!inst);
			*code++ = 0x0f; | *inst++ = GROUP_0F;
			*code = sign ? 0xbe : 0xb6; | *inst = sign ? MOVSX_r_rm8 : MOVZX_r_rm8;
		}
		else {
			if (dst != src)
				EMIT_MOV(compiler, dst, 0, src, 0);
			if (sign) {
				/* shl reg, 24 */
				code = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_IMM, 24, dst, 0); | inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_IMM, 24, dst, 0);
				FAIL_IF(!code); | FAIL_IF(!inst);
				*code |= 0x4 << 3; | *inst |= SHL;
				code = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_IMM, 24, dst, 0); | /* sar reg, 24 */
				FAIL_IF(!code); | inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_IMM, 24, dst, 0);
				/* shr/sar reg, 24 */ | FAIL_IF(!inst);
				*code |= 0x7 << 3; | *inst |= SAR;
			}
			else {
				/* and dst, 0xff */ | inst = emit_x86_instruction(compiler, 1 | EX86_BIN_INS, SLJIT_IMM, 0xff, dst, 0);
				code = emit_x86_instruction(compiler, 1 | EX86_BIN_INS, SLJIT_IMM, 255, dst, 0); | FAIL_IF(!inst);
				FAIL_IF(!code); | *(inst + 1) |= AND;
				*(code + 1) |= 0x4 << 3; |
			}
		}
		return SLJIT_SUCCESS;

Line 769 / Line 938 static int emit_mov_byte(struct sljit_compiler *compil

#endif
	else {
		/* src can be memory addr or reg_map[src] < 4 on x86_32 architectures. */
		code = emit_x86_instruction(compiler, 2, dst_r, 0, src, srcw); | inst = emit_x86_instruction(compiler, 2, dst_r, 0, src, srcw);
		FAIL_IF(!code); | FAIL_IF(!inst);
		*code++ = 0x0f; | *inst++ = GROUP_0F;
		*code = sign ? 0xbe : 0xb6; | *inst = sign ? MOVSX_r_rm8 : MOVZX_r_rm8;
	}

	if (dst & SLJIT_MEM) {
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
		if (dst_r == TMP_REGISTER) {
			/* Find a non-used register, whose reg_map[src] < 4. */
			if ((dst & 0xf) == SLJIT_TEMPORARY_REG1) { | if ((dst & 0xf) == SLJIT_SCRATCH_REG1) {
				if ((dst & 0xf0) == (SLJIT_TEMPORARY_REG2 << 4)) | if ((dst & 0xf0) == (SLJIT_SCRATCH_REG2 << 4))
					work_r = SLJIT_TEMPORARY_REG3; | work_r = SLJIT_SCRATCH_REG3;
				else
					work_r = SLJIT_TEMPORARY_REG2; | work_r = SLJIT_SCRATCH_REG2;
			}
			else {
				if ((dst & 0xf0) != (SLJIT_TEMPORARY_REG1 << 4)) | if ((dst & 0xf0) != (SLJIT_SCRATCH_REG1 << 4))
					work_r = SLJIT_TEMPORARY_REG1; | work_r = SLJIT_SCRATCH_REG1;
				else if ((dst & 0xf) == SLJIT_TEMPORARY_REG2) | else if ((dst & 0xf) == SLJIT_SCRATCH_REG2)
					work_r = SLJIT_TEMPORARY_REG3; | work_r = SLJIT_SCRATCH_REG3;
				else
					work_r = SLJIT_TEMPORARY_REG2; | work_r = SLJIT_SCRATCH_REG2;
			}

			if (work_r == SLJIT_TEMPORARY_REG1) { | if (work_r == SLJIT_SCRATCH_REG1) {
				ENCODE_PREFIX(0x90 + reg_map[TMP_REGISTER]); | ENCODE_PREFIX(XCHG_EAX_r + reg_map[TMP_REGISTER]);
			}
			else {
				code = emit_x86_instruction(compiler, 1, work_r, 0, dst_r, 0); | inst = emit_x86_instruction(compiler, 1, work_r, 0, dst_r, 0);
				FAIL_IF(!code); | FAIL_IF(!inst);
				*code = 0x87; | *inst = XCHG_r_rm;
			}

			code = emit_x86_instruction(compiler, 1, work_r, 0, dst, dstw); | inst = emit_x86_instruction(compiler, 1, work_r, 0, dst, dstw);
			FAIL_IF(!code); | FAIL_IF(!inst);
			*code = 0x88; | *inst = MOV_rm8_r8;

			if (work_r == SLJIT_TEMPORARY_REG1) { | if (work_r == SLJIT_SCRATCH_REG1) {
				ENCODE_PREFIX(0x90 + reg_map[TMP_REGISTER]); | ENCODE_PREFIX(XCHG_EAX_r + reg_map[TMP_REGISTER]);
			}
			else {
				code = emit_x86_instruction(compiler, 1, work_r, 0, dst_r, 0); | inst = emit_x86_instruction(compiler, 1, work_r, 0, dst_r, 0);
				FAIL_IF(!code); | FAIL_IF(!inst);
				*code = 0x87; | *inst = XCHG_r_rm;
			}
		}
		else {
			code = emit_x86_instruction(compiler, 1, dst_r, 0, dst, dstw); | inst = emit_x86_instruction(compiler, 1, dst_r, 0, dst, dstw);
			FAIL_IF(!code); | FAIL_IF(!inst);
			*code = 0x88; | *inst = MOV_rm8_r8;
		}
#else
		code = emit_x86_instruction(compiler, 1 | EX86_REX | EX86_NO_REXW, dst_r, 0, dst, dstw); | inst = emit_x86_instruction(compiler, 1 | EX86_REX | EX86_NO_REXW, dst_r, 0, dst, dstw);
		FAIL_IF(!code); | FAIL_IF(!inst);
		*code = 0x88; | *inst = MOV_rm8_r8;
#endif
	}

	return SLJIT_SUCCESS;
}

static int emit_mov_half(struct sljit_compiler *compiler, int sign, | static sljit_si emit_mov_half(struct sljit_compiler *compiler, sljit_si sign,
	int dst, sljit_w dstw, | sljit_si dst, sljit_sw dstw,
	int src, sljit_w srcw) | sljit_si src, sljit_sw srcw)
{
	sljit_ub* code; | sljit_ub* inst;
	int dst_r; | sljit_si dst_r;

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
	compiler->mode32 = 0;

Line 846 / Line 1015 static int emit_mov_half(struct sljit_compiler *compil

		return SLJIT_SUCCESS; /* Empty instruction. */

	if (src & SLJIT_IMM) {
		if (dst >= SLJIT_TEMPORARY_REG1 && dst <= TMP_REGISTER) { | if (dst <= TMP_REGISTER) {
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
			return emit_do_imm(compiler, 0xb8 + reg_map[dst], srcw); | return emit_do_imm(compiler, MOV_r_i32 + reg_map[dst], srcw);
#else
			return emit_load_imm64(compiler, dst, srcw); | inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, srcw, dst, 0);
| FAIL_IF(!inst);
| *inst = MOV_rm_i32;
| return SLJIT_SUCCESS;
#endif
		}
		code = emit_x86_instruction(compiler, 1 | EX86_HALF_ARG | EX86_NO_REXW | EX86_PREF_66, SLJIT_IMM, srcw, dst, dstw); | inst = emit_x86_instruction(compiler, 1 | EX86_HALF_ARG | EX86_NO_REXW | EX86_PREF_66, SLJIT_IMM, srcw, dst, dstw);
		FAIL_IF(!code); | FAIL_IF(!inst);
		*code = 0xc7; | *inst = MOV_rm_i32;
		return SLJIT_SUCCESS;
	}

	dst_r = (dst >= SLJIT_TEMPORARY_REG1 && dst <= TMP_REGISTER) ? dst : TMP_REGISTER; | dst_r = (dst <= TMP_REGISTER) ? dst : TMP_REGISTER;

	if ((dst & SLJIT_MEM) && (src >= SLJIT_TEMPORARY_REG1 && src <= SLJIT_NO_REGISTERS)) | if ((dst & SLJIT_MEM) && src <= TMP_REGISTER)
		dst_r = src;
	else {
		code = emit_x86_instruction(compiler, 2, dst_r, 0, src, srcw); | inst = emit_x86_instruction(compiler, 2, dst_r, 0, src, srcw);
		FAIL_IF(!code); | FAIL_IF(!inst);
		*code++ = 0x0f; | *inst++ = GROUP_0F;
		*code = sign ? 0xbf : 0xb7; | *inst = sign ? MOVSX_r_rm16 : MOVZX_r_rm16;
	}

	if (dst & SLJIT_MEM) {
		code = emit_x86_instruction(compiler, 1 | EX86_NO_REXW | EX86_PREF_66, dst_r, 0, dst, dstw); | inst = emit_x86_instruction(compiler, 1 | EX86_NO_REXW | EX86_PREF_66, dst_r, 0, dst, dstw);
		FAIL_IF(!code); | FAIL_IF(!inst);
		*code = 0x89; | *inst = MOV_rm_r;
	}

	return SLJIT_SUCCESS;
}

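/* emit_unary: the opcode argument selects the /digit extension of the 0xF7 group
   (e.g. NOT_rm, NEG_rm) and is OR-ed into the ModR/M byte emitted below. */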
static int emit_unary(struct sljit_compiler *compiler, int un_index, | static sljit_si emit_unary(struct sljit_compiler *compiler, sljit_ub opcode,
	int dst, sljit_w dstw, | sljit_si dst, sljit_sw dstw,
	int src, sljit_w srcw) | sljit_si src, sljit_sw srcw)
{
	sljit_ub* code; | sljit_ub* inst;

	if (dst == SLJIT_UNUSED) {
		EMIT_MOV(compiler, TMP_REGISTER, 0, src, srcw);
		code = emit_x86_instruction(compiler, 1, 0, 0, TMP_REGISTER, 0); | inst = emit_x86_instruction(compiler, 1, 0, 0, TMP_REGISTER, 0);
		FAIL_IF(!code); | FAIL_IF(!inst);
		*code++ = 0xf7; | *inst++ = GROUP_F7;
		*code |= (un_index) << 3; | *inst |= opcode;
		return SLJIT_SUCCESS;
	}
	if (dst == src && dstw == srcw) {
		/* Same input and output */
		code = emit_x86_instruction(compiler, 1, 0, 0, dst, dstw); | inst = emit_x86_instruction(compiler, 1, 0, 0, dst, dstw);
		FAIL_IF(!code); | FAIL_IF(!inst);
		*code++ = 0xf7; | *inst++ = GROUP_F7;
		*code |= (un_index) << 3; | *inst |= opcode;
		return SLJIT_SUCCESS;
	}
	if (dst >= SLJIT_TEMPORARY_REG1 && dst <= SLJIT_NO_REGISTERS) { | if (dst <= TMP_REGISTER) {
		EMIT_MOV(compiler, dst, 0, src, srcw);
		code = emit_x86_instruction(compiler, 1, 0, 0, dst, dstw); | inst = emit_x86_instruction(compiler, 1, 0, 0, dst, dstw);
		FAIL_IF(!code); | FAIL_IF(!inst);
		*code++ = 0xf7; | *inst++ = GROUP_F7;
		*code |= (un_index) << 3; | *inst |= opcode;
		return SLJIT_SUCCESS;
	}
	EMIT_MOV(compiler, TMP_REGISTER, 0, src, srcw);
	code = emit_x86_instruction(compiler, 1, 0, 0, TMP_REGISTER, 0); | inst = emit_x86_instruction(compiler, 1, 0, 0, TMP_REGISTER, 0);
	FAIL_IF(!code); | FAIL_IF(!inst);
	*code++ = 0xf7; | *inst++ = GROUP_F7;
	*code |= (un_index) << 3; | *inst |= opcode;
	EMIT_MOV(compiler, dst, dstw, TMP_REGISTER, 0);
	return SLJIT_SUCCESS;
}

static int emit_not_with_flags(struct sljit_compiler *compiler, | static sljit_si emit_not_with_flags(struct sljit_compiler *compiler,
	int dst, sljit_w dstw, | sljit_si dst, sljit_sw dstw,
	int src, sljit_w srcw) | sljit_si src, sljit_sw srcw)
{
	sljit_ub* code; | sljit_ub* inst;

	if (dst == SLJIT_UNUSED) {
		EMIT_MOV(compiler, TMP_REGISTER, 0, src, srcw);
		code = emit_x86_instruction(compiler, 1, 0, 0, TMP_REGISTER, 0); | inst = emit_x86_instruction(compiler, 1, 0, 0, TMP_REGISTER, 0);
		FAIL_IF(!code); | FAIL_IF(!inst);
		*code++ = 0xf7; | *inst++ = GROUP_F7;
		*code |= 0x2 << 3; | *inst |= NOT_rm;
		code = emit_x86_instruction(compiler, 1, TMP_REGISTER, 0, TMP_REGISTER, 0); | inst = emit_x86_instruction(compiler, 1, TMP_REGISTER, 0, TMP_REGISTER, 0);
		FAIL_IF(!code); | FAIL_IF(!inst);
		*code = 0x0b; | *inst = OR_r_rm;
		return SLJIT_SUCCESS;
	}
	if (dst >= SLJIT_TEMPORARY_REG1 && dst <= SLJIT_NO_REGISTERS) { | if (dst <= TMP_REGISTER) {
		EMIT_MOV(compiler, dst, 0, src, srcw);
		code = emit_x86_instruction(compiler, 1, 0, 0, dst, dstw); | inst = emit_x86_instruction(compiler, 1, 0, 0, dst, dstw);
		FAIL_IF(!code); | FAIL_IF(!inst);
		*code++ = 0xf7; | *inst++ = GROUP_F7;
		*code |= 0x2 << 3; | *inst |= NOT_rm;
		code = emit_x86_instruction(compiler, 1, dst, 0, dst, 0); | inst = emit_x86_instruction(compiler, 1, dst, 0, dst, 0);
		FAIL_IF(!code); | FAIL_IF(!inst);
		*code = 0x0b; | *inst = OR_r_rm;
		return SLJIT_SUCCESS;
	}
	EMIT_MOV(compiler, TMP_REGISTER, 0, src, srcw);
	code = emit_x86_instruction(compiler, 1, 0, 0, TMP_REGISTER, 0); | inst = emit_x86_instruction(compiler, 1, 0, 0, TMP_REGISTER, 0);
	FAIL_IF(!code); | FAIL_IF(!inst);
	*code++ = 0xf7; | *inst++ = GROUP_F7;
	*code |= 0x2 << 3; | *inst |= NOT_rm;
	code = emit_x86_instruction(compiler, 1, TMP_REGISTER, 0, TMP_REGISTER, 0); | inst = emit_x86_instruction(compiler, 1, TMP_REGISTER, 0, TMP_REGISTER, 0);
	FAIL_IF(!code); | FAIL_IF(!inst);
	*code = 0x0b; | *inst = OR_r_rm;
	EMIT_MOV(compiler, dst, dstw, TMP_REGISTER, 0);
	return SLJIT_SUCCESS;
}

static int emit_clz(struct sljit_compiler *compiler, int op, | static sljit_si emit_clz(struct sljit_compiler *compiler, sljit_si op_flags,
	int dst, sljit_w dstw, | sljit_si dst, sljit_sw dstw,
	int src, sljit_w srcw) | sljit_si src, sljit_sw srcw)
{
	sljit_ub* code; | sljit_ub* inst;
	int dst_r; | sljit_si dst_r;

	SLJIT_UNUSED_ARG(op); | SLJIT_UNUSED_ARG(op_flags);
	if (SLJIT_UNLIKELY(dst == SLJIT_UNUSED)) {
		/* Just set the zero flag. */
		EMIT_MOV(compiler, TMP_REGISTER, 0, src, srcw);
		code = emit_x86_instruction(compiler, 1, 0, 0, TMP_REGISTER, 0); | inst = emit_x86_instruction(compiler, 1, 0, 0, TMP_REGISTER, 0);
		FAIL_IF(!code); | FAIL_IF(!inst);
		*code++ = 0xf7; | *inst++ = GROUP_F7;
		*code |= 0x2 << 3; | *inst |= NOT_rm;
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
		code = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_IMM, 31, TMP_REGISTER, 0); | inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_IMM, 31, TMP_REGISTER, 0);
#else
		code = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_IMM, !(op & SLJIT_INT_OP) ? 63 : 31, TMP_REGISTER, 0); | inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_IMM, !(op_flags & SLJIT_INT_OP) ? 63 : 31, TMP_REGISTER, 0);
#endif
		FAIL_IF(!code); | FAIL_IF(!inst);
		*code |= 0x5 << 3; | *inst |= SHR;
		return SLJIT_SUCCESS;
	}

	if (SLJIT_UNLIKELY(src & SLJIT_IMM)) {
		EMIT_MOV(compiler, TMP_REGISTER, 0, src, srcw); | EMIT_MOV(compiler, TMP_REGISTER, 0, SLJIT_IMM, srcw);
		src = TMP_REGISTER;
		srcw = 0;
	}

	code = emit_x86_instruction(compiler, 2, TMP_REGISTER, 0, src, srcw); | inst = emit_x86_instruction(compiler, 2, TMP_REGISTER, 0, src, srcw);
	FAIL_IF(!code); | FAIL_IF(!inst);
	*code++ = 0x0f; | *inst++ = GROUP_0F;
	*code = 0xbd; | *inst = BSR_r_rm;

#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
	if (dst >= SLJIT_TEMPORARY_REG1 && dst <= TMP_REGISTER) | if (dst <= TMP_REGISTER)
		dst_r = dst;
	else {
		/* Find an unused temporary register. */
		if ((dst & 0xf) != SLJIT_TEMPORARY_REG1 && (dst & 0xf0) != (SLJIT_TEMPORARY_REG1 << 4)) | if ((dst & 0xf) != SLJIT_SCRATCH_REG1 && (dst & 0xf0) != (SLJIT_SCRATCH_REG1 << 4))
			dst_r = SLJIT_TEMPORARY_REG1; | dst_r = SLJIT_SCRATCH_REG1;
		else if ((dst & 0xf) != SLJIT_TEMPORARY_REG2 && (dst & 0xf0) != (SLJIT_TEMPORARY_REG2 << 4)) | else if ((dst & 0xf) != SLJIT_SCRATCH_REG2 && (dst & 0xf0) != (SLJIT_SCRATCH_REG2 << 4))
			dst_r = SLJIT_TEMPORARY_REG2; | dst_r = SLJIT_SCRATCH_REG2;
		else
			dst_r = SLJIT_TEMPORARY_REG3; | dst_r = SLJIT_SCRATCH_REG3;
		EMIT_MOV(compiler, dst, dstw, dst_r, 0);
	}
	EMIT_MOV(compiler, dst_r, 0, SLJIT_IMM, 32 + 31);
#else
	dst_r = (dst >= SLJIT_TEMPORARY_REG1 && dst <= TMP_REGISTER) ? dst : TMP_REG2; | dst_r = (dst <= TMP_REGISTER) ? dst : TMP_REG2;
	compiler->mode32 = 0;
	EMIT_MOV(compiler, dst_r, 0, SLJIT_IMM, !(op & SLJIT_INT_OP) ? 64 + 63 : 32 + 31); | EMIT_MOV(compiler, dst_r, 0, SLJIT_IMM, !(op_flags & SLJIT_INT_OP) ? 64 + 63 : 32 + 31);
	compiler->mode32 = op & SLJIT_INT_OP; | compiler->mode32 = op_flags & SLJIT_INT_OP;
#endif

	code = emit_x86_instruction(compiler, 2, dst_r, 0, TMP_REGISTER, 0); | if (cpu_has_cmov == -1)
	FAIL_IF(!code); | get_cpu_features();
	*code++ = 0x0f; |
	*code = 0x45; |

	if (cpu_has_cmov) {
		inst = emit_x86_instruction(compiler, 2, dst_r, 0, TMP_REGISTER, 0);
		FAIL_IF(!inst);
		*inst++ = GROUP_0F;
		*inst = CMOVNE_r_rm;
	} else {
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
	code = emit_x86_instruction(compiler, 1 | EX86_BIN_INS, SLJIT_IMM, 31, dst_r, 0); | inst = (sljit_ub*)ensure_buf(compiler, 1 + 4);
| FAIL_IF(!inst);
| INC_SIZE(4);
|
| *inst++ = JE_i8;
| *inst++ = 2;
| *inst++ = MOV_r_rm;
| *inst++ = MOD_REG | (reg_map[dst_r] << 3) | reg_map[TMP_REGISTER];
#else
	code = emit_x86_instruction(compiler, 1 | EX86_BIN_INS, SLJIT_IMM, !(op & SLJIT_INT_OP) ? 63 : 31, dst_r, 0); | inst = (sljit_ub*)ensure_buf(compiler, 1 + 5);
| FAIL_IF(!inst);
| INC_SIZE(5);
|
| *inst++ = JE_i8;
| *inst++ = 3;
| *inst++ = REX_W | (reg_map[dst_r] >= 8 ? REX_R : 0) | (reg_map[TMP_REGISTER] >= 8 ? REX_B : 0);
| *inst++ = MOV_r_rm;
| *inst++ = MOD_REG | (reg_lmap[dst_r] << 3) | reg_lmap[TMP_REGISTER];
#endif
	FAIL_IF(!code); | }
	*(code + 1) |= 0x6 << 3; |

#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
	inst = emit_x86_instruction(compiler, 1 | EX86_BIN_INS, SLJIT_IMM, 31, dst_r, 0);
#else
	inst = emit_x86_instruction(compiler, 1 | EX86_BIN_INS, SLJIT_IMM, !(op_flags & SLJIT_INT_OP) ? 63 : 31, dst_r, 0);
#endif
	FAIL_IF(!inst);
	*(inst + 1) |= XOR;

#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
	if (dst & SLJIT_MEM) {
		code = emit_x86_instruction(compiler, 1, dst_r, 0, dst, dstw); | inst = emit_x86_instruction(compiler, 1, dst_r, 0, dst, dstw);
		FAIL_IF(!code); | FAIL_IF(!inst);
		*code = 0x87; | *inst = XCHG_r_rm;
	}
#else
	if (dst & SLJIT_MEM)
Line 1041 static int emit_clz(struct sljit_compiler *compiler, i
|
Line 1239 static int emit_clz(struct sljit_compiler *compiler, i
|
return SLJIT_SUCCESS; |
return SLJIT_SUCCESS; |
} |
} |
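
Note: the BSR-based sequence that replaces the old unconditional CMOV above can be modeled in portable C. The MOV that preloads the destination with 32 + 31 does not alter EFLAGS, so the CMOVNE (or the JE + MOV fallback on CPUs without CMOV) still observes the zero flag produced by BSR. A minimal sketch of the 32-bit arithmetic follows; the helper name is made up and belongs to neither revision.

/* Illustrative model of the emitted 32-bit CLZ sequence (hypothetical helper). */
static unsigned int model_clz32(unsigned int value)
{
	unsigned int result = 32 + 31;	/* preloaded destination, kept when value == 0 */
	int bit;

	/* Stand-in for BSR: index of the highest set bit; destination untouched on zero input. */
	for (bit = 31; bit >= 0; bit--) {
		if (value & (1u << bit)) {
			result = (unsigned int)bit;	/* stand-in for CMOVNE (or the JE + MOV fallback) */
			break;
		}
	}
	/* The final XOR with 31 turns a bit index into a leading-zero count;
	   for value == 0 it yields (32 + 31) ^ 31 == 32. */
	return result ^ 31;
}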
|
|
SLJIT_API_FUNC_ATTRIBUTE int sljit_emit_op1(struct sljit_compiler *compiler, int op, | SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op1(struct sljit_compiler *compiler, sljit_si op, |
int dst, sljit_w dstw, | sljit_si dst, sljit_sw dstw, |
int src, sljit_w srcw) | sljit_si src, sljit_sw srcw) |
{ |
{ |
sljit_ub* code; | sljit_ub* inst; |
int update = 0; | sljit_si update = 0; |
| sljit_si op_flags = GET_ALL_FLAGS(op); |
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32) |
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32) |
int dst_is_ereg = 0; | sljit_si dst_is_ereg = 0; |
int src_is_ereg = 0; | sljit_si src_is_ereg = 0; |
#else |
#else |
#define src_is_ereg 0 | # define src_is_ereg 0 |
#endif |
#endif |
|
|
CHECK_ERROR(); |
CHECK_ERROR(); |
Line 1062 SLJIT_API_FUNC_ATTRIBUTE int sljit_emit_op1(struct slj
|
Line 1261 SLJIT_API_FUNC_ATTRIBUTE int sljit_emit_op1(struct slj
|
CHECK_EXTRA_REGS(dst, dstw, dst_is_ereg = 1); |
CHECK_EXTRA_REGS(dst, dstw, dst_is_ereg = 1); |
CHECK_EXTRA_REGS(src, srcw, src_is_ereg = 1); |
CHECK_EXTRA_REGS(src, srcw, src_is_ereg = 1); |
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64) |
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64) |
compiler->mode32 = op & SLJIT_INT_OP; | compiler->mode32 = op_flags & SLJIT_INT_OP; |
#endif |
#endif |
|
|
if (GET_OPCODE(op) >= SLJIT_MOV && GET_OPCODE(op) <= SLJIT_MOVU_SI) { | op = GET_OPCODE(op); |
op = GET_OPCODE(op); | if (op >= SLJIT_MOV && op <= SLJIT_MOVU_P) { |
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64) |
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64) |
compiler->mode32 = 0; |
compiler->mode32 = 0; |
#endif |
#endif |
|
|
SLJIT_COMPILE_ASSERT(SLJIT_MOV + 7 == SLJIT_MOVU, movu_offset); | if (op_flags & SLJIT_INT_OP) { |
| if (src <= TMP_REGISTER && src == dst) { |
| if (!TYPE_CAST_NEEDED(op)) |
| return SLJIT_SUCCESS; |
| } |
| #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64) |
| if (op == SLJIT_MOV_SI && (src & SLJIT_MEM)) |
| op = SLJIT_MOV_UI; |
| if (op == SLJIT_MOVU_SI && (src & SLJIT_MEM)) |
| op = SLJIT_MOVU_UI; |
| if (op == SLJIT_MOV_UI && (src & SLJIT_IMM)) |
| op = SLJIT_MOV_SI; |
| if (op == SLJIT_MOVU_UI && (src & SLJIT_IMM)) |
| op = SLJIT_MOVU_SI; |
| #endif |
| } |
| |
| SLJIT_COMPILE_ASSERT(SLJIT_MOV + 8 == SLJIT_MOVU, movu_offset); |
if (op >= SLJIT_MOVU) { |
if (op >= SLJIT_MOVU) { |
update = 1; |
update = 1; |
op -= 7; | op -= 8; |
} |
} |
|
|
if (src & SLJIT_IMM) { |
if (src & SLJIT_IMM) { |
switch (op) { |
switch (op) { |
case SLJIT_MOV_UB: |
case SLJIT_MOV_UB: |
srcw = (unsigned char)srcw; | srcw = (sljit_ub)srcw; |
break; |
break; |
case SLJIT_MOV_SB: |
case SLJIT_MOV_SB: |
srcw = (signed char)srcw; | srcw = (sljit_sb)srcw; |
break; |
break; |
case SLJIT_MOV_UH: |
case SLJIT_MOV_UH: |
srcw = (unsigned short)srcw; | srcw = (sljit_uh)srcw; |
break; |
break; |
case SLJIT_MOV_SH: |
case SLJIT_MOV_SH: |
srcw = (signed short)srcw; | srcw = (sljit_sh)srcw; |
break; |
break; |
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64) |
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64) |
case SLJIT_MOV_UI: |
case SLJIT_MOV_UI: |
srcw = (unsigned int)srcw; | srcw = (sljit_ui)srcw; |
break; |
break; |
case SLJIT_MOV_SI: |
case SLJIT_MOV_SI: |
srcw = (signed int)srcw; | srcw = (sljit_si)srcw; |
break; |
break; |
#endif |
#endif |
} |
} |
Line 1107 SLJIT_API_FUNC_ATTRIBUTE int sljit_emit_op1(struct slj
|
Line 1323 SLJIT_API_FUNC_ATTRIBUTE int sljit_emit_op1(struct slj
|
} |
} |
|
|
if (SLJIT_UNLIKELY(update) && (src & SLJIT_MEM) && !src_is_ereg && (src & 0xf) && (srcw != 0 || (src & 0xf0) != 0)) { |
if (SLJIT_UNLIKELY(update) && (src & SLJIT_MEM) && !src_is_ereg && (src & 0xf) && (srcw != 0 || (src & 0xf0) != 0)) { |
code = emit_x86_instruction(compiler, 1, src & 0xf, 0, src, srcw); | inst = emit_x86_instruction(compiler, 1, src & 0xf, 0, src, srcw); |
FAIL_IF(!code); | FAIL_IF(!inst); |
*code = 0x8d; | *inst = LEA_r_m; |
src &= SLJIT_MEM | 0xf; |
src &= SLJIT_MEM | 0xf; |
srcw = 0; |
srcw = 0; |
} |
} |
|
|
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32) |
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32) |
if (SLJIT_UNLIKELY(dst_is_ereg) && (!(op == SLJIT_MOV || op == SLJIT_MOV_UI || op == SLJIT_MOV_SI) || (src & SLJIT_MEM))) { | if (SLJIT_UNLIKELY(dst_is_ereg) && (!(op == SLJIT_MOV || op == SLJIT_MOV_UI || op == SLJIT_MOV_SI || op == SLJIT_MOV_P) || (src & SLJIT_MEM))) { |
SLJIT_ASSERT(dst == SLJIT_MEM1(SLJIT_LOCALS_REG)); |
SLJIT_ASSERT(dst == SLJIT_MEM1(SLJIT_LOCALS_REG)); |
dst = TMP_REGISTER; |
dst = TMP_REGISTER; |
} |
} |
Line 1123 SLJIT_API_FUNC_ATTRIBUTE int sljit_emit_op1(struct slj
|
Line 1339 SLJIT_API_FUNC_ATTRIBUTE int sljit_emit_op1(struct slj
|
|
|
switch (op) { |
switch (op) { |
case SLJIT_MOV: |
case SLJIT_MOV: |
|
case SLJIT_MOV_P: |
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32) |
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32) |
case SLJIT_MOV_UI: |
case SLJIT_MOV_UI: |
case SLJIT_MOV_SI: |
case SLJIT_MOV_SI: |
Line 1130 SLJIT_API_FUNC_ATTRIBUTE int sljit_emit_op1(struct slj
|
Line 1347 SLJIT_API_FUNC_ATTRIBUTE int sljit_emit_op1(struct slj
|
FAIL_IF(emit_mov(compiler, dst, dstw, src, srcw)); |
FAIL_IF(emit_mov(compiler, dst, dstw, src, srcw)); |
break; |
break; |
case SLJIT_MOV_UB: |
case SLJIT_MOV_UB: |
FAIL_IF(emit_mov_byte(compiler, 0, dst, dstw, src, (src & SLJIT_IMM) ? (unsigned char)srcw : srcw)); | FAIL_IF(emit_mov_byte(compiler, 0, dst, dstw, src, srcw)); |
break; |
break; |
case SLJIT_MOV_SB: |
case SLJIT_MOV_SB: |
FAIL_IF(emit_mov_byte(compiler, 1, dst, dstw, src, (src & SLJIT_IMM) ? (signed char)srcw : srcw)); | FAIL_IF(emit_mov_byte(compiler, 1, dst, dstw, src, srcw)); |
break; |
break; |
case SLJIT_MOV_UH: |
case SLJIT_MOV_UH: |
FAIL_IF(emit_mov_half(compiler, 0, dst, dstw, src, (src & SLJIT_IMM) ? (unsigned short)srcw : srcw)); | FAIL_IF(emit_mov_half(compiler, 0, dst, dstw, src, srcw)); |
break; |
break; |
case SLJIT_MOV_SH: |
case SLJIT_MOV_SH: |
FAIL_IF(emit_mov_half(compiler, 1, dst, dstw, src, (src & SLJIT_IMM) ? (signed short)srcw : srcw)); | FAIL_IF(emit_mov_half(compiler, 1, dst, dstw, src, srcw)); |
break; |
break; |
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64) |
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64) |
case SLJIT_MOV_UI: |
case SLJIT_MOV_UI: |
FAIL_IF(emit_mov_int(compiler, 0, dst, dstw, src, (src & SLJIT_IMM) ? (unsigned int)srcw : srcw)); | FAIL_IF(emit_mov_int(compiler, 0, dst, dstw, src, srcw)); |
break; |
break; |
case SLJIT_MOV_SI: |
case SLJIT_MOV_SI: |
FAIL_IF(emit_mov_int(compiler, 1, dst, dstw, src, (src & SLJIT_IMM) ? (signed int)srcw : srcw)); | FAIL_IF(emit_mov_int(compiler, 1, dst, dstw, src, srcw)); |
break; |
break; |
#endif |
#endif |
} |
} |
Line 1157 SLJIT_API_FUNC_ATTRIBUTE int sljit_emit_op1(struct slj
|
Line 1374 SLJIT_API_FUNC_ATTRIBUTE int sljit_emit_op1(struct slj
|
#endif |
#endif |
|
|
if (SLJIT_UNLIKELY(update) && (dst & SLJIT_MEM) && (dst & 0xf) && (dstw != 0 || (dst & 0xf0) != 0)) { |
if (SLJIT_UNLIKELY(update) && (dst & SLJIT_MEM) && (dst & 0xf) && (dstw != 0 || (dst & 0xf0) != 0)) { |
code = emit_x86_instruction(compiler, 1, dst & 0xf, 0, dst, dstw); | inst = emit_x86_instruction(compiler, 1, dst & 0xf, 0, dst, dstw); |
FAIL_IF(!code); | FAIL_IF(!inst); |
*code = 0x8d; | *inst = LEA_r_m; |
} |
} |
return SLJIT_SUCCESS; |
return SLJIT_SUCCESS; |
} |
} |
|
|
if (SLJIT_UNLIKELY(GET_FLAGS(op))) | if (SLJIT_UNLIKELY(GET_FLAGS(op_flags))) |
compiler->flags_saved = 0; |
compiler->flags_saved = 0; |
|
|
switch (GET_OPCODE(op)) { | switch (op) { |
case SLJIT_NOT: |
case SLJIT_NOT: |
if (SLJIT_UNLIKELY(op & SLJIT_SET_E)) | if (SLJIT_UNLIKELY(op_flags & SLJIT_SET_E)) |
return emit_not_with_flags(compiler, dst, dstw, src, srcw); |
return emit_not_with_flags(compiler, dst, dstw, src, srcw); |
return emit_unary(compiler, 0x2, dst, dstw, src, srcw); | return emit_unary(compiler, NOT_rm, dst, dstw, src, srcw); |
|
|
case SLJIT_NEG: |
case SLJIT_NEG: |
if (SLJIT_UNLIKELY(op & SLJIT_KEEP_FLAGS) && !compiler->flags_saved) | if (SLJIT_UNLIKELY(op_flags & SLJIT_KEEP_FLAGS) && !compiler->flags_saved) |
FAIL_IF(emit_save_flags(compiler)); |
FAIL_IF(emit_save_flags(compiler)); |
return emit_unary(compiler, 0x3, dst, dstw, src, srcw); | return emit_unary(compiler, NEG_rm, dst, dstw, src, srcw); |
|
|
case SLJIT_CLZ: |
case SLJIT_CLZ: |
if (SLJIT_UNLIKELY(op & SLJIT_KEEP_FLAGS) && !compiler->flags_saved) | if (SLJIT_UNLIKELY(op_flags & SLJIT_KEEP_FLAGS) && !compiler->flags_saved) |
FAIL_IF(emit_save_flags(compiler)); |
FAIL_IF(emit_save_flags(compiler)); |
return emit_clz(compiler, op, dst, dstw, src, srcw); | return emit_clz(compiler, op_flags, dst, dstw, src, srcw); |
} |
} |
|
|
return SLJIT_SUCCESS; |
return SLJIT_SUCCESS; |
|
|
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64) |
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64) |
#undef src_is_ereg | # undef src_is_ereg |
#endif |
#endif |
} |
} |
|
|
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64) |
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64) |
|
|
#define BINARY_IMM(_op_imm_, _op_mr_, immw, arg, argw) \ | #define BINARY_IMM(op_imm, op_mr, immw, arg, argw) \ |
if (IS_HALFWORD(immw) || compiler->mode32) { \ |
if (IS_HALFWORD(immw) || compiler->mode32) { \ |
code = emit_x86_instruction(compiler, 1 | EX86_BIN_INS, SLJIT_IMM, immw, arg, argw); \ | inst = emit_x86_instruction(compiler, 1 | EX86_BIN_INS, SLJIT_IMM, immw, arg, argw); \ |
FAIL_IF(!code); \ | FAIL_IF(!inst); \ |
*(code + 1) |= (_op_imm_); \ | *(inst + 1) |= (op_imm); \ |
} \ |
} \ |
else { \ |
else { \ |
FAIL_IF(emit_load_imm64(compiler, TMP_REG2, immw)); \ |
FAIL_IF(emit_load_imm64(compiler, TMP_REG2, immw)); \ |
code = emit_x86_instruction(compiler, 1, TMP_REG2, 0, arg, argw); \ | inst = emit_x86_instruction(compiler, 1, TMP_REG2, 0, arg, argw); \ |
FAIL_IF(!code); \ | FAIL_IF(!inst); \ |
*code = (_op_mr_); \ | *inst = (op_mr); \ |
} |
} |
|
|
#define BINARY_EAX_IMM(_op_eax_imm_, immw) \ | #define BINARY_EAX_IMM(op_eax_imm, immw) \ |
FAIL_IF(emit_do_imm32(compiler, (!compiler->mode32) ? REX_W : 0, (_op_eax_imm_), immw)) | FAIL_IF(emit_do_imm32(compiler, (!compiler->mode32) ? REX_W : 0, (op_eax_imm), immw)) |
|
|
#else |
#else |
|
|
#define BINARY_IMM(_op_imm_, _op_mr_, immw, arg, argw) \ | #define BINARY_IMM(op_imm, op_mr, immw, arg, argw) \ |
code = emit_x86_instruction(compiler, 1 | EX86_BIN_INS, SLJIT_IMM, immw, arg, argw); \ | inst = emit_x86_instruction(compiler, 1 | EX86_BIN_INS, SLJIT_IMM, immw, arg, argw); \ |
FAIL_IF(!code); \ | FAIL_IF(!inst); \ |
*(code + 1) |= (_op_imm_); | *(inst + 1) |= (op_imm); |
|
|
#define BINARY_EAX_IMM(_op_eax_imm_, immw) \ | #define BINARY_EAX_IMM(op_eax_imm, immw) \ |
FAIL_IF(emit_do_imm(compiler, (_op_eax_imm_), immw)) | FAIL_IF(emit_do_imm(compiler, (op_eax_imm), immw)) |
|
|
#endif |
#endif |
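
Note: the BINARY_* macros above choose between three immediate strategies: the accumulator short form (BINARY_EAX_IMM), the group-1 opcode with an inline immediate, and, on x86-64 only, loading a constant that does not fit in a sign-extended 32-bit field into TMP_REG2 first. A minimal sketch of that last decision, assuming IS_HALFWORD tests for the signed 32-bit range:

/* Hypothetical helper mirroring the 64-bit BINARY_IMM size check (sketch only).
   Returns 1 when the constant must be loaded into TMP_REG2 first, 0 when an
   inline immediate encoding can be used. */
static int needs_imm64_temp(long long immw, int mode32)
{
	return !(mode32 || (immw >= -2147483647LL - 1 && immw <= 2147483647LL));
}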
|
|
static int emit_cum_binary(struct sljit_compiler *compiler, | static sljit_si emit_cum_binary(struct sljit_compiler *compiler, |
sljit_ub op_rm, sljit_ub op_mr, sljit_ub op_imm, sljit_ub op_eax_imm, |
sljit_ub op_rm, sljit_ub op_mr, sljit_ub op_imm, sljit_ub op_eax_imm, |
int dst, sljit_w dstw, | sljit_si dst, sljit_sw dstw, |
int src1, sljit_w src1w, | sljit_si src1, sljit_sw src1w, |
int src2, sljit_w src2w) | sljit_si src2, sljit_sw src2w) |
{ |
{ |
sljit_ub* code; | sljit_ub* inst; |
|
|
if (dst == SLJIT_UNUSED) { |
if (dst == SLJIT_UNUSED) { |
EMIT_MOV(compiler, TMP_REGISTER, 0, src1, src1w); |
EMIT_MOV(compiler, TMP_REGISTER, 0, src1, src1w); |
Line 1235 static int emit_cum_binary(struct sljit_compiler *comp
|
Line 1452 static int emit_cum_binary(struct sljit_compiler *comp
|
BINARY_IMM(op_imm, op_mr, src2w, TMP_REGISTER, 0); |
BINARY_IMM(op_imm, op_mr, src2w, TMP_REGISTER, 0); |
} |
} |
else { |
else { |
code = emit_x86_instruction(compiler, 1, TMP_REGISTER, 0, src2, src2w); | inst = emit_x86_instruction(compiler, 1, TMP_REGISTER, 0, src2, src2w); |
FAIL_IF(!code); | FAIL_IF(!inst); |
*code = op_rm; | *inst = op_rm; |
} |
} |
return SLJIT_SUCCESS; |
return SLJIT_SUCCESS; |
} |
} |
Line 1245 static int emit_cum_binary(struct sljit_compiler *comp
|
Line 1462 static int emit_cum_binary(struct sljit_compiler *comp
|
if (dst == src1 && dstw == src1w) { |
if (dst == src1 && dstw == src1w) { |
if (src2 & SLJIT_IMM) { |
if (src2 & SLJIT_IMM) { |
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64) |
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64) |
if ((dst == SLJIT_TEMPORARY_REG1) && (src2w > 127 || src2w < -128) && (compiler->mode32 || IS_HALFWORD(src2w))) { | if ((dst == SLJIT_SCRATCH_REG1) && (src2w > 127 || src2w < -128) && (compiler->mode32 || IS_HALFWORD(src2w))) { |
#else |
#else |
if ((dst == SLJIT_TEMPORARY_REG1) && (src2w > 127 || src2w < -128)) { | if ((dst == SLJIT_SCRATCH_REG1) && (src2w > 127 || src2w < -128)) { |
#endif |
#endif |
BINARY_EAX_IMM(op_eax_imm, src2w); |
BINARY_EAX_IMM(op_eax_imm, src2w); |
} |
} |
Line 1255 static int emit_cum_binary(struct sljit_compiler *comp
|
Line 1472 static int emit_cum_binary(struct sljit_compiler *comp
|
BINARY_IMM(op_imm, op_mr, src2w, dst, dstw); |
BINARY_IMM(op_imm, op_mr, src2w, dst, dstw); |
} |
} |
} |
} |
else if (dst >= SLJIT_TEMPORARY_REG1 && dst <= SLJIT_NO_REGISTERS) { | else if (dst <= TMP_REGISTER) { |
code = emit_x86_instruction(compiler, 1, dst, dstw, src2, src2w); | inst = emit_x86_instruction(compiler, 1, dst, dstw, src2, src2w); |
FAIL_IF(!code); | FAIL_IF(!inst); |
*code = op_rm; | *inst = op_rm; |
} |
} |
else if (src2 >= SLJIT_TEMPORARY_REG1 && src2 <= TMP_REGISTER) { | else if (src2 <= TMP_REGISTER) { |
/* Special exception for sljit_emit_cond_value. */ | /* Special exception for sljit_emit_op_flags. */ |
code = emit_x86_instruction(compiler, 1, src2, src2w, dst, dstw); | inst = emit_x86_instruction(compiler, 1, src2, src2w, dst, dstw); |
FAIL_IF(!code); | FAIL_IF(!inst); |
*code = op_mr; | *inst = op_mr; |
} |
} |
else { |
else { |
EMIT_MOV(compiler, TMP_REGISTER, 0, src2, src2w); |
EMIT_MOV(compiler, TMP_REGISTER, 0, src2, src2w); |
code = emit_x86_instruction(compiler, 1, TMP_REGISTER, 0, dst, dstw); | inst = emit_x86_instruction(compiler, 1, TMP_REGISTER, 0, dst, dstw); |
FAIL_IF(!code); | FAIL_IF(!inst); |
*code = op_mr; | *inst = op_mr; |
} |
} |
return SLJIT_SUCCESS; |
return SLJIT_SUCCESS; |
} |
} |
Line 1279 static int emit_cum_binary(struct sljit_compiler *comp
|
Line 1496 static int emit_cum_binary(struct sljit_compiler *comp
|
if (dst == src2 && dstw == src2w) { |
if (dst == src2 && dstw == src2w) { |
if (src1 & SLJIT_IMM) { |
if (src1 & SLJIT_IMM) { |
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64) |
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64) |
if ((dst == SLJIT_TEMPORARY_REG1) && (src1w > 127 || src1w < -128) && (compiler->mode32 || IS_HALFWORD(src1w))) { | if ((dst == SLJIT_SCRATCH_REG1) && (src1w > 127 || src1w < -128) && (compiler->mode32 || IS_HALFWORD(src1w))) { |
#else |
#else |
if ((dst == SLJIT_TEMPORARY_REG1) && (src1w > 127 || src1w < -128)) { | if ((dst == SLJIT_SCRATCH_REG1) && (src1w > 127 || src1w < -128)) { |
#endif |
#endif |
BINARY_EAX_IMM(op_eax_imm, src1w); |
BINARY_EAX_IMM(op_eax_imm, src1w); |
} |
} |
Line 1289 static int emit_cum_binary(struct sljit_compiler *comp
|
Line 1506 static int emit_cum_binary(struct sljit_compiler *comp
|
BINARY_IMM(op_imm, op_mr, src1w, dst, dstw); |
BINARY_IMM(op_imm, op_mr, src1w, dst, dstw); |
} |
} |
} |
} |
else if (dst >= SLJIT_TEMPORARY_REG1 && dst <= SLJIT_NO_REGISTERS) { | else if (dst <= TMP_REGISTER) { |
code = emit_x86_instruction(compiler, 1, dst, dstw, src1, src1w); | inst = emit_x86_instruction(compiler, 1, dst, dstw, src1, src1w); |
FAIL_IF(!code); | FAIL_IF(!inst); |
*code = op_rm; | *inst = op_rm; |
} |
} |
else if (src1 >= SLJIT_TEMPORARY_REG1 && src1 <= SLJIT_NO_REGISTERS) { | else if (src1 <= TMP_REGISTER) { |
code = emit_x86_instruction(compiler, 1, src1, src1w, dst, dstw); | inst = emit_x86_instruction(compiler, 1, src1, src1w, dst, dstw); |
FAIL_IF(!code); | FAIL_IF(!inst); |
*code = op_mr; | *inst = op_mr; |
} |
} |
else { |
else { |
EMIT_MOV(compiler, TMP_REGISTER, 0, src1, src1w); |
EMIT_MOV(compiler, TMP_REGISTER, 0, src1, src1w); |
code = emit_x86_instruction(compiler, 1, TMP_REGISTER, 0, dst, dstw); | inst = emit_x86_instruction(compiler, 1, TMP_REGISTER, 0, dst, dstw); |
FAIL_IF(!code); | FAIL_IF(!inst); |
*code = op_mr; | *inst = op_mr; |
} |
} |
return SLJIT_SUCCESS; |
return SLJIT_SUCCESS; |
} |
} |
|
|
/* General version. */ |
/* General version. */ |
if (dst >= SLJIT_TEMPORARY_REG1 && dst <= SLJIT_NO_REGISTERS) { | if (dst <= TMP_REGISTER) { |
EMIT_MOV(compiler, dst, 0, src1, src1w); |
EMIT_MOV(compiler, dst, 0, src1, src1w); |
if (src2 & SLJIT_IMM) { |
if (src2 & SLJIT_IMM) { |
BINARY_IMM(op_imm, op_mr, src2w, dst, 0); |
BINARY_IMM(op_imm, op_mr, src2w, dst, 0); |
} |
} |
else { |
else { |
code = emit_x86_instruction(compiler, 1, dst, 0, src2, src2w); | inst = emit_x86_instruction(compiler, 1, dst, 0, src2, src2w); |
FAIL_IF(!code); | FAIL_IF(!inst); |
*code = op_rm; | *inst = op_rm; |
} |
} |
} |
} |
else { |
else { |
Line 1327 static int emit_cum_binary(struct sljit_compiler *comp
|
Line 1544 static int emit_cum_binary(struct sljit_compiler *comp
|
BINARY_IMM(op_imm, op_mr, src2w, TMP_REGISTER, 0); |
BINARY_IMM(op_imm, op_mr, src2w, TMP_REGISTER, 0); |
} |
} |
else { |
else { |
code = emit_x86_instruction(compiler, 1, TMP_REGISTER, 0, src2, src2w); | inst = emit_x86_instruction(compiler, 1, TMP_REGISTER, 0, src2, src2w); |
FAIL_IF(!code); | FAIL_IF(!inst); |
*code = op_rm; | *inst = op_rm; |
} |
} |
EMIT_MOV(compiler, dst, dstw, TMP_REGISTER, 0); |
EMIT_MOV(compiler, dst, dstw, TMP_REGISTER, 0); |
} |
} |
Line 1337 static int emit_cum_binary(struct sljit_compiler *comp
|
Line 1554 static int emit_cum_binary(struct sljit_compiler *comp
|
return SLJIT_SUCCESS; |
return SLJIT_SUCCESS; |
} |
} |
|
|
static int emit_non_cum_binary(struct sljit_compiler *compiler, | static sljit_si emit_non_cum_binary(struct sljit_compiler *compiler, |
sljit_ub op_rm, sljit_ub op_mr, sljit_ub op_imm, sljit_ub op_eax_imm, |
sljit_ub op_rm, sljit_ub op_mr, sljit_ub op_imm, sljit_ub op_eax_imm, |
int dst, sljit_w dstw, | sljit_si dst, sljit_sw dstw, |
int src1, sljit_w src1w, | sljit_si src1, sljit_sw src1w, |
int src2, sljit_w src2w) | sljit_si src2, sljit_sw src2w) |
{ |
{ |
sljit_ub* code; | sljit_ub* inst; |
|
|
if (dst == SLJIT_UNUSED) { |
if (dst == SLJIT_UNUSED) { |
EMIT_MOV(compiler, TMP_REGISTER, 0, src1, src1w); |
EMIT_MOV(compiler, TMP_REGISTER, 0, src1, src1w); |
Line 1351 static int emit_non_cum_binary(struct sljit_compiler *
|
Line 1568 static int emit_non_cum_binary(struct sljit_compiler *
|
BINARY_IMM(op_imm, op_mr, src2w, TMP_REGISTER, 0); |
BINARY_IMM(op_imm, op_mr, src2w, TMP_REGISTER, 0); |
} |
} |
else { |
else { |
code = emit_x86_instruction(compiler, 1, TMP_REGISTER, 0, src2, src2w); | inst = emit_x86_instruction(compiler, 1, TMP_REGISTER, 0, src2, src2w); |
FAIL_IF(!code); | FAIL_IF(!inst); |
*code = op_rm; | *inst = op_rm; |
} |
} |
return SLJIT_SUCCESS; |
return SLJIT_SUCCESS; |
} |
} |
Line 1361 static int emit_non_cum_binary(struct sljit_compiler *
|
Line 1578 static int emit_non_cum_binary(struct sljit_compiler *
|
if (dst == src1 && dstw == src1w) { |
if (dst == src1 && dstw == src1w) { |
if (src2 & SLJIT_IMM) { |
if (src2 & SLJIT_IMM) { |
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64) |
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64) |
if ((dst == SLJIT_TEMPORARY_REG1) && (src2w > 127 || src2w < -128) && (compiler->mode32 || IS_HALFWORD(src2w))) { | if ((dst == SLJIT_SCRATCH_REG1) && (src2w > 127 || src2w < -128) && (compiler->mode32 || IS_HALFWORD(src2w))) { |
#else |
#else |
if ((dst == SLJIT_TEMPORARY_REG1) && (src2w > 127 || src2w < -128)) { | if ((dst == SLJIT_SCRATCH_REG1) && (src2w > 127 || src2w < -128)) { |
#endif |
#endif |
BINARY_EAX_IMM(op_eax_imm, src2w); |
BINARY_EAX_IMM(op_eax_imm, src2w); |
} |
} |
Line 1371 static int emit_non_cum_binary(struct sljit_compiler *
|
Line 1588 static int emit_non_cum_binary(struct sljit_compiler *
|
BINARY_IMM(op_imm, op_mr, src2w, dst, dstw); |
BINARY_IMM(op_imm, op_mr, src2w, dst, dstw); |
} |
} |
} |
} |
else if (dst >= SLJIT_TEMPORARY_REG1 && dst <= SLJIT_NO_REGISTERS) { | else if (dst <= TMP_REGISTER) { |
code = emit_x86_instruction(compiler, 1, dst, dstw, src2, src2w); | inst = emit_x86_instruction(compiler, 1, dst, dstw, src2, src2w); |
FAIL_IF(!code); | FAIL_IF(!inst); |
*code = op_rm; | *inst = op_rm; |
} |
} |
else if (src2 >= SLJIT_TEMPORARY_REG1 && src2 <= SLJIT_NO_REGISTERS) { | else if (src2 <= TMP_REGISTER) { |
code = emit_x86_instruction(compiler, 1, src2, src2w, dst, dstw); | inst = emit_x86_instruction(compiler, 1, src2, src2w, dst, dstw); |
FAIL_IF(!code); | FAIL_IF(!inst); |
*code = op_mr; | *inst = op_mr; |
} |
} |
else { |
else { |
EMIT_MOV(compiler, TMP_REGISTER, 0, src2, src2w); |
EMIT_MOV(compiler, TMP_REGISTER, 0, src2, src2w); |
code = emit_x86_instruction(compiler, 1, TMP_REGISTER, 0, dst, dstw); | inst = emit_x86_instruction(compiler, 1, TMP_REGISTER, 0, dst, dstw); |
FAIL_IF(!code); | FAIL_IF(!inst); |
*code = op_mr; | *inst = op_mr; |
} |
} |
return SLJIT_SUCCESS; |
return SLJIT_SUCCESS; |
} |
} |
|
|
/* General version. */ |
/* General version. */ |
if ((dst >= SLJIT_TEMPORARY_REG1 && dst <= SLJIT_NO_REGISTERS) && dst != src2) { | if (dst <= TMP_REGISTER && dst != src2) { |
EMIT_MOV(compiler, dst, 0, src1, src1w); |
EMIT_MOV(compiler, dst, 0, src1, src1w); |
if (src2 & SLJIT_IMM) { |
if (src2 & SLJIT_IMM) { |
BINARY_IMM(op_imm, op_mr, src2w, dst, 0); |
BINARY_IMM(op_imm, op_mr, src2w, dst, 0); |
} |
} |
else { |
else { |
code = emit_x86_instruction(compiler, 1, dst, 0, src2, src2w); | inst = emit_x86_instruction(compiler, 1, dst, 0, src2, src2w); |
FAIL_IF(!code); | FAIL_IF(!inst); |
*code = op_rm; | *inst = op_rm; |
} |
} |
} |
} |
else { |
else { |
Line 1409 static int emit_non_cum_binary(struct sljit_compiler *
|
Line 1626 static int emit_non_cum_binary(struct sljit_compiler *
|
BINARY_IMM(op_imm, op_mr, src2w, TMP_REGISTER, 0); |
BINARY_IMM(op_imm, op_mr, src2w, TMP_REGISTER, 0); |
} |
} |
else { |
else { |
code = emit_x86_instruction(compiler, 1, TMP_REGISTER, 0, src2, src2w); | inst = emit_x86_instruction(compiler, 1, TMP_REGISTER, 0, src2, src2w); |
FAIL_IF(!code); | FAIL_IF(!inst); |
*code = op_rm; | *inst = op_rm; |
} |
} |
EMIT_MOV(compiler, dst, dstw, TMP_REGISTER, 0); |
EMIT_MOV(compiler, dst, dstw, TMP_REGISTER, 0); |
} |
} |
Line 1419 static int emit_non_cum_binary(struct sljit_compiler *
|
Line 1636 static int emit_non_cum_binary(struct sljit_compiler *
|
return SLJIT_SUCCESS; |
return SLJIT_SUCCESS; |
} |
} |
|
|
static int emit_mul(struct sljit_compiler *compiler, | static sljit_si emit_mul(struct sljit_compiler *compiler, |
int dst, sljit_w dstw, | sljit_si dst, sljit_sw dstw, |
int src1, sljit_w src1w, | sljit_si src1, sljit_sw src1w, |
int src2, sljit_w src2w) | sljit_si src2, sljit_sw src2w) |
{ |
{ |
sljit_ub* code; | sljit_ub* inst; |
int dst_r; | sljit_si dst_r; |
|
|
dst_r = (dst >= SLJIT_TEMPORARY_REG1 && dst <= SLJIT_NO_REGISTERS) ? dst : TMP_REGISTER; | dst_r = (dst <= TMP_REGISTER) ? dst : TMP_REGISTER; |
|
|
/* Register destination. */ |
/* Register destination. */ |
if (dst_r == src1 && !(src2 & SLJIT_IMM)) { |
if (dst_r == src1 && !(src2 & SLJIT_IMM)) { |
code = emit_x86_instruction(compiler, 2, dst_r, 0, src2, src2w); | inst = emit_x86_instruction(compiler, 2, dst_r, 0, src2, src2w); |
FAIL_IF(!code); | FAIL_IF(!inst); |
*code++ = 0x0f; | *inst++ = GROUP_0F; |
*code = 0xaf; | *inst = IMUL_r_rm; |
} |
} |
else if (dst_r == src2 && !(src1 & SLJIT_IMM)) { |
else if (dst_r == src2 && !(src1 & SLJIT_IMM)) { |
code = emit_x86_instruction(compiler, 2, dst_r, 0, src1, src1w); | inst = emit_x86_instruction(compiler, 2, dst_r, 0, src1, src1w); |
FAIL_IF(!code); | FAIL_IF(!inst); |
*code++ = 0x0f; | *inst++ = GROUP_0F; |
*code = 0xaf; | *inst = IMUL_r_rm; |
} |
} |
else if (src1 & SLJIT_IMM) { |
else if (src1 & SLJIT_IMM) { |
if (src2 & SLJIT_IMM) { |
if (src2 & SLJIT_IMM) { |
Line 1450 static int emit_mul(struct sljit_compiler *compiler,
|
Line 1667 static int emit_mul(struct sljit_compiler *compiler,
|
} |
} |
|
|
if (src1w <= 127 && src1w >= -128) { |
if (src1w <= 127 && src1w >= -128) { |
code = emit_x86_instruction(compiler, 1, dst_r, 0, src2, src2w); | inst = emit_x86_instruction(compiler, 1, dst_r, 0, src2, src2w); |
FAIL_IF(!code); | FAIL_IF(!inst); |
*code = 0x6b; | *inst = IMUL_r_rm_i8; |
code = (sljit_ub*)ensure_buf(compiler, 1 + 1); | inst = (sljit_ub*)ensure_buf(compiler, 1 + 1); |
FAIL_IF(!code); | FAIL_IF(!inst); |
INC_CSIZE(1); | INC_SIZE(1); |
*code = (sljit_b)src1w; | *inst = (sljit_sb)src1w; |
} |
} |
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32) |
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32) |
else { |
else { |
code = emit_x86_instruction(compiler, 1, dst_r, 0, src2, src2w); | inst = emit_x86_instruction(compiler, 1, dst_r, 0, src2, src2w); |
FAIL_IF(!code); | FAIL_IF(!inst); |
*code = 0x69; | *inst = IMUL_r_rm_i32; |
code = (sljit_ub*)ensure_buf(compiler, 1 + 4); | inst = (sljit_ub*)ensure_buf(compiler, 1 + 4); |
FAIL_IF(!code); | FAIL_IF(!inst); |
INC_CSIZE(4); | INC_SIZE(4); |
*(sljit_w*)code = src1w; | *(sljit_sw*)inst = src1w; |
} |
} |
#else |
#else |
else if (IS_HALFWORD(src1w)) { |
else if (IS_HALFWORD(src1w)) { |
code = emit_x86_instruction(compiler, 1, dst_r, 0, src2, src2w); | inst = emit_x86_instruction(compiler, 1, dst_r, 0, src2, src2w); |
FAIL_IF(!code); | FAIL_IF(!inst); |
*code = 0x69; | *inst = IMUL_r_rm_i32; |
code = (sljit_ub*)ensure_buf(compiler, 1 + 4); | inst = (sljit_ub*)ensure_buf(compiler, 1 + 4); |
FAIL_IF(!code); | FAIL_IF(!inst); |
INC_CSIZE(4); | INC_SIZE(4); |
*(sljit_hw*)code = (sljit_hw)src1w; | *(sljit_si*)inst = (sljit_si)src1w; |
} |
} |
else { |
else { |
EMIT_MOV(compiler, TMP_REG2, 0, SLJIT_IMM, src1w); |
EMIT_MOV(compiler, TMP_REG2, 0, SLJIT_IMM, src1w); |
if (dst_r != src2) |
if (dst_r != src2) |
EMIT_MOV(compiler, dst_r, 0, src2, src2w); |
EMIT_MOV(compiler, dst_r, 0, src2, src2w); |
code = emit_x86_instruction(compiler, 2, dst_r, 0, TMP_REG2, 0); | inst = emit_x86_instruction(compiler, 2, dst_r, 0, TMP_REG2, 0); |
FAIL_IF(!code); | FAIL_IF(!inst); |
*code++ = 0x0f; | *inst++ = GROUP_0F; |
*code = 0xaf; | *inst = IMUL_r_rm; |
} |
} |
#endif |
#endif |
} |
} |
Line 1493 static int emit_mul(struct sljit_compiler *compiler,
|
Line 1710 static int emit_mul(struct sljit_compiler *compiler,
|
/* Note: src1 is NOT immediate. */ |
/* Note: src1 is NOT immediate. */ |
|
|
if (src2w <= 127 && src2w >= -128) { |
if (src2w <= 127 && src2w >= -128) { |
code = emit_x86_instruction(compiler, 1, dst_r, 0, src1, src1w); | inst = emit_x86_instruction(compiler, 1, dst_r, 0, src1, src1w); |
FAIL_IF(!code); | FAIL_IF(!inst); |
*code = 0x6b; | *inst = IMUL_r_rm_i8; |
code = (sljit_ub*)ensure_buf(compiler, 1 + 1); | inst = (sljit_ub*)ensure_buf(compiler, 1 + 1); |
FAIL_IF(!code); | FAIL_IF(!inst); |
INC_CSIZE(1); | INC_SIZE(1); |
*code = (sljit_b)src2w; | *inst = (sljit_sb)src2w; |
} |
} |
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32) |
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32) |
else { |
else { |
code = emit_x86_instruction(compiler, 1, dst_r, 0, src1, src1w); | inst = emit_x86_instruction(compiler, 1, dst_r, 0, src1, src1w); |
FAIL_IF(!code); | FAIL_IF(!inst); |
*code = 0x69; | *inst = IMUL_r_rm_i32; |
code = (sljit_ub*)ensure_buf(compiler, 1 + 4); | inst = (sljit_ub*)ensure_buf(compiler, 1 + 4); |
FAIL_IF(!code); | FAIL_IF(!inst); |
INC_CSIZE(4); | INC_SIZE(4); |
*(sljit_w*)code = src2w; | *(sljit_sw*)inst = src2w; |
} |
} |
#else |
#else |
else if (IS_HALFWORD(src2w)) { |
else if (IS_HALFWORD(src2w)) { |
code = emit_x86_instruction(compiler, 1, dst_r, 0, src1, src1w); | inst = emit_x86_instruction(compiler, 1, dst_r, 0, src1, src1w); |
FAIL_IF(!code); | FAIL_IF(!inst); |
*code = 0x69; | *inst = IMUL_r_rm_i32; |
code = (sljit_ub*)ensure_buf(compiler, 1 + 4); | inst = (sljit_ub*)ensure_buf(compiler, 1 + 4); |
FAIL_IF(!code); | FAIL_IF(!inst); |
INC_CSIZE(4); | INC_SIZE(4); |
*(sljit_hw*)code = (sljit_hw)src2w; | *(sljit_si*)inst = (sljit_si)src2w; |
} |
} |
else { |
else { |
EMIT_MOV(compiler, TMP_REG2, 0, SLJIT_IMM, src1w); |
EMIT_MOV(compiler, TMP_REG2, 0, SLJIT_IMM, src1w); |
if (dst_r != src1) |
if (dst_r != src1) |
EMIT_MOV(compiler, dst_r, 0, src1, src1w); |
EMIT_MOV(compiler, dst_r, 0, src1, src1w); |
code = emit_x86_instruction(compiler, 2, dst_r, 0, TMP_REG2, 0); | inst = emit_x86_instruction(compiler, 2, dst_r, 0, TMP_REG2, 0); |
FAIL_IF(!code); | FAIL_IF(!inst); |
*code++ = 0x0f; | *inst++ = GROUP_0F; |
*code = 0xaf; | *inst = IMUL_r_rm; |
} |
} |
#endif |
#endif |
} |
} |
Line 1537 static int emit_mul(struct sljit_compiler *compiler,
|
Line 1754 static int emit_mul(struct sljit_compiler *compiler,
|
if (ADDRESSING_DEPENDS_ON(src2, dst_r)) |
if (ADDRESSING_DEPENDS_ON(src2, dst_r)) |
dst_r = TMP_REGISTER; |
dst_r = TMP_REGISTER; |
EMIT_MOV(compiler, dst_r, 0, src1, src1w); |
EMIT_MOV(compiler, dst_r, 0, src1, src1w); |
code = emit_x86_instruction(compiler, 2, dst_r, 0, src2, src2w); | inst = emit_x86_instruction(compiler, 2, dst_r, 0, src2, src2w); |
FAIL_IF(!code); | FAIL_IF(!inst); |
*code++ = 0x0f; | *inst++ = GROUP_0F; |
*code = 0xaf; | *inst = IMUL_r_rm; |
} |
} |
|
|
if (dst_r == TMP_REGISTER) |
if (dst_r == TMP_REGISTER) |
Line 1549 static int emit_mul(struct sljit_compiler *compiler,
|
Line 1766 static int emit_mul(struct sljit_compiler *compiler,
|
return SLJIT_SUCCESS; |
return SLJIT_SUCCESS; |
} |
} |
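
Note: for multiplications by a constant, emit_mul selects between the sign-extended 8-bit form IMUL r, r/m, imm8 (opcode 0x6b) and the 32-bit form (opcode 0x69); on x86-64, constants outside the signed 32-bit range are first loaded into TMP_REG2 and a plain IMUL r, r/m is emitted instead. A rough sketch of the selection (helper name assumed):

/* 0: imm8 form, 1: imm32 form, 2: load the constant into a register first (64-bit only). */
static int imul_imm_form(long long imm, int is_halfword)
{
	if (imm >= -128 && imm <= 127)
		return 0;
	return is_halfword ? 1 : 2;
}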
|
|
static int emit_lea_binary(struct sljit_compiler *compiler, | static sljit_si emit_lea_binary(struct sljit_compiler *compiler, sljit_si keep_flags, |
int dst, sljit_w dstw, | sljit_si dst, sljit_sw dstw, |
int src1, sljit_w src1w, | sljit_si src1, sljit_sw src1w, |
int src2, sljit_w src2w) | sljit_si src2, sljit_sw src2w) |
{ |
{ |
sljit_ub* code; | sljit_ub* inst; |
int dst_r, done = 0; | sljit_si dst_r, done = 0; |
|
|
/* These cases are better left to be handled the normal way. */ |
/* These cases are better left to be handled the normal way. */ |

if (dst == src1 && dstw == src1w) | if (!keep_flags) { |
return SLJIT_ERR_UNSUPPORTED; | if (dst == src1 && dstw == src1w) |
if (dst == src2 && dstw == src2w) | return SLJIT_ERR_UNSUPPORTED; |
return SLJIT_ERR_UNSUPPORTED; | if (dst == src2 && dstw == src2w) |
| return SLJIT_ERR_UNSUPPORTED; |
| } |
|
|
dst_r = (dst >= SLJIT_TEMPORARY_REG1 && dst <= SLJIT_NO_REGISTERS) ? dst : TMP_REGISTER; | dst_r = (dst <= TMP_REGISTER) ? dst : TMP_REGISTER; |
|
|
if (src1 >= SLJIT_TEMPORARY_REG1 && src1 <= SLJIT_NO_REGISTERS) { | if (src1 <= TMP_REGISTER) { |
if ((src2 >= SLJIT_TEMPORARY_REG1 && src2 <= SLJIT_NO_REGISTERS) || src2 == TMP_REGISTER) { | if (src2 <= TMP_REGISTER || src2 == TMP_REGISTER) { |
code = emit_x86_instruction(compiler, 1, dst_r, 0, SLJIT_MEM2(src1, src2), 0); | inst = emit_x86_instruction(compiler, 1, dst_r, 0, SLJIT_MEM2(src1, src2), 0); |
FAIL_IF(!code); | FAIL_IF(!inst); |
*code = 0x8d; | *inst = LEA_r_m; |
done = 1; |
done = 1; |
} |
} |
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64) |
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64) |
if ((src2 & SLJIT_IMM) && (compiler->mode32 || IS_HALFWORD(src2w))) { |
if ((src2 & SLJIT_IMM) && (compiler->mode32 || IS_HALFWORD(src2w))) { |
code = emit_x86_instruction(compiler, 1, dst_r, 0, SLJIT_MEM1(src1), (int)src2w); | inst = emit_x86_instruction(compiler, 1, dst_r, 0, SLJIT_MEM1(src1), (sljit_si)src2w); |
#else |
#else |
if (src2 & SLJIT_IMM) { |
if (src2 & SLJIT_IMM) { |
code = emit_x86_instruction(compiler, 1, dst_r, 0, SLJIT_MEM1(src1), src2w); | inst = emit_x86_instruction(compiler, 1, dst_r, 0, SLJIT_MEM1(src1), src2w); |
#endif |
#endif |
FAIL_IF(!code); | FAIL_IF(!inst); |
*code = 0x8d; | *inst = LEA_r_m; |
done = 1; |
done = 1; |
} |
} |
} |
} |
else if (src2 >= SLJIT_TEMPORARY_REG1 && src2 <= SLJIT_NO_REGISTERS) { | else if (src2 <= TMP_REGISTER) { |
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64) |
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64) |
if ((src1 & SLJIT_IMM) && (compiler->mode32 || IS_HALFWORD(src1w))) { |
if ((src1 & SLJIT_IMM) && (compiler->mode32 || IS_HALFWORD(src1w))) { |
code = emit_x86_instruction(compiler, 1, dst_r, 0, SLJIT_MEM1(src2), (int)src1w); | inst = emit_x86_instruction(compiler, 1, dst_r, 0, SLJIT_MEM1(src2), (sljit_si)src1w); |
#else |
#else |
if (src1 & SLJIT_IMM) { |
if (src1 & SLJIT_IMM) { |
code = emit_x86_instruction(compiler, 1, dst_r, 0, SLJIT_MEM1(src2), src1w); | inst = emit_x86_instruction(compiler, 1, dst_r, 0, SLJIT_MEM1(src2), src1w); |
#endif |
#endif |
FAIL_IF(!code); | FAIL_IF(!inst); |
*code = 0x8d; | *inst = LEA_r_m; |
done = 1; |
done = 1; |
} |
} |
} |
} |
Line 1606 static int emit_lea_binary(struct sljit_compiler *comp
|
Line 1825 static int emit_lea_binary(struct sljit_compiler *comp
|
return SLJIT_ERR_UNSUPPORTED; |
return SLJIT_ERR_UNSUPPORTED; |
} |
} |
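
Note: emit_lea_binary gains a keep_flags argument because LEA performs the addition as address arithmetic and leaves EFLAGS untouched, unlike ADD; when SLJIT_KEEP_FLAGS is requested the early bail-outs are skipped so the flag-preserving form is used even for in-place additions. A trivial model of what the generated LEA computes (names assumed):

/* lea dst, [src1 + src2] - pure address arithmetic, no status flags are modified. */
static unsigned long model_lea_add(unsigned long src1, unsigned long src2)
{
	return src1 + src2;
}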
|
|
static int emit_cmp_binary(struct sljit_compiler *compiler, | static sljit_si emit_cmp_binary(struct sljit_compiler *compiler, |
int src1, sljit_w src1w, | sljit_si src1, sljit_sw src1w, |
int src2, sljit_w src2w) | sljit_si src2, sljit_sw src2w) |
{ |
{ |
sljit_ub* code; | sljit_ub* inst; |
|
|
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64) |
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64) |
if (src1 == SLJIT_TEMPORARY_REG1 && (src2 & SLJIT_IMM) && (src2w > 127 || src2w < -128) && (compiler->mode32 || IS_HALFWORD(src2w))) { | if (src1 == SLJIT_SCRATCH_REG1 && (src2 & SLJIT_IMM) && (src2w > 127 || src2w < -128) && (compiler->mode32 || IS_HALFWORD(src2w))) { |
#else |
#else |
if (src1 == SLJIT_TEMPORARY_REG1 && (src2 & SLJIT_IMM) && (src2w > 127 || src2w < -128)) { | if (src1 == SLJIT_SCRATCH_REG1 && (src2 & SLJIT_IMM) && (src2w > 127 || src2w < -128)) { |
#endif |
#endif |
BINARY_EAX_IMM(0x3d, src2w); | BINARY_EAX_IMM(CMP_EAX_i32, src2w); |
return SLJIT_SUCCESS; |
return SLJIT_SUCCESS; |
} |
} |
|
|
if (src1 >= SLJIT_TEMPORARY_REG1 && src1 <= SLJIT_NO_REGISTERS) { | if (src1 <= TMP_REGISTER) { |
if (src2 & SLJIT_IMM) { |
if (src2 & SLJIT_IMM) { |
BINARY_IMM(0x7 << 3, 0x39, src2w, src1, 0); | BINARY_IMM(CMP, CMP_rm_r, src2w, src1, 0); |
} |
} |
else { |
else { |
code = emit_x86_instruction(compiler, 1, src1, 0, src2, src2w); | inst = emit_x86_instruction(compiler, 1, src1, 0, src2, src2w); |
FAIL_IF(!code); | FAIL_IF(!inst); |
*code = 0x3b; | *inst = CMP_r_rm; |
} |
} |
return SLJIT_SUCCESS; |
return SLJIT_SUCCESS; |
} |
} |
|
|
if (src2 >= SLJIT_TEMPORARY_REG1 && src2 <= SLJIT_NO_REGISTERS && !(src1 & SLJIT_IMM)) { | if (src2 <= TMP_REGISTER && !(src1 & SLJIT_IMM)) { |
code = emit_x86_instruction(compiler, 1, src2, 0, src1, src1w); | inst = emit_x86_instruction(compiler, 1, src2, 0, src1, src1w); |
FAIL_IF(!code); | FAIL_IF(!inst); |
*code = 0x39; | *inst = CMP_rm_r; |
return SLJIT_SUCCESS; |
return SLJIT_SUCCESS; |
} |
} |
|
|
Line 1646 static int emit_cmp_binary(struct sljit_compiler *comp
|
Line 1865 static int emit_cmp_binary(struct sljit_compiler *comp
|
src1 = TMP_REGISTER; |
src1 = TMP_REGISTER; |
src1w = 0; |
src1w = 0; |
} |
} |
BINARY_IMM(0x7 << 3, 0x39, src2w, src1, src1w); | BINARY_IMM(CMP, CMP_rm_r, src2w, src1, src1w); |
} |
} |
else { |
else { |
EMIT_MOV(compiler, TMP_REGISTER, 0, src1, src1w); |
EMIT_MOV(compiler, TMP_REGISTER, 0, src1, src1w); |
code = emit_x86_instruction(compiler, 1, TMP_REGISTER, 0, src2, src2w); | inst = emit_x86_instruction(compiler, 1, TMP_REGISTER, 0, src2, src2w); |
FAIL_IF(!code); | FAIL_IF(!inst); |
*code = 0x3b; | *inst = CMP_r_rm; |
} |
} |
return SLJIT_SUCCESS; |
return SLJIT_SUCCESS; |
} |
} |
|
|
static int emit_test_binary(struct sljit_compiler *compiler, | static sljit_si emit_test_binary(struct sljit_compiler *compiler, |
int src1, sljit_w src1w, | sljit_si src1, sljit_sw src1w, |
int src2, sljit_w src2w) | sljit_si src2, sljit_sw src2w) |
{ |
{ |
sljit_ub* code; | sljit_ub* inst; |
|
|
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64) |
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64) |
if (src1 == SLJIT_TEMPORARY_REG1 && (src2 & SLJIT_IMM) && (src2w > 127 || src2w < -128) && (compiler->mode32 || IS_HALFWORD(src2w))) { | if (src1 == SLJIT_SCRATCH_REG1 && (src2 & SLJIT_IMM) && (src2w > 127 || src2w < -128) && (compiler->mode32 || IS_HALFWORD(src2w))) { |
#else |
#else |
if (src1 == SLJIT_TEMPORARY_REG1 && (src2 & SLJIT_IMM) && (src2w > 127 || src2w < -128)) { | if (src1 == SLJIT_SCRATCH_REG1 && (src2 & SLJIT_IMM) && (src2w > 127 || src2w < -128)) { |
#endif |
#endif |
BINARY_EAX_IMM(0xa9, src2w); | BINARY_EAX_IMM(TEST_EAX_i32, src2w); |
return SLJIT_SUCCESS; |
return SLJIT_SUCCESS; |
} |
} |
|
|
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64) |
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64) |
if (src2 == SLJIT_TEMPORARY_REG1 && (src2 & SLJIT_IMM) && (src1w > 127 || src1w < -128) && (compiler->mode32 || IS_HALFWORD(src1w))) { | if (src2 == SLJIT_SCRATCH_REG1 && (src2 & SLJIT_IMM) && (src1w > 127 || src1w < -128) && (compiler->mode32 || IS_HALFWORD(src1w))) { |
#else |
#else |
if (src2 == SLJIT_TEMPORARY_REG1 && (src1 & SLJIT_IMM) && (src1w > 127 || src1w < -128)) { | if (src2 == SLJIT_SCRATCH_REG1 && (src1 & SLJIT_IMM) && (src1w > 127 || src1w < -128)) { |
#endif |
#endif |
BINARY_EAX_IMM(0xa9, src1w); | BINARY_EAX_IMM(TEST_EAX_i32, src1w); |
return SLJIT_SUCCESS; |
return SLJIT_SUCCESS; |
} |
} |
|
|
if (src1 >= SLJIT_TEMPORARY_REG1 && src1 <= SLJIT_NO_REGISTERS) { | if (src1 <= TMP_REGISTER) { |
if (src2 & SLJIT_IMM) { |
if (src2 & SLJIT_IMM) { |
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64) |
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64) |
if (IS_HALFWORD(src2w) || compiler->mode32) { |
if (IS_HALFWORD(src2w) || compiler->mode32) { |
code = emit_x86_instruction(compiler, 1, SLJIT_IMM, src2w, src1, 0); | inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, src2w, src1, 0); |
FAIL_IF(!code); | FAIL_IF(!inst); |
*code = 0xf7; | *inst = GROUP_F7; |
} |
} |
else { |
else { |
FAIL_IF(emit_load_imm64(compiler, TMP_REG2, src2w)); |
FAIL_IF(emit_load_imm64(compiler, TMP_REG2, src2w)); |
code = emit_x86_instruction(compiler, 1, TMP_REG2, 0, src1, 0); | inst = emit_x86_instruction(compiler, 1, TMP_REG2, 0, src1, 0); |
FAIL_IF(!code); | FAIL_IF(!inst); |
*code = 0x85; | *inst = TEST_rm_r; |
} |
} |
#else |
#else |
code = emit_x86_instruction(compiler, 1, SLJIT_IMM, src2w, src1, 0); | inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, src2w, src1, 0); |
FAIL_IF(!code); | FAIL_IF(!inst); |
*code = 0xf7; | *inst = GROUP_F7; |
#endif |
#endif |
} |
} |
else { |
else { |
code = emit_x86_instruction(compiler, 1, src1, 0, src2, src2w); | inst = emit_x86_instruction(compiler, 1, src1, 0, src2, src2w); |
FAIL_IF(!code); | FAIL_IF(!inst); |
*code = 0x85; | *inst = TEST_rm_r; |
} |
} |
return SLJIT_SUCCESS; |
return SLJIT_SUCCESS; |
} |
} |
|
|
if (src2 >= SLJIT_TEMPORARY_REG1 && src2 <= SLJIT_NO_REGISTERS) { | if (src2 <= TMP_REGISTER) { |
if (src1 & SLJIT_IMM) { |
if (src1 & SLJIT_IMM) { |
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64) |
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64) |
if (IS_HALFWORD(src1w) || compiler->mode32) { |
if (IS_HALFWORD(src1w) || compiler->mode32) { |
code = emit_x86_instruction(compiler, 1, SLJIT_IMM, src1w, src2, 0); | inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, src1w, src2, 0); |
FAIL_IF(!code); | FAIL_IF(!inst); |
*code = 0xf7; | *inst = GROUP_F7; |
} |
} |
else { |
else { |
FAIL_IF(emit_load_imm64(compiler, TMP_REG2, src1w)); |
FAIL_IF(emit_load_imm64(compiler, TMP_REG2, src1w)); |
code = emit_x86_instruction(compiler, 1, TMP_REG2, 0, src2, 0); | inst = emit_x86_instruction(compiler, 1, TMP_REG2, 0, src2, 0); |
FAIL_IF(!code); | FAIL_IF(!inst); |
*code = 0x85; | *inst = TEST_rm_r; |
} |
} |
#else |
#else |
code = emit_x86_instruction(compiler, 1, src1, src1w, src2, 0); | inst = emit_x86_instruction(compiler, 1, src1, src1w, src2, 0); |
FAIL_IF(!code); | FAIL_IF(!inst); |
*code = 0xf7; | *inst = GROUP_F7; |
#endif |
#endif |
} |
} |
else { |
else { |
code = emit_x86_instruction(compiler, 1, src2, 0, src1, src1w); | inst = emit_x86_instruction(compiler, 1, src2, 0, src1, src1w); |
FAIL_IF(!code); | FAIL_IF(!inst); |
*code = 0x85; | *inst = TEST_rm_r; |
} |
} |
return SLJIT_SUCCESS; |
return SLJIT_SUCCESS; |
} |
} |
Line 1741 static int emit_test_binary(struct sljit_compiler *com
|
Line 1960 static int emit_test_binary(struct sljit_compiler *com
|
if (src2 & SLJIT_IMM) { |
if (src2 & SLJIT_IMM) { |
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64) |
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64) |
if (IS_HALFWORD(src2w) || compiler->mode32) { |
if (IS_HALFWORD(src2w) || compiler->mode32) { |
code = emit_x86_instruction(compiler, 1, SLJIT_IMM, src2w, TMP_REGISTER, 0); | inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, src2w, TMP_REGISTER, 0); |
FAIL_IF(!code); | FAIL_IF(!inst); |
*code = 0xf7; | *inst = GROUP_F7; |
} |
} |
else { |
else { |
FAIL_IF(emit_load_imm64(compiler, TMP_REG2, src2w)); |
FAIL_IF(emit_load_imm64(compiler, TMP_REG2, src2w)); |
code = emit_x86_instruction(compiler, 1, TMP_REG2, 0, TMP_REGISTER, 0); | inst = emit_x86_instruction(compiler, 1, TMP_REG2, 0, TMP_REGISTER, 0); |
FAIL_IF(!code); | FAIL_IF(!inst); |
*code = 0x85; | *inst = TEST_rm_r; |
} |
} |
#else |
#else |
code = emit_x86_instruction(compiler, 1, SLJIT_IMM, src2w, TMP_REGISTER, 0); | inst = emit_x86_instruction(compiler, 1, SLJIT_IMM, src2w, TMP_REGISTER, 0); |
FAIL_IF(!code); | FAIL_IF(!inst); |
*code = 0xf7; | *inst = GROUP_F7; |
#endif |
#endif |
} |
} |
else { |
else { |
code = emit_x86_instruction(compiler, 1, TMP_REGISTER, 0, src2, src2w); | inst = emit_x86_instruction(compiler, 1, TMP_REGISTER, 0, src2, src2w); |
FAIL_IF(!code); | FAIL_IF(!inst); |
*code = 0x85; | *inst = TEST_rm_r; |
} |
} |
return SLJIT_SUCCESS; |
return SLJIT_SUCCESS; |
} |
} |
|
|
static int emit_shift(struct sljit_compiler *compiler, | static sljit_si emit_shift(struct sljit_compiler *compiler, |
sljit_ub mode, |
sljit_ub mode, |
int dst, sljit_w dstw, | sljit_si dst, sljit_sw dstw, |
int src1, sljit_w src1w, | sljit_si src1, sljit_sw src1w, |
int src2, sljit_w src2w) | sljit_si src2, sljit_sw src2w) |
{ |
{ |
sljit_ub* code; | sljit_ub* inst; |
|
|
if ((src2 & SLJIT_IMM) || (src2 == SLJIT_PREF_SHIFT_REG)) { |
if ((src2 & SLJIT_IMM) || (src2 == SLJIT_PREF_SHIFT_REG)) { |
if (dst == src1 && dstw == src1w) { |
if (dst == src1 && dstw == src1w) { |
code = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, src2, src2w, dst, dstw); | inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, src2, src2w, dst, dstw); |
FAIL_IF(!code); | FAIL_IF(!inst); |
*code |= mode; | *inst |= mode; |
return SLJIT_SUCCESS; |
return SLJIT_SUCCESS; |
} |
} |
if (dst == SLJIT_UNUSED) { |
if (dst == SLJIT_UNUSED) { |
EMIT_MOV(compiler, TMP_REGISTER, 0, src1, src1w); |
EMIT_MOV(compiler, TMP_REGISTER, 0, src1, src1w); |
code = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, src2, src2w, TMP_REGISTER, 0); | inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, src2, src2w, TMP_REGISTER, 0); |
FAIL_IF(!code); | FAIL_IF(!inst); |
*code |= mode; | *inst |= mode; |
return SLJIT_SUCCESS; |
return SLJIT_SUCCESS; |
} |
} |
if (dst == SLJIT_PREF_SHIFT_REG && src2 == SLJIT_PREF_SHIFT_REG) { |
if (dst == SLJIT_PREF_SHIFT_REG && src2 == SLJIT_PREF_SHIFT_REG) { |
EMIT_MOV(compiler, TMP_REGISTER, 0, src1, src1w); |
EMIT_MOV(compiler, TMP_REGISTER, 0, src1, src1w); |
code = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_PREF_SHIFT_REG, 0, TMP_REGISTER, 0); | inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_PREF_SHIFT_REG, 0, TMP_REGISTER, 0); |
FAIL_IF(!code); | FAIL_IF(!inst); |
*code |= mode; | *inst |= mode; |
EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, TMP_REGISTER, 0); |
EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, TMP_REGISTER, 0); |
return SLJIT_SUCCESS; |
return SLJIT_SUCCESS; |
} |
} |
if (dst >= SLJIT_TEMPORARY_REG1 && dst <= SLJIT_NO_REGISTERS) { | if (dst <= TMP_REGISTER) { |
EMIT_MOV(compiler, dst, 0, src1, src1w); |
EMIT_MOV(compiler, dst, 0, src1, src1w); |
code = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, src2, src2w, dst, 0); | inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, src2, src2w, dst, 0); |
FAIL_IF(!code); | FAIL_IF(!inst); |
*code |= mode; | *inst |= mode; |
return SLJIT_SUCCESS; |
return SLJIT_SUCCESS; |
} |
} |
|
|
EMIT_MOV(compiler, TMP_REGISTER, 0, src1, src1w); |
EMIT_MOV(compiler, TMP_REGISTER, 0, src1, src1w); |
code = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, src2, src2w, TMP_REGISTER, 0); | inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, src2, src2w, TMP_REGISTER, 0); |
FAIL_IF(!code); | FAIL_IF(!inst); |
*code |= mode; | *inst |= mode; |
EMIT_MOV(compiler, dst, dstw, TMP_REGISTER, 0); |
EMIT_MOV(compiler, dst, dstw, TMP_REGISTER, 0); |
return SLJIT_SUCCESS; |
return SLJIT_SUCCESS; |
} |
} |
Line 1814 static int emit_shift(struct sljit_compiler *compiler,
|
Line 2033 static int emit_shift(struct sljit_compiler *compiler,
|
if (dst == SLJIT_PREF_SHIFT_REG) { |
if (dst == SLJIT_PREF_SHIFT_REG) { |
EMIT_MOV(compiler, TMP_REGISTER, 0, src1, src1w); |
EMIT_MOV(compiler, TMP_REGISTER, 0, src1, src1w); |
EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, src2, src2w); |
EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, src2, src2w); |
code = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_PREF_SHIFT_REG, 0, TMP_REGISTER, 0); | inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_PREF_SHIFT_REG, 0, TMP_REGISTER, 0); |
FAIL_IF(!code); | FAIL_IF(!inst); |
*code |= mode; | *inst |= mode; |
EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, TMP_REGISTER, 0); |
EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, TMP_REGISTER, 0); |
} |
} |
else if (dst >= SLJIT_TEMPORARY_REG1 && dst <= SLJIT_NO_REGISTERS && dst != src2 && !ADDRESSING_DEPENDS_ON(src2, dst)) { | else if (dst <= TMP_REGISTER && dst != src2 && !ADDRESSING_DEPENDS_ON(src2, dst)) { |
if (src1 != dst) |
if (src1 != dst) |
EMIT_MOV(compiler, dst, 0, src1, src1w); |
EMIT_MOV(compiler, dst, 0, src1, src1w); |
EMIT_MOV(compiler, TMP_REGISTER, 0, SLJIT_PREF_SHIFT_REG, 0); |
EMIT_MOV(compiler, TMP_REGISTER, 0, SLJIT_PREF_SHIFT_REG, 0); |
EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, src2, src2w); |
EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, src2, src2w); |
code = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_PREF_SHIFT_REG, 0, dst, 0); | inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_PREF_SHIFT_REG, 0, dst, 0); |
FAIL_IF(!code); | FAIL_IF(!inst); |
*code |= mode; | *inst |= mode; |
EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, TMP_REGISTER, 0); |
EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, TMP_REGISTER, 0); |
} |
} |
else { |
else { |
Line 1837 static int emit_shift(struct sljit_compiler *compiler,
|
Line 2056 static int emit_shift(struct sljit_compiler *compiler,
|
EMIT_MOV(compiler, TMP_REG2, 0, SLJIT_PREF_SHIFT_REG, 0); |
EMIT_MOV(compiler, TMP_REG2, 0, SLJIT_PREF_SHIFT_REG, 0); |
#else |
#else |
/* [esp+0] contains the flags. */ |
/* [esp+0] contains the flags. */ |
EMIT_MOV(compiler, SLJIT_MEM1(SLJIT_LOCALS_REG), sizeof(sljit_w), SLJIT_PREF_SHIFT_REG, 0); | EMIT_MOV(compiler, SLJIT_MEM1(SLJIT_LOCALS_REG), sizeof(sljit_sw), SLJIT_PREF_SHIFT_REG, 0); |
#endif |
#endif |
EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, src2, src2w); |
EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, src2, src2w); |
code = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_PREF_SHIFT_REG, 0, TMP_REGISTER, 0); | inst = emit_x86_instruction(compiler, 1 | EX86_SHIFT_INS, SLJIT_PREF_SHIFT_REG, 0, TMP_REGISTER, 0); |
FAIL_IF(!code); | FAIL_IF(!inst); |
*code |= mode; | *inst |= mode; |
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64) |
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64) |
EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, TMP_REG2, 0); |
EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, TMP_REG2, 0); |
#else |
#else |
EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, SLJIT_MEM1(SLJIT_LOCALS_REG), sizeof(sljit_w)); | EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, SLJIT_MEM1(SLJIT_LOCALS_REG), sizeof(sljit_sw)); |
#endif |
#endif |
EMIT_MOV(compiler, dst, dstw, TMP_REGISTER, 0); |
EMIT_MOV(compiler, dst, dstw, TMP_REGISTER, 0); |
} |
} |
Line 1854 static int emit_shift(struct sljit_compiler *compiler,
|
Line 2073 static int emit_shift(struct sljit_compiler *compiler,
|
return SLJIT_SUCCESS; |
return SLJIT_SUCCESS; |
} |
} |
|
|
static int emit_shift_with_flags(struct sljit_compiler *compiler, | static sljit_si emit_shift_with_flags(struct sljit_compiler *compiler, |
sljit_ub mode, int set_flags, | sljit_ub mode, sljit_si set_flags, |
int dst, sljit_w dstw, | sljit_si dst, sljit_sw dstw, |
int src1, sljit_w src1w, | sljit_si src1, sljit_sw src1w, |
int src2, sljit_w src2w) | sljit_si src2, sljit_sw src2w) |
{ |
{ |
/* The CPU does not set flags if the shift count is 0. */ |
/* The CPU does not set flags if the shift count is 0. */ |
if (src2 & SLJIT_IMM) { |
if (src2 & SLJIT_IMM) { |
Line 1872 static int emit_shift_with_flags(struct sljit_compiler
|
Line 2091 static int emit_shift_with_flags(struct sljit_compiler
|
if (!set_flags) |
if (!set_flags) |
return emit_mov(compiler, dst, dstw, src1, src1w); |
return emit_mov(compiler, dst, dstw, src1, src1w); |
/* OR dst, src, 0 */ |
/* OR dst, src, 0 */ |
return emit_cum_binary(compiler, 0x0b, 0x09, 0x1 << 3, 0x0d, | return emit_cum_binary(compiler, OR_r_rm, OR_rm_r, OR, OR_EAX_i32, |
dst, dstw, src1, src1w, SLJIT_IMM, 0); |
dst, dstw, src1, src1w, SLJIT_IMM, 0); |
} |
} |
|
|
if (!set_flags) |
if (!set_flags) |
return emit_shift(compiler, mode, dst, dstw, src1, src1w, src2, src2w); |
return emit_shift(compiler, mode, dst, dstw, src1, src1w, src2, src2w); |
|
|
if (!(dst >= SLJIT_TEMPORARY_REG1 && dst <= SLJIT_NO_REGISTERS)) | if (!(dst <= TMP_REGISTER)) |
FAIL_IF(emit_cmp_binary(compiler, src1, src1w, SLJIT_IMM, 0)); |
FAIL_IF(emit_cmp_binary(compiler, src1, src1w, SLJIT_IMM, 0)); |
|
|
FAIL_IF(emit_shift(compiler,mode, dst, dstw, src1, src1w, src2, src2w)); |
FAIL_IF(emit_shift(compiler,mode, dst, dstw, src1, src1w, src2, src2w)); |
|
|
if (dst >= SLJIT_TEMPORARY_REG1 && dst <= SLJIT_NO_REGISTERS) | if (dst <= TMP_REGISTER) |
return emit_cmp_binary(compiler, dst, dstw, SLJIT_IMM, 0); |
return emit_cmp_binary(compiler, dst, dstw, SLJIT_IMM, 0); |
return SLJIT_SUCCESS; |
return SLJIT_SUCCESS; |
} |
} |
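
Note: the zero-count special case above exists because an x86 shift whose masked count is 0 leaves EFLAGS unchanged, so the helper has to materialize the flags itself with an OR against 0 or a CMP. A small model of the 32-bit behaviour being compensated for (helper name assumed):

/* Model of a 32-bit SHL whose flags must be recomputed when the count masks to zero. */
static unsigned int model_shl_set_zero_flag(unsigned int src, unsigned int count, int *zero_flag)
{
	unsigned int result = src << (count & 31);	/* hardware masks the count to 5 bits */

	/* With a masked count of 0 the CPU does not update the flags, so the zero
	   flag is derived from the value explicitly, mirroring the emitted OR/CMP. */
	*zero_flag = (result == 0);
	return result;
}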
|
|
SLJIT_API_FUNC_ATTRIBUTE int sljit_emit_op2(struct sljit_compiler *compiler, int op, | SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op2(struct sljit_compiler *compiler, sljit_si op, |
int dst, sljit_w dstw, | sljit_si dst, sljit_sw dstw, |
int src1, sljit_w src1w, | sljit_si src1, sljit_sw src1w, |
int src2, sljit_w src2w) | sljit_si src2, sljit_sw src2w) |
{ |
{ |
CHECK_ERROR(); |
CHECK_ERROR(); |
check_sljit_emit_op2(compiler, op, dst, dstw, src1, src1w, src2, src2w); |
check_sljit_emit_op2(compiler, op, dst, dstw, src1, src1w, src2, src2w); |
Line 1917 SLJIT_API_FUNC_ATTRIBUTE int sljit_emit_op2(struct slj
|
Line 2136 SLJIT_API_FUNC_ATTRIBUTE int sljit_emit_op2(struct slj
|
switch (GET_OPCODE(op)) { |
switch (GET_OPCODE(op)) { |
case SLJIT_ADD: |
case SLJIT_ADD: |
if (!GET_FLAGS(op)) { |
if (!GET_FLAGS(op)) { |
if (emit_lea_binary(compiler, dst, dstw, src1, src1w, src2, src2w) != SLJIT_ERR_UNSUPPORTED) | if (emit_lea_binary(compiler, op & SLJIT_KEEP_FLAGS, dst, dstw, src1, src1w, src2, src2w) != SLJIT_ERR_UNSUPPORTED) |
return compiler->error; |
return compiler->error; |
} |
} |
else |
else |
compiler->flags_saved = 0; |
compiler->flags_saved = 0; |
if (SLJIT_UNLIKELY(op & SLJIT_KEEP_FLAGS) && !compiler->flags_saved) |
if (SLJIT_UNLIKELY(op & SLJIT_KEEP_FLAGS) && !compiler->flags_saved) |
FAIL_IF(emit_save_flags(compiler)); |
FAIL_IF(emit_save_flags(compiler)); |
return emit_cum_binary(compiler, 0x03, 0x01, 0x0 << 3, 0x05, | return emit_cum_binary(compiler, ADD_r_rm, ADD_rm_r, ADD, ADD_EAX_i32, |
dst, dstw, src1, src1w, src2, src2w); |
dst, dstw, src1, src1w, src2, src2w); |
case SLJIT_ADDC: |
case SLJIT_ADDC: |
if (SLJIT_UNLIKELY(compiler->flags_saved)) /* C flag must be restored. */ |
if (SLJIT_UNLIKELY(compiler->flags_saved)) /* C flag must be restored. */ |
Line 1933 SLJIT_API_FUNC_ATTRIBUTE int sljit_emit_op2(struct slj
|
Line 2152 SLJIT_API_FUNC_ATTRIBUTE int sljit_emit_op2(struct slj
|
FAIL_IF(emit_save_flags(compiler)); |
FAIL_IF(emit_save_flags(compiler)); |
if (SLJIT_UNLIKELY(GET_FLAGS(op))) |
if (SLJIT_UNLIKELY(GET_FLAGS(op))) |
compiler->flags_saved = 0; |
compiler->flags_saved = 0; |
return emit_cum_binary(compiler, 0x13, 0x11, 0x2 << 3, 0x15, | return emit_cum_binary(compiler, ADC_r_rm, ADC_rm_r, ADC, ADC_EAX_i32, |
dst, dstw, src1, src1w, src2, src2w); |
dst, dstw, src1, src1w, src2, src2w); |
case SLJIT_SUB: |
case SLJIT_SUB: |
if (!GET_FLAGS(op)) { |
if (!GET_FLAGS(op)) { |
if ((src2 & SLJIT_IMM) && emit_lea_binary(compiler, dst, dstw, src1, src1w, SLJIT_IMM, -src2w) != SLJIT_ERR_UNSUPPORTED) | if ((src2 & SLJIT_IMM) && emit_lea_binary(compiler, op & SLJIT_KEEP_FLAGS, dst, dstw, src1, src1w, SLJIT_IMM, -src2w) != SLJIT_ERR_UNSUPPORTED) |
return compiler->error; |
return compiler->error; |
} |
} |
else |
else |
Line 1946 SLJIT_API_FUNC_ATTRIBUTE int sljit_emit_op2(struct slj
|
Line 2165 SLJIT_API_FUNC_ATTRIBUTE int sljit_emit_op2(struct slj
|
FAIL_IF(emit_save_flags(compiler)); |
FAIL_IF(emit_save_flags(compiler)); |
if (dst == SLJIT_UNUSED) |
if (dst == SLJIT_UNUSED) |
return emit_cmp_binary(compiler, src1, src1w, src2, src2w); |
return emit_cmp_binary(compiler, src1, src1w, src2, src2w); |
return emit_non_cum_binary(compiler, 0x2b, 0x29, 0x5 << 3, 0x2d, | return emit_non_cum_binary(compiler, SUB_r_rm, SUB_rm_r, SUB, SUB_EAX_i32, |
dst, dstw, src1, src1w, src2, src2w); |
dst, dstw, src1, src1w, src2, src2w); |
case SLJIT_SUBC: |
case SLJIT_SUBC: |
if (SLJIT_UNLIKELY(compiler->flags_saved)) /* C flag must be restored. */ |
if (SLJIT_UNLIKELY(compiler->flags_saved)) /* C flag must be restored. */ |
Line 1955 SLJIT_API_FUNC_ATTRIBUTE int sljit_emit_op2(struct slj
|
Line 2174 SLJIT_API_FUNC_ATTRIBUTE int sljit_emit_op2(struct slj
|
FAIL_IF(emit_save_flags(compiler)); |
FAIL_IF(emit_save_flags(compiler)); |
if (SLJIT_UNLIKELY(GET_FLAGS(op))) |
if (SLJIT_UNLIKELY(GET_FLAGS(op))) |
compiler->flags_saved = 0; |
compiler->flags_saved = 0; |
return emit_non_cum_binary(compiler, 0x1b, 0x19, 0x3 << 3, 0x1d, | return emit_non_cum_binary(compiler, SBB_r_rm, SBB_rm_r, SBB, SBB_EAX_i32, |
dst, dstw, src1, src1w, src2, src2w); |
dst, dstw, src1, src1w, src2, src2w); |
case SLJIT_MUL: |
case SLJIT_MUL: |
return emit_mul(compiler, dst, dstw, src1, src1w, src2, src2w); |
return emit_mul(compiler, dst, dstw, src1, src1w, src2, src2w); |
case SLJIT_AND: |
case SLJIT_AND: |
if (dst == SLJIT_UNUSED) |
if (dst == SLJIT_UNUSED) |
return emit_test_binary(compiler, src1, src1w, src2, src2w); |
return emit_test_binary(compiler, src1, src1w, src2, src2w); |
return emit_cum_binary(compiler, 0x23, 0x21, 0x4 << 3, 0x25, | return emit_cum_binary(compiler, AND_r_rm, AND_rm_r, AND, AND_EAX_i32, |
dst, dstw, src1, src1w, src2, src2w); |
dst, dstw, src1, src1w, src2, src2w); |
case SLJIT_OR: |
case SLJIT_OR: |
return emit_cum_binary(compiler, 0x0b, 0x09, 0x1 << 3, 0x0d, | return emit_cum_binary(compiler, OR_r_rm, OR_rm_r, OR, OR_EAX_i32, |
dst, dstw, src1, src1w, src2, src2w); |
dst, dstw, src1, src1w, src2, src2w); |
case SLJIT_XOR: |
case SLJIT_XOR: |
return emit_cum_binary(compiler, 0x33, 0x31, 0x6 << 3, 0x35, | return emit_cum_binary(compiler, XOR_r_rm, XOR_rm_r, XOR, XOR_EAX_i32, |
dst, dstw, src1, src1w, src2, src2w); |
dst, dstw, src1, src1w, src2, src2w); |
case SLJIT_SHL: |
case SLJIT_SHL: |
return emit_shift_with_flags(compiler, 0x4 << 3, GET_FLAGS(op), | return emit_shift_with_flags(compiler, SHL, GET_FLAGS(op), |
dst, dstw, src1, src1w, src2, src2w); |
dst, dstw, src1, src1w, src2, src2w); |
case SLJIT_LSHR: |
case SLJIT_LSHR: |
return emit_shift_with_flags(compiler, 0x5 << 3, GET_FLAGS(op), | return emit_shift_with_flags(compiler, SHR, GET_FLAGS(op), |
dst, dstw, src1, src1w, src2, src2w); |
dst, dstw, src1, src1w, src2, src2w); |
case SLJIT_ASHR: |
case SLJIT_ASHR: |
return emit_shift_with_flags(compiler, 0x7 << 3, GET_FLAGS(op), | return emit_shift_with_flags(compiler, SAR, GET_FLAGS(op), |
dst, dstw, src1, src1w, src2, src2w); |
dst, dstw, src1, src1w, src2, src2w); |
} |
} |
|
|
return SLJIT_SUCCESS; |
return SLJIT_SUCCESS; |
} |
} |
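The named opcode macros on the right replace the literal encodings on the left one for one. Their definitions are outside this hunk, so the list below is an inference from the replaced values (standard x86 opcode bytes), not a quote of the real header:

	#define ADD_r_rm	0x03		/* was 0x03: add r32, r/m32 */
	#define ADD_rm_r	0x01		/* was 0x01: add r/m32, r32 */
	#define ADD		(0x0 << 3)	/* was 0x0 << 3: /0 extension of the 0x81/0x83 group */
	#define ADD_EAX_i32	0x05		/* was 0x05: add eax, imm32 */
	#define ADC_r_rm	0x13
	#define ADC_rm_r	0x11
	#define ADC		(0x2 << 3)
	#define ADC_EAX_i32	0x15
	#define SUB_r_rm	0x2b
	#define SUB_rm_r	0x29
	#define SUB		(0x5 << 3)
	#define SUB_EAX_i32	0x2d
	/* SBB (0x1b/0x19/0x3/0x1d), AND (0x23/0x21/0x4/0x25), OR (0x0b/0x09/0x1/0x0d)
	   and XOR (0x33/0x31/0x6/0x35) follow the same pattern; the shift macros are
	   the group-2 /digit extensions: SHL = 0x4 << 3, SHR = 0x5 << 3, SAR = 0x7 << 3. */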
|
|
SLJIT_API_FUNC_ATTRIBUTE int sljit_get_register_index(int reg) | SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_get_register_index(sljit_si reg) |
{ |
{ |
check_sljit_get_register_index(reg); |
check_sljit_get_register_index(reg); |
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32) |
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32) |
Line 1995 SLJIT_API_FUNC_ATTRIBUTE int sljit_get_register_index(
|
Line 2214 SLJIT_API_FUNC_ATTRIBUTE int sljit_get_register_index(
|
return reg_map[reg]; |
return reg_map[reg]; |
} |
} |
|
|
SLJIT_API_FUNC_ATTRIBUTE int sljit_emit_op_custom(struct sljit_compiler *compiler, | SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_get_float_register_index(sljit_si reg) |
void *instruction, int size) | |
{ |
{ |
sljit_ub *buf; | check_sljit_get_float_register_index(reg); |
| return reg; |
| } |
|
|
|
SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op_custom(struct sljit_compiler *compiler, |
|
void *instruction, sljit_si size) |
|
{ |
|
sljit_ub *inst; |
|
|
CHECK_ERROR(); |
CHECK_ERROR(); |
check_sljit_emit_op_custom(compiler, instruction, size); |
check_sljit_emit_op_custom(compiler, instruction, size); |
SLJIT_ASSERT(size > 0 && size < 16); |
SLJIT_ASSERT(size > 0 && size < 16); |
|
|
buf = (sljit_ub*)ensure_buf(compiler, 1 + size); | inst = (sljit_ub*)ensure_buf(compiler, 1 + size); |
FAIL_IF(!buf); | FAIL_IF(!inst); |
INC_SIZE(size); |
INC_SIZE(size); |
SLJIT_MEMMOVE(buf, instruction, size); | SLJIT_MEMMOVE(inst, instruction, size); |
return SLJIT_SUCCESS; |
return SLJIT_SUCCESS; |
} |
} |
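A minimal usage sketch (hypothetical caller code, assuming sljitLir.h is included; not part of this diff): sljit_emit_op_custom() just reserves size bytes and copies the given machine code verbatim into the generated stream, so a raw encoding such as rdtsc (0F 31) can be injected directly:

	static sljit_si emit_rdtsc(struct sljit_compiler *compiler)
	{
		sljit_ub rdtsc_insn[2] = { 0x0f, 0x31 };	/* rdtsc */
		/* The two bytes are copied as-is into the instruction stream. */
		return sljit_emit_op_custom(compiler, rdtsc_insn, 2);
	}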
|
|
Line 2018 SLJIT_API_FUNC_ATTRIBUTE int sljit_emit_op_custom(stru
|
Line 2243 SLJIT_API_FUNC_ATTRIBUTE int sljit_emit_op_custom(stru
|
#if (defined SLJIT_SSE2 && SLJIT_SSE2) |
#if (defined SLJIT_SSE2 && SLJIT_SSE2) |
|
|
/* Alignment + 2 * 16 bytes. */ |
/* Alignment + 2 * 16 bytes. */ |
static sljit_i sse2_data[3 + 4 + 4]; | static sljit_si sse2_data[3 + (4 + 4) * 2]; |
static sljit_i *sse2_buffer; | static sljit_si *sse2_buffer; |
|
|
static void init_compiler() | static void init_compiler(void) |
{ |
{ |
sse2_buffer = (sljit_i*)(((sljit_uw)sse2_data + 15) & ~0xf); | sse2_buffer = (sljit_si*)(((sljit_uw)sse2_data + 15) & ~0xf); |
sse2_buffer[0] = 0; | /* Single precision constants. */ |
sse2_buffer[1] = 0x80000000; | sse2_buffer[0] = 0x80000000; |
sse2_buffer[4] = 0xffffffff; | sse2_buffer[4] = 0x7fffffff; |
sse2_buffer[5] = 0x7fffffff; | /* Double precision constants. */ |
| sse2_buffer[8] = 0; |
| sse2_buffer[9] = 0x80000000; |
| sse2_buffer[12] = 0xffffffff; |
| sse2_buffer[13] = 0x7fffffff; |
} |
} |
|
|
#endif |
#endif |
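For context on the constants written above: the buffer is 16-byte aligned so it can serve as a memory operand of XORPD/ANDPD, and each group of four sljit_si words is one 128-bit constant (single-precision masks at sse2_buffer + 0 and + 4, double-precision masks at + 8 and + 12, i.e. byte offsets 0, 16, 32 and 48). A small stand-alone illustration of what those masks do to an IEEE-754 double (plain C, written here for exposition only, not sljit code):

	#include <stdint.h>
	#include <string.h>

	/* 0x8000000000000000 mirrors sse2_buffer[8..9], 0x7fffffffffffffff mirrors
	   sse2_buffer[12..13]; XORPD/ANDPD apply the same masks per 64-bit lane. */
	static double sse2_style_neg(double d)
	{
		uint64_t bits;
		memcpy(&bits, &d, sizeof(bits));
		bits ^= 0x8000000000000000ULL;	/* flip the sign bit, as SLJIT_NEGD does */
		memcpy(&d, &bits, sizeof(d));
		return d;
	}

	static double sse2_style_abs(double d)
	{
		uint64_t bits;
		memcpy(&bits, &d, sizeof(bits));
		bits &= 0x7fffffffffffffffULL;	/* clear the sign bit, as SLJIT_ABSD does */
		memcpy(&d, &bits, sizeof(d));
		return d;
	}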
|
|
SLJIT_API_FUNC_ATTRIBUTE int sljit_is_fpu_available(void) | SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_is_fpu_available(void) |
{ |
{ |
#if (defined SLJIT_SSE2 && SLJIT_SSE2) |
#if (defined SLJIT_SSE2 && SLJIT_SSE2) |
#if (defined SLJIT_DETECT_SSE2 && SLJIT_DETECT_SSE2) |
#if (defined SLJIT_DETECT_SSE2 && SLJIT_DETECT_SSE2) |
static int sse2_available = -1; | if (cpu_has_sse2 == -1) |
int features; | get_cpu_features(); |
| return cpu_has_sse2; |
if (sse2_available != -1) | #else /* SLJIT_DETECT_SSE2 */ |
return sse2_available; | |
| |
#ifdef __GNUC__ | |
/* AT&T syntax. */ | |
asm ( | |
"pushl %%ebx\n" | |
"movl $0x1, %%eax\n" | |
"cpuid\n" | |
"popl %%ebx\n" | |
"movl %%edx, %0\n" | |
: "=g" (features) | |
: | |
: "%eax", "%ecx", "%edx" | |
); | |
#elif defined(_MSC_VER) || defined(__BORLANDC__) | |
/* Intel syntax. */ | |
__asm { | |
mov eax, 1 | |
push ebx | |
cpuid | |
pop ebx | |
mov features, edx | |
} | |
#else | |
#error "SLJIT_DETECT_SSE2 is not implemented for this C compiler" | |
#endif | |
sse2_available = (features >> 26) & 0x1; | |
return sse2_available; | |
#else | |
return 1; |
return 1; |
#endif | #endif /* SLJIT_DETECT_SSE2 */ |
#else | #else /* SLJIT_SSE2 */ |
return 0; |
return 0; |
#endif |
#endif |
} |
} |
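The cpu_has_sse2 / cpu_has_cmov globals and get_cpu_features() live in the common x86 source and are not shown in this hunk. Judging from the inline-assembly detection removed above, a functionally equivalent sketch could look like this (GCC syntax, 32-bit only; an assumption, the real code also covers MSVC and x86-64):

	static sljit_si cpu_has_sse2 = -1;
	static sljit_si cpu_has_cmov = -1;

	static void get_cpu_features(void)
	{
		sljit_ui features;

		__asm__ (
			"pushl %%ebx\n"
			"movl $0x1, %%eax\n"
			"cpuid\n"
			"popl %%ebx\n"
			"movl %%edx, %0\n"
			: "=g" (features)
			:
			: "%eax", "%ecx", "%edx"
		);
		cpu_has_sse2 = (features >> 26) & 0x1;	/* CPUID.1:EDX bit 26 */
		cpu_has_cmov = (features >> 15) & 0x1;	/* CPUID.1:EDX bit 15 */
	}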
|
|
#if (defined SLJIT_SSE2 && SLJIT_SSE2) |
#if (defined SLJIT_SSE2 && SLJIT_SSE2) |
|
|
static int emit_sse2(struct sljit_compiler *compiler, sljit_ub opcode, | static sljit_si emit_sse2(struct sljit_compiler *compiler, sljit_ub opcode, |
int xmm1, int xmm2, sljit_w xmm2w) | sljit_si single, sljit_si xmm1, sljit_si xmm2, sljit_sw xmm2w) |
{ |
{ |
sljit_ub *buf; | sljit_ub *inst; |
|
|
buf = emit_x86_instruction(compiler, 2 | EX86_PREF_F2 | EX86_SSE2, xmm1, 0, xmm2, xmm2w); | inst = emit_x86_instruction(compiler, 2 | (single ? EX86_PREF_F3 : EX86_PREF_F2) | EX86_SSE2, xmm1, 0, xmm2, xmm2w); |
FAIL_IF(!buf); | FAIL_IF(!inst); |
*buf++ = 0x0f; | *inst++ = GROUP_0F; |
*buf = opcode; | *inst = opcode; |
return SLJIT_SUCCESS; |
return SLJIT_SUCCESS; |
} |
} |
|
|
static int emit_sse2_logic(struct sljit_compiler *compiler, sljit_ub opcode, | static sljit_si emit_sse2_logic(struct sljit_compiler *compiler, sljit_ub opcode, |
int xmm1, int xmm2, sljit_w xmm2w) | sljit_si pref66, sljit_si xmm1, sljit_si xmm2, sljit_sw xmm2w) |
{ |
{ |
sljit_ub *buf; | sljit_ub *inst; |
|
|
buf = emit_x86_instruction(compiler, 2 | EX86_PREF_66 | EX86_SSE2, xmm1, 0, xmm2, xmm2w); | inst = emit_x86_instruction(compiler, 2 | (pref66 ? EX86_PREF_66 : 0) | EX86_SSE2, xmm1, 0, xmm2, xmm2w); |
FAIL_IF(!buf); | FAIL_IF(!inst); |
*buf++ = 0x0f; | *inst++ = GROUP_0F; |
*buf = opcode; | *inst = opcode; |
return SLJIT_SUCCESS; |
return SLJIT_SUCCESS; |
} |
} |
|
|
static SLJIT_INLINE int emit_sse2_load(struct sljit_compiler *compiler, | static SLJIT_INLINE sljit_si emit_sse2_load(struct sljit_compiler *compiler, |
int dst, int src, sljit_w srcw) | sljit_si single, sljit_si dst, sljit_si src, sljit_sw srcw) |
{ |
{ |
return emit_sse2(compiler, 0x10, dst, src, srcw); | return emit_sse2(compiler, MOVSD_x_xm, single, dst, src, srcw); |
} |
} |
|
|
static SLJIT_INLINE int emit_sse2_store(struct sljit_compiler *compiler, | static SLJIT_INLINE sljit_si emit_sse2_store(struct sljit_compiler *compiler, |
int dst, sljit_w dstw, int src) | sljit_si single, sljit_si dst, sljit_sw dstw, sljit_si src) |
{ |
{ |
return emit_sse2(compiler, 0x11, src, dst, dstw); | return emit_sse2(compiler, MOVSD_xm_x, single, src, dst, dstw); |
} |
} |
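The new single / pref66 arguments only choose the mandatory prefix; everything else about the encoding is unchanged. For reference, the byte sequences produced (inferred from the literal opcodes on the left-hand side; standard SSE encodings):

	/*   F3 0F 10 /r   MOVSS   xmm, xmm/m32   load, single    (EX86_PREF_F3)
	 *   F2 0F 10 /r   MOVSD   xmm, xmm/m64   load, double    (EX86_PREF_F2)
	 *   F2 0F 11 /r   MOVSD   xmm/m64, xmm   store, double
	 *   66 0F 2E /r   UCOMISD xmm, xmm/m64   compare, double (EX86_PREF_66)
	 *      0F 2E /r   UCOMISS xmm, xmm/m32   compare, single (no prefix)
	 *   66 0F 57 / 66 0F 54   XORPD / ANDPD  sign and abs masks
	 *   F2 0F 58/5C/59/5E     ADDSD/SUBSD/MULSD/DIVSD (F3 prefix for the SS forms)
	 */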
|
|
SLJIT_API_FUNC_ATTRIBUTE int sljit_emit_fop1(struct sljit_compiler *compiler, int op, | SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fop1(struct sljit_compiler *compiler, sljit_si op, |
int dst, sljit_w dstw, | sljit_si dst, sljit_sw dstw, |
int src, sljit_w srcw) | sljit_si src, sljit_sw srcw) |
{ |
{ |
int dst_r; | sljit_si dst_r; |
|
|
CHECK_ERROR(); |
CHECK_ERROR(); |
check_sljit_emit_fop1(compiler, op, dst, dstw, src, srcw); |
check_sljit_emit_fop1(compiler, op, dst, dstw, src, srcw); |
Line 2127 SLJIT_API_FUNC_ATTRIBUTE int sljit_emit_fop1(struct sl
|
Line 2327 SLJIT_API_FUNC_ATTRIBUTE int sljit_emit_fop1(struct sl
|
compiler->mode32 = 1; |
compiler->mode32 = 1; |
#endif |
#endif |
|
|
if (GET_OPCODE(op) == SLJIT_FCMP) { | if (GET_OPCODE(op) == SLJIT_CMPD) { |
compiler->flags_saved = 0; |
compiler->flags_saved = 0; |
if (dst >= SLJIT_FLOAT_REG1 && dst <= SLJIT_FLOAT_REG4) | if (dst <= SLJIT_FLOAT_REG6) |
dst_r = dst; |
dst_r = dst; |
else { |
else { |
dst_r = TMP_FREG; |
dst_r = TMP_FREG; |
FAIL_IF(emit_sse2_load(compiler, dst_r, dst, dstw)); | FAIL_IF(emit_sse2_load(compiler, op & SLJIT_SINGLE_OP, dst_r, dst, dstw)); |
} |
} |
return emit_sse2_logic(compiler, 0x2e, dst_r, src, srcw); | return emit_sse2_logic(compiler, UCOMISD_x_xm, !(op & SLJIT_SINGLE_OP), dst_r, src, srcw); |
} |
} |
|
|
if (op == SLJIT_FMOV) { | if (op == SLJIT_MOVD) { |
if (dst >= SLJIT_FLOAT_REG1 && dst <= SLJIT_FLOAT_REG4) | if (dst <= SLJIT_FLOAT_REG6) |
return emit_sse2_load(compiler, dst, src, srcw); | return emit_sse2_load(compiler, op & SLJIT_SINGLE_OP, dst, src, srcw); |
if (src >= SLJIT_FLOAT_REG1 && src <= SLJIT_FLOAT_REG4) | if (src <= SLJIT_FLOAT_REG6) |
return emit_sse2_store(compiler, dst, dstw, src); | return emit_sse2_store(compiler, op & SLJIT_SINGLE_OP, dst, dstw, src); |
FAIL_IF(emit_sse2_load(compiler, TMP_FREG, src, srcw)); | FAIL_IF(emit_sse2_load(compiler, op & SLJIT_SINGLE_OP, TMP_FREG, src, srcw)); |
return emit_sse2_store(compiler, dst, dstw, TMP_FREG); | return emit_sse2_store(compiler, op & SLJIT_SINGLE_OP, dst, dstw, TMP_FREG); |
} |
} |
|
|
if (dst >= SLJIT_FLOAT_REG1 && dst <= SLJIT_FLOAT_REG4) { | if (dst >= SLJIT_FLOAT_REG1 && dst <= SLJIT_FLOAT_REG6) { |
dst_r = dst; |
dst_r = dst; |
if (dst != src) |
if (dst != src) |
FAIL_IF(emit_sse2_load(compiler, dst_r, src, srcw)); | FAIL_IF(emit_sse2_load(compiler, op & SLJIT_SINGLE_OP, dst_r, src, srcw)); |
} |
} |
else { |
else { |
dst_r = TMP_FREG; |
dst_r = TMP_FREG; |
FAIL_IF(emit_sse2_load(compiler, dst_r, src, srcw)); | FAIL_IF(emit_sse2_load(compiler, op & SLJIT_SINGLE_OP, dst_r, src, srcw)); |
} |
} |
|
|
switch (op) { | switch (GET_OPCODE(op)) { |
case SLJIT_FNEG: | case SLJIT_NEGD: |
FAIL_IF(emit_sse2_logic(compiler, 0x57, dst_r, SLJIT_MEM0(), (sljit_w)sse2_buffer)); | FAIL_IF(emit_sse2_logic(compiler, XORPD_x_xm, 1, dst_r, SLJIT_MEM0(), (sljit_sw)(op & SLJIT_SINGLE_OP ? sse2_buffer : sse2_buffer + 8))); |
break; |
break; |
|
|
case SLJIT_FABS: | case SLJIT_ABSD: |
FAIL_IF(emit_sse2_logic(compiler, 0x54, dst_r, SLJIT_MEM0(), (sljit_w)(sse2_buffer + 4))); | FAIL_IF(emit_sse2_logic(compiler, ANDPD_x_xm, 1, dst_r, SLJIT_MEM0(), (sljit_sw)(op & SLJIT_SINGLE_OP ? sse2_buffer + 4 : sse2_buffer + 12))); |
break; |
break; |
} |
} |
|
|
if (dst_r == TMP_FREG) |
if (dst_r == TMP_FREG) |
return emit_sse2_store(compiler, dst, dstw, TMP_FREG); | return emit_sse2_store(compiler, op & SLJIT_SINGLE_OP, dst, dstw, TMP_FREG); |
return SLJIT_SUCCESS; |
return SLJIT_SUCCESS; |
} |
} |
|
|
SLJIT_API_FUNC_ATTRIBUTE int sljit_emit_fop2(struct sljit_compiler *compiler, int op, | SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fop2(struct sljit_compiler *compiler, sljit_si op, |
int dst, sljit_w dstw, | sljit_si dst, sljit_sw dstw, |
int src1, sljit_w src1w, | sljit_si src1, sljit_sw src1w, |
int src2, sljit_w src2w) | sljit_si src2, sljit_sw src2w) |
{ |
{ |
int dst_r; | sljit_si dst_r; |
|
|
CHECK_ERROR(); |
CHECK_ERROR(); |
check_sljit_emit_fop2(compiler, op, dst, dstw, src1, src1w, src2, src2w); |
check_sljit_emit_fop2(compiler, op, dst, dstw, src1, src1w, src2, src2w); |
Line 2186 SLJIT_API_FUNC_ATTRIBUTE int sljit_emit_fop2(struct sl
|
Line 2386 SLJIT_API_FUNC_ATTRIBUTE int sljit_emit_fop2(struct sl
|
compiler->mode32 = 1; |
compiler->mode32 = 1; |
#endif |
#endif |
|
|
if (dst >= SLJIT_FLOAT_REG1 && dst <= SLJIT_FLOAT_REG4) { | if (dst <= SLJIT_FLOAT_REG6) { |
dst_r = dst; |
dst_r = dst; |
if (dst == src1) |
if (dst == src1) |
; /* Do nothing here. */ |
; /* Do nothing here. */ |
else if (dst == src2 && (op == SLJIT_FADD || op == SLJIT_FMUL)) { | else if (dst == src2 && (op == SLJIT_ADDD || op == SLJIT_MULD)) { |
/* Swap arguments. */ |
/* Swap arguments. */ |
src2 = src1; |
src2 = src1; |
src2w = src1w; |
src2w = src1w; |
} |
} |
else if (dst != src2) |
else if (dst != src2) |
FAIL_IF(emit_sse2_load(compiler, dst_r, src1, src1w)); | FAIL_IF(emit_sse2_load(compiler, op & SLJIT_SINGLE_OP, dst_r, src1, src1w)); |
else { |
else { |
dst_r = TMP_FREG; |
dst_r = TMP_FREG; |
FAIL_IF(emit_sse2_load(compiler, TMP_FREG, src1, src1w)); | FAIL_IF(emit_sse2_load(compiler, op & SLJIT_SINGLE_OP, TMP_FREG, src1, src1w)); |
} |
} |
} |
} |
else { |
else { |
dst_r = TMP_FREG; |
dst_r = TMP_FREG; |
FAIL_IF(emit_sse2_load(compiler, TMP_FREG, src1, src1w)); | FAIL_IF(emit_sse2_load(compiler, op & SLJIT_SINGLE_OP, TMP_FREG, src1, src1w)); |
} |
} |
|
|
switch (op) { | switch (GET_OPCODE(op)) { |
case SLJIT_FADD: | case SLJIT_ADDD: |
FAIL_IF(emit_sse2(compiler, 0x58, dst_r, src2, src2w)); | FAIL_IF(emit_sse2(compiler, ADDSD_x_xm, op & SLJIT_SINGLE_OP, dst_r, src2, src2w)); |
break; |
break; |
|
|
case SLJIT_FSUB: | case SLJIT_SUBD: |
FAIL_IF(emit_sse2(compiler, 0x5c, dst_r, src2, src2w)); | FAIL_IF(emit_sse2(compiler, SUBSD_x_xm, op & SLJIT_SINGLE_OP, dst_r, src2, src2w)); |
break; |
break; |
|
|
case SLJIT_FMUL: | case SLJIT_MULD: |
FAIL_IF(emit_sse2(compiler, 0x59, dst_r, src2, src2w)); | FAIL_IF(emit_sse2(compiler, MULSD_x_xm, op & SLJIT_SINGLE_OP, dst_r, src2, src2w)); |
break; |
break; |
|
|
case SLJIT_FDIV: | case SLJIT_DIVD: |
FAIL_IF(emit_sse2(compiler, 0x5e, dst_r, src2, src2w)); | FAIL_IF(emit_sse2(compiler, DIVSD_x_xm, op & SLJIT_SINGLE_OP, dst_r, src2, src2w)); |
break; |
break; |
} |
} |
|
|
if (dst_r == TMP_FREG) |
if (dst_r == TMP_FREG) |
return emit_sse2_store(compiler, dst, dstw, TMP_FREG); | return emit_sse2_store(compiler, op & SLJIT_SINGLE_OP, dst, dstw, TMP_FREG); |
return SLJIT_SUCCESS; |
return SLJIT_SUCCESS; |
} |
} |
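Operand placement in the block above, summarised (inferred from the branches, not from separate documentation):

	/*   dst is a float reg, dst == src1             -> operate on dst in place
	 *   dst is a float reg, dst == src2, ADDD/MULD  -> swap the sources (commutative), operate on dst
	 *   dst is a float reg, dst == src2, SUBD/DIVD  -> compute in TMP_FREG, then store
	 *   dst is a float reg, distinct from both      -> load src1 into dst, then operate
	 *   dst is memory                               -> compute in TMP_FREG, then store
	 */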
|
|
#else |
#else |
|
|
SLJIT_API_FUNC_ATTRIBUTE int sljit_emit_fop1(struct sljit_compiler *compiler, int op, | SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fop1(struct sljit_compiler *compiler, sljit_si op, |
int dst, sljit_w dstw, | sljit_si dst, sljit_sw dstw, |
int src, sljit_w srcw) | sljit_si src, sljit_sw srcw) |
{ |
{ |
CHECK_ERROR(); |
CHECK_ERROR(); |
/* Should cause an assertion fail. */ |
/* Should cause an assertion fail. */ |
Line 2243 SLJIT_API_FUNC_ATTRIBUTE int sljit_emit_fop1(struct sl
|
Line 2443 SLJIT_API_FUNC_ATTRIBUTE int sljit_emit_fop1(struct sl
|
return SLJIT_ERR_UNSUPPORTED; |
return SLJIT_ERR_UNSUPPORTED; |
} |
} |
|
|
SLJIT_API_FUNC_ATTRIBUTE int sljit_emit_fop2(struct sljit_compiler *compiler, int op, | SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_fop2(struct sljit_compiler *compiler, sljit_si op, |
int dst, sljit_w dstw, | sljit_si dst, sljit_sw dstw, |
int src1, sljit_w src1w, | sljit_si src1, sljit_sw src1w, |
int src2, sljit_w src2w) | sljit_si src2, sljit_sw src2w) |
{ |
{ |
CHECK_ERROR(); |
CHECK_ERROR(); |
/* Should cause an assertion fail. */ |
/* Should cause an assertion fail. */ |
Line 2263 SLJIT_API_FUNC_ATTRIBUTE int sljit_emit_fop2(struct sl
|
Line 2463 SLJIT_API_FUNC_ATTRIBUTE int sljit_emit_fop2(struct sl
|
|
|
SLJIT_API_FUNC_ATTRIBUTE struct sljit_label* sljit_emit_label(struct sljit_compiler *compiler) |
SLJIT_API_FUNC_ATTRIBUTE struct sljit_label* sljit_emit_label(struct sljit_compiler *compiler) |
{ |
{ |
sljit_ub *buf; | sljit_ub *inst; |
struct sljit_label *label; |
struct sljit_label *label; |
|
|
CHECK_ERROR_PTR(); |
CHECK_ERROR_PTR(); |
Line 2281 SLJIT_API_FUNC_ATTRIBUTE struct sljit_label* sljit_emi
|
Line 2481 SLJIT_API_FUNC_ATTRIBUTE struct sljit_label* sljit_emi
|
PTR_FAIL_IF(!label); |
PTR_FAIL_IF(!label); |
set_label(label, compiler); |
set_label(label, compiler); |
|
|
buf = (sljit_ub*)ensure_buf(compiler, 2); | inst = (sljit_ub*)ensure_buf(compiler, 2); |
PTR_FAIL_IF(!buf); | PTR_FAIL_IF(!inst); |
|
|
*buf++ = 0; | *inst++ = 0; |
*buf++ = 0; | *inst++ = 0; |
|
|
return label; |
return label; |
} |
} |
|
|
SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_jump(struct sljit_compiler *compiler, int type) | SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit_jump(struct sljit_compiler *compiler, sljit_si type) |
{ |
{ |
sljit_ub *buf; | sljit_ub *inst; |
struct sljit_jump *jump; |
struct sljit_jump *jump; |
|
|
CHECK_ERROR_PTR(); |
CHECK_ERROR_PTR(); |
Line 2319 SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit
|
Line 2519 SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump* sljit_emit
|
compiler->size += (type >= SLJIT_JUMP) ? (10 + 3) : (2 + 10 + 3); |
compiler->size += (type >= SLJIT_JUMP) ? (10 + 3) : (2 + 10 + 3); |
#endif |
#endif |
|
|
buf = (sljit_ub*)ensure_buf(compiler, 2); | inst = (sljit_ub*)ensure_buf(compiler, 2); |
PTR_FAIL_IF_NULL(buf); | PTR_FAIL_IF_NULL(inst); |
|
|
*buf++ = 0; | *inst++ = 0; |
*buf++ = type + 4; | *inst++ = type + 4; |
return jump; |
return jump; |
} |
} |
|
|
SLJIT_API_FUNC_ATTRIBUTE int sljit_emit_ijump(struct sljit_compiler *compiler, int type, int src, sljit_w srcw) | SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_ijump(struct sljit_compiler *compiler, sljit_si type, sljit_si src, sljit_sw srcw) |
{ |
{ |
sljit_ub *code; | sljit_ub *inst; |
struct sljit_jump *jump; |
struct sljit_jump *jump; |
|
|
CHECK_ERROR(); |
CHECK_ERROR(); |
Line 2347 SLJIT_API_FUNC_ATTRIBUTE int sljit_emit_ijump(struct s
|
Line 2547 SLJIT_API_FUNC_ATTRIBUTE int sljit_emit_ijump(struct s
|
if (type >= SLJIT_CALL1) { |
if (type >= SLJIT_CALL1) { |
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32) |
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32) |
#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL) |
#if (defined SLJIT_X86_32_FASTCALL && SLJIT_X86_32_FASTCALL) |
if (src == SLJIT_TEMPORARY_REG3) { | if (src == SLJIT_SCRATCH_REG3) { |
EMIT_MOV(compiler, TMP_REGISTER, 0, src, 0); |
EMIT_MOV(compiler, TMP_REGISTER, 0, src, 0); |
src = TMP_REGISTER; |
src = TMP_REGISTER; |
} |
} |
if (src == SLJIT_MEM1(SLJIT_LOCALS_REG) && type >= SLJIT_CALL3) |
if (src == SLJIT_MEM1(SLJIT_LOCALS_REG) && type >= SLJIT_CALL3) |
srcw += sizeof(sljit_w); | srcw += sizeof(sljit_sw); |
#else | |
if (src == SLJIT_MEM1(SLJIT_LOCALS_REG)) | |
srcw += sizeof(sljit_w) * (type - SLJIT_CALL0); | |
#endif |
#endif |
#endif |
#endif |
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64) && defined(_WIN64) |
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64) && defined(_WIN64) |
if (src == SLJIT_TEMPORARY_REG3) { | if (src == SLJIT_SCRATCH_REG3) { |
EMIT_MOV(compiler, TMP_REGISTER, 0, src, 0); |
EMIT_MOV(compiler, TMP_REGISTER, 0, src, 0); |
src = TMP_REGISTER; |
src = TMP_REGISTER; |
} |
} |
Line 2380 SLJIT_API_FUNC_ATTRIBUTE int sljit_emit_ijump(struct s
|
Line 2577 SLJIT_API_FUNC_ATTRIBUTE int sljit_emit_ijump(struct s
|
compiler->size += 10 + 3; |
compiler->size += 10 + 3; |
#endif |
#endif |
|
|
code = (sljit_ub*)ensure_buf(compiler, 2); | inst = (sljit_ub*)ensure_buf(compiler, 2); |
FAIL_IF_NULL(code); | FAIL_IF_NULL(inst); |
|
|
*code++ = 0; | *inst++ = 0; |
*code++ = type + 4; | *inst++ = type + 4; |
} |
} |
else { |
else { |
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64) |
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64) |
/* REX_W is not necessary (src is not immediate). */ |
/* REX_W is not necessary (src is not immediate). */ |
compiler->mode32 = 1; |
compiler->mode32 = 1; |
#endif |
#endif |
code = emit_x86_instruction(compiler, 1, 0, 0, src, srcw); | inst = emit_x86_instruction(compiler, 1, 0, 0, src, srcw); |
FAIL_IF(!code); | FAIL_IF(!inst); |
*code++ = 0xff; | *inst++ = GROUP_FF; |
*code |= (type >= SLJIT_FAST_CALL) ? (2 << 3) : (4 << 3); | *inst |= (type >= SLJIT_FAST_CALL) ? CALL_rm : JMP_rm; |
} |
} |
return SLJIT_SUCCESS; |
return SLJIT_SUCCESS; |
} |
} |
|
|
SLJIT_API_FUNC_ATTRIBUTE int sljit_emit_cond_value(struct sljit_compiler *compiler, int op, int dst, sljit_w dstw, int type) | SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_emit_op_flags(struct sljit_compiler *compiler, sljit_si op, |
| sljit_si dst, sljit_sw dstw, |
| sljit_si src, sljit_sw srcw, |
| sljit_si type) |
{ |
{ |
sljit_ub *buf; | sljit_ub *inst; |
sljit_ub cond_set = 0; |
sljit_ub cond_set = 0; |
int dst_save = dst; |
|
sljit_w dstw_save = dstw; |
|
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64) |
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64) |
int reg; | sljit_si reg; |
| #else |
| /* CHECK_EXTRA_REGS might overwrite these values. */ |
| sljit_si dst_save = dst; |
| sljit_sw dstw_save = dstw; |
#endif |
#endif |
|
|
CHECK_ERROR(); |
CHECK_ERROR(); |
check_sljit_emit_cond_value(compiler, op, dst, dstw, type); | check_sljit_emit_op_flags(compiler, op, dst, dstw, src, srcw, type); |
|
|
if (dst == SLJIT_UNUSED) |
if (dst == SLJIT_UNUSED) |
return SLJIT_SUCCESS; |
return SLJIT_SUCCESS; |
Line 2420 SLJIT_API_FUNC_ATTRIBUTE int sljit_emit_cond_value(str
|
Line 2622 SLJIT_API_FUNC_ATTRIBUTE int sljit_emit_cond_value(str
|
if (SLJIT_UNLIKELY(compiler->flags_saved)) |
if (SLJIT_UNLIKELY(compiler->flags_saved)) |
FAIL_IF(emit_restore_flags(compiler, op & SLJIT_KEEP_FLAGS)); |
FAIL_IF(emit_restore_flags(compiler, op & SLJIT_KEEP_FLAGS)); |
|
|
switch (type) { | /* setcc = jcc + 0x10. */ |
case SLJIT_C_EQUAL: | cond_set = get_jump_code(type) + 0x10; |
case SLJIT_C_FLOAT_EQUAL: | |
cond_set = 0x94; | |
break; | |
|
|
case SLJIT_C_NOT_EQUAL: | #if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64) |
case SLJIT_C_FLOAT_NOT_EQUAL: | if (GET_OPCODE(op) == SLJIT_OR && !GET_ALL_FLAGS(op) && dst <= TMP_REGISTER && dst == src) { |
cond_set = 0x95; | inst = (sljit_ub*)ensure_buf(compiler, 1 + 4 + 3); |
break; | FAIL_IF(!inst); |
| INC_SIZE(4 + 3); |
case SLJIT_C_LESS: | /* Set low register to conditional flag. */ |
case SLJIT_C_FLOAT_LESS: | *inst++ = (reg_map[TMP_REGISTER] <= 7) ? REX : REX_B; |
cond_set = 0x92; | *inst++ = GROUP_0F; |
break; | *inst++ = cond_set; |
| *inst++ = MOD_REG | reg_lmap[TMP_REGISTER]; |
case SLJIT_C_GREATER_EQUAL: | *inst++ = REX | (reg_map[TMP_REGISTER] <= 7 ? 0 : REX_R) | (reg_map[dst] <= 7 ? 0 : REX_B); |
case SLJIT_C_FLOAT_GREATER_EQUAL: | *inst++ = OR_rm8_r8; |
cond_set = 0x93; | *inst++ = MOD_REG | (reg_lmap[TMP_REGISTER] << 3) | reg_lmap[dst]; |
break; | return SLJIT_SUCCESS; |
| |
case SLJIT_C_GREATER: | |
case SLJIT_C_FLOAT_GREATER: | |
cond_set = 0x97; | |
break; | |
| |
case SLJIT_C_LESS_EQUAL: | |
case SLJIT_C_FLOAT_LESS_EQUAL: | |
cond_set = 0x96; | |
break; | |
| |
case SLJIT_C_SIG_LESS: | |
cond_set = 0x9c; | |
break; | |
| |
case SLJIT_C_SIG_GREATER_EQUAL: | |
cond_set = 0x9d; | |
break; | |
| |
case SLJIT_C_SIG_GREATER: | |
cond_set = 0x9f; | |
break; | |
| |
case SLJIT_C_SIG_LESS_EQUAL: | |
cond_set = 0x9e; | |
break; | |
| |
case SLJIT_C_OVERFLOW: | |
case SLJIT_C_MUL_OVERFLOW: | |
cond_set = 0x90; | |
break; | |
| |
case SLJIT_C_NOT_OVERFLOW: | |
case SLJIT_C_MUL_NOT_OVERFLOW: | |
cond_set = 0x91; | |
break; | |
| |
case SLJIT_C_FLOAT_NAN: | |
cond_set = 0x9a; | |
break; | |
| |
case SLJIT_C_FLOAT_NOT_NAN: | |
cond_set = 0x9b; | |
break; | |
} |
} |
|
|
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64) | reg = (op == SLJIT_MOV && dst <= TMP_REGISTER) ? dst : TMP_REGISTER; |
reg = (op == SLJIT_MOV && dst >= SLJIT_TEMPORARY_REG1 && dst <= SLJIT_NO_REGISTERS) ? dst : TMP_REGISTER; | |
|
|
buf = (sljit_ub*)ensure_buf(compiler, 1 + 4 + 4); | inst = (sljit_ub*)ensure_buf(compiler, 1 + 4 + 4); |
FAIL_IF(!buf); | FAIL_IF(!inst); |
INC_SIZE(4 + 4); |
INC_SIZE(4 + 4); |
/* Set low register to conditional flag. */ |
/* Set low register to conditional flag. */ |
*buf++ = (reg_map[reg] <= 7) ? 0x40 : REX_B; | *inst++ = (reg_map[reg] <= 7) ? REX : REX_B; |
*buf++ = 0x0f; | *inst++ = GROUP_0F; |
*buf++ = cond_set; | *inst++ = cond_set; |
*buf++ = 0xC0 | reg_lmap[reg]; | *inst++ = MOD_REG | reg_lmap[reg]; |
*buf++ = REX_W | (reg_map[reg] <= 7 ? 0 : (REX_B | REX_R)); | *inst++ = REX_W | (reg_map[reg] <= 7 ? 0 : (REX_B | REX_R)); |
*buf++ = 0x0f; | *inst++ = GROUP_0F; |
*buf++ = 0xb6; | *inst++ = MOVZX_r_rm8; |
*buf = 0xC0 | (reg_lmap[reg] << 3) | reg_lmap[reg]; | *inst = MOD_REG | (reg_lmap[reg] << 3) | reg_lmap[reg]; |
|
|
if (reg == TMP_REGISTER) { | if (reg != TMP_REGISTER) |
if (op == SLJIT_MOV) { | return SLJIT_SUCCESS; |
compiler->mode32 = 0; | |
EMIT_MOV(compiler, dst, dstw, TMP_REGISTER, 0); | if (GET_OPCODE(op) < SLJIT_ADD) { |
} | compiler->mode32 = GET_OPCODE(op) != SLJIT_MOV; |
else { | return emit_mov(compiler, dst, dstw, TMP_REGISTER, 0); |
| } |
#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE) || (defined SLJIT_DEBUG && SLJIT_DEBUG) |
#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE) || (defined SLJIT_DEBUG && SLJIT_DEBUG) |
compiler->skip_checks = 1; | compiler->skip_checks = 1; |
#endif |
#endif |
return sljit_emit_op2(compiler, op, dst_save, dstw_save, dst_save, dstw_save, TMP_REGISTER, 0); | return sljit_emit_op2(compiler, op, dst, dstw, dst, dstw, TMP_REGISTER, 0); |
} | #else /* SLJIT_CONFIG_X86_64 */ |
} | if (GET_OPCODE(op) < SLJIT_ADD && dst <= TMP_REGISTER) { |
#else | if (reg_map[dst] <= 4) { |
if (op == SLJIT_MOV) { | /* Low byte is accessible. */ |
if (dst >= SLJIT_TEMPORARY_REG1 && dst <= SLJIT_TEMPORARY_REG3) { | inst = (sljit_ub*)ensure_buf(compiler, 1 + 3 + 3); |
buf = (sljit_ub*)ensure_buf(compiler, 1 + 3 + 3); | FAIL_IF(!inst); |
FAIL_IF(!buf); | |
INC_SIZE(3 + 3); |
INC_SIZE(3 + 3); |
/* Set low byte to conditional flag. */ |
/* Set low byte to conditional flag. */ |
*buf++ = 0x0f; | *inst++ = GROUP_0F; |
*buf++ = cond_set; | *inst++ = cond_set; |
*buf++ = 0xC0 | reg_map[dst]; | *inst++ = MOD_REG | reg_map[dst]; |
|
|
*buf++ = 0x0f; | *inst++ = GROUP_0F; |
*buf++ = 0xb6; | *inst++ = MOVZX_r_rm8; |
*buf = 0xC0 | (reg_map[dst] << 3) | reg_map[dst]; | *inst = MOD_REG | (reg_map[dst] << 3) | reg_map[dst]; |
| return SLJIT_SUCCESS; |
} |
} |
else { |
|
EMIT_MOV(compiler, TMP_REGISTER, 0, SLJIT_TEMPORARY_REG1, 0); |
|
|
|
buf = (sljit_ub*)ensure_buf(compiler, 1 + 3 + 3); | /* Low byte is not accessible. */ |
FAIL_IF(!buf); | if (cpu_has_cmov == -1) |
INC_SIZE(3 + 3); | get_cpu_features(); |
/* Set al to conditional flag. */ | |
*buf++ = 0x0f; | |
*buf++ = cond_set; | |
*buf++ = 0xC0; | |
|
|
*buf++ = 0x0f; | if (cpu_has_cmov) { |
*buf++ = 0xb6; | EMIT_MOV(compiler, TMP_REGISTER, 0, SLJIT_IMM, 1); |
if (dst >= SLJIT_SAVED_REG1 && dst <= SLJIT_NO_REGISTERS) | /* a xor reg, reg operation would overwrite the flags. */ |
*buf = 0xC0 | (reg_map[dst] << 3); | EMIT_MOV(compiler, dst, 0, SLJIT_IMM, 0); |
else { | |
*buf = 0xC0; | |
EMIT_MOV(compiler, dst, dstw, SLJIT_TEMPORARY_REG1, 0); | |
} | |
|
|
EMIT_MOV(compiler, SLJIT_TEMPORARY_REG1, 0, TMP_REGISTER, 0); | inst = (sljit_ub*)ensure_buf(compiler, 1 + 3); |
| FAIL_IF(!inst); |
| INC_SIZE(3); |
| |
| *inst++ = GROUP_0F; |
| /* cmovcc = setcc - 0x50. */ |
| *inst++ = cond_set - 0x50; |
| *inst++ = MOD_REG | (reg_map[dst] << 3) | reg_map[TMP_REGISTER]; |
| return SLJIT_SUCCESS; |
} |
} |
|
|
|
inst = (sljit_ub*)ensure_buf(compiler, 1 + 1 + 3 + 3 + 1); |
|
FAIL_IF(!inst); |
|
INC_SIZE(1 + 3 + 3 + 1); |
|
*inst++ = XCHG_EAX_r + reg_map[TMP_REGISTER]; |
|
/* Set al to conditional flag. */ |
|
*inst++ = GROUP_0F; |
|
*inst++ = cond_set; |
|
*inst++ = MOD_REG | 0 /* eax */; |
|
|
|
*inst++ = GROUP_0F; |
|
*inst++ = MOVZX_r_rm8; |
|
*inst++ = MOD_REG | (reg_map[dst] << 3) | 0 /* eax */; |
|
*inst++ = XCHG_EAX_r + reg_map[TMP_REGISTER]; |
|
return SLJIT_SUCCESS; |
} |
} |
else { |
|
if (dst >= SLJIT_TEMPORARY_REG1 && dst <= SLJIT_TEMPORARY_REG3) { |
|
EMIT_MOV(compiler, TMP_REGISTER, 0, dst, 0); |
|
buf = (sljit_ub*)ensure_buf(compiler, 1 + 3); |
|
FAIL_IF(!buf); |
|
INC_SIZE(3); |
|
|
|
*buf++ = 0x0f; | if (GET_OPCODE(op) == SLJIT_OR && !GET_ALL_FLAGS(op) && dst <= TMP_REGISTER && dst == src && reg_map[dst] <= 4) { |
*buf++ = cond_set; | SLJIT_COMPILE_ASSERT(reg_map[SLJIT_SCRATCH_REG1] == 0, scratch_reg1_must_be_eax); |
*buf++ = 0xC0 | reg_map[dst]; | if (dst != SLJIT_SCRATCH_REG1) { |
| inst = (sljit_ub*)ensure_buf(compiler, 1 + 1 + 3 + 2 + 1); |
| FAIL_IF(!inst); |
| INC_SIZE(1 + 3 + 2 + 1); |
| /* Set low register to conditional flag. */ |
| *inst++ = XCHG_EAX_r + reg_map[TMP_REGISTER]; |
| *inst++ = GROUP_0F; |
| *inst++ = cond_set; |
| *inst++ = MOD_REG | 0 /* eax */; |
| *inst++ = OR_rm8_r8; |
| *inst++ = MOD_REG | (0 /* eax */ << 3) | reg_map[dst]; |
| *inst++ = XCHG_EAX_r + reg_map[TMP_REGISTER]; |
} |
} |
else { |
else { |
EMIT_MOV(compiler, TMP_REGISTER, 0, SLJIT_TEMPORARY_REG1, 0); | inst = (sljit_ub*)ensure_buf(compiler, 1 + 2 + 3 + 2 + 2); |
| FAIL_IF(!inst); |
| INC_SIZE(2 + 3 + 2 + 2); |
| /* Set low register to conditional flag. */ |
| *inst++ = XCHG_r_rm; |
| *inst++ = MOD_REG | (1 /* ecx */ << 3) | reg_map[TMP_REGISTER]; |
| *inst++ = GROUP_0F; |
| *inst++ = cond_set; |
| *inst++ = MOD_REG | 1 /* ecx */; |
| *inst++ = OR_rm8_r8; |
| *inst++ = MOD_REG | (1 /* ecx */ << 3) | 0 /* eax */; |
| *inst++ = XCHG_r_rm; |
| *inst++ = MOD_REG | (1 /* ecx */ << 3) | reg_map[TMP_REGISTER]; |
| } |
| return SLJIT_SUCCESS; |
| } |
|
|
buf = (sljit_ub*)ensure_buf(compiler, 1 + 3 + 3 + 1); | /* Set TMP_REGISTER to the bit. */ |
FAIL_IF(!buf); | inst = (sljit_ub*)ensure_buf(compiler, 1 + 1 + 3 + 3 + 1); |
INC_SIZE(3 + 3 + 1); | FAIL_IF(!inst); |
/* Set al to conditional flag. */ | INC_SIZE(1 + 3 + 3 + 1); |
*buf++ = 0x0f; | *inst++ = XCHG_EAX_r + reg_map[TMP_REGISTER]; |
*buf++ = cond_set; | /* Set al to conditional flag. */ |
*buf++ = 0xC0; | *inst++ = GROUP_0F; |
| *inst++ = cond_set; |
| *inst++ = MOD_REG | 0 /* eax */; |
|
|
*buf++ = 0x0f; | *inst++ = GROUP_0F; |
*buf++ = 0xb6; | *inst++ = MOVZX_r_rm8; |
*buf++ = 0xC0; | *inst++ = MOD_REG | (0 << 3) /* eax */ | 0 /* eax */; |
|
|
*buf++ = 0x90 + reg_map[TMP_REGISTER]; | *inst++ = XCHG_EAX_r + reg_map[TMP_REGISTER]; |
} | |
| if (GET_OPCODE(op) < SLJIT_ADD) |
| return emit_mov(compiler, dst, dstw, TMP_REGISTER, 0); |
| |
#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE) || (defined SLJIT_DEBUG && SLJIT_DEBUG) |
#if (defined SLJIT_VERBOSE && SLJIT_VERBOSE) || (defined SLJIT_DEBUG && SLJIT_DEBUG) |
compiler->skip_checks = 1; | compiler->skip_checks = 1; |
#endif |
#endif |
return sljit_emit_op2(compiler, op, dst_save, dstw_save, dst_save, dstw_save, TMP_REGISTER, 0); | return sljit_emit_op2(compiler, op, dst_save, dstw_save, dst_save, dstw_save, TMP_REGISTER, 0); |
} | #endif /* SLJIT_CONFIG_X86_64 */ |
#endif | |
| |
return SLJIT_SUCCESS; | |
} |
} |
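Both magic offsets used above fall out of the regular layout of the x86 condition-code opcodes (standard encoding facts, not something defined in this hunk):

	/*   jcc   rel32        : 0F 80+cc   (presumably what get_jump_code() returns)
	 *   setcc r/m8         : 0F 90+cc   -> get_jump_code(type) + 0x10
	 *   cmovcc r32, r/m32  : 0F 40+cc   -> cond_set - 0x50
	 * cc is the 4-bit condition (0x2 = B/C, 0x4 = E/Z, 0x5 = NE/NZ, ...).  On
	 * 32-bit x86 only eax/ecx/edx/ebx have encodable low bytes (al/cl/dl/bl),
	 * which is what the reg_map[dst] <= 4 test and the xchg-with-eax fallback
	 * above work around. */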
|
|
SLJIT_API_FUNC_ATTRIBUTE int sljit_get_local_base(struct sljit_compiler *compiler, int dst, sljit_w dstw, sljit_w offset) | SLJIT_API_FUNC_ATTRIBUTE sljit_si sljit_get_local_base(struct sljit_compiler *compiler, sljit_si dst, sljit_sw dstw, sljit_sw offset) |
{ |
{ |
CHECK_ERROR(); |
CHECK_ERROR(); |
check_sljit_get_local_base(compiler, dst, dstw, offset); |
check_sljit_get_local_base(compiler, dst, dstw, offset); |
Line 2608 SLJIT_API_FUNC_ATTRIBUTE int sljit_get_local_base(stru
|
Line 2798 SLJIT_API_FUNC_ATTRIBUTE int sljit_get_local_base(stru
|
if (NOT_HALFWORD(offset)) { |
if (NOT_HALFWORD(offset)) { |
FAIL_IF(emit_load_imm64(compiler, TMP_REGISTER, offset)); |
FAIL_IF(emit_load_imm64(compiler, TMP_REGISTER, offset)); |
#if (defined SLJIT_DEBUG && SLJIT_DEBUG) |
#if (defined SLJIT_DEBUG && SLJIT_DEBUG) |
SLJIT_ASSERT(emit_lea_binary(compiler, dst, dstw, SLJIT_LOCALS_REG, 0, TMP_REGISTER, 0) != SLJIT_ERR_UNSUPPORTED); | SLJIT_ASSERT(emit_lea_binary(compiler, SLJIT_KEEP_FLAGS, dst, dstw, SLJIT_LOCALS_REG, 0, TMP_REGISTER, 0) != SLJIT_ERR_UNSUPPORTED); |
return compiler->error; |
return compiler->error; |
#else |
#else |
return emit_lea_binary(compiler, dst, dstw, SLJIT_LOCALS_REG, 0, TMP_REGISTER, 0); | return emit_lea_binary(compiler, SLJIT_KEEP_FLAGS, dst, dstw, SLJIT_LOCALS_REG, 0, TMP_REGISTER, 0); |
#endif |
#endif |
} |
} |
#endif |
#endif |
|
|
if (offset != 0) |
if (offset != 0) |
return emit_lea_binary(compiler, dst, dstw, SLJIT_LOCALS_REG, 0, SLJIT_IMM, offset); | return emit_lea_binary(compiler, SLJIT_KEEP_FLAGS, dst, dstw, SLJIT_LOCALS_REG, 0, SLJIT_IMM, offset); |
return emit_mov(compiler, dst, dstw, SLJIT_LOCALS_REG, 0); |
return emit_mov(compiler, dst, dstw, SLJIT_LOCALS_REG, 0); |
} |
} |
|
|
SLJIT_API_FUNC_ATTRIBUTE struct sljit_const* sljit_emit_const(struct sljit_compiler *compiler, int dst, sljit_w dstw, sljit_w init_value) | SLJIT_API_FUNC_ATTRIBUTE struct sljit_const* sljit_emit_const(struct sljit_compiler *compiler, sljit_si dst, sljit_sw dstw, sljit_sw init_value) |
{ |
{ |
sljit_ub *buf; | sljit_ub *inst; |
struct sljit_const *const_; |
struct sljit_const *const_; |
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64) |
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64) |
int reg; | sljit_si reg; |
#endif |
#endif |
|
|
CHECK_ERROR_PTR(); |
CHECK_ERROR_PTR(); |
Line 2641 SLJIT_API_FUNC_ATTRIBUTE struct sljit_const* sljit_emi
|
Line 2831 SLJIT_API_FUNC_ATTRIBUTE struct sljit_const* sljit_emi
|
|
|
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64) |
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64) |
compiler->mode32 = 0; |
compiler->mode32 = 0; |
reg = (dst >= SLJIT_TEMPORARY_REG1 && dst <= SLJIT_NO_REGISTERS) ? dst : TMP_REGISTER; | reg = (dst <= TMP_REGISTER) ? dst : TMP_REGISTER; |
|
|
if (emit_load_imm64(compiler, reg, init_value)) |
if (emit_load_imm64(compiler, reg, init_value)) |
return NULL; |
return NULL; |
Line 2653 SLJIT_API_FUNC_ATTRIBUTE struct sljit_const* sljit_emi
|
Line 2843 SLJIT_API_FUNC_ATTRIBUTE struct sljit_const* sljit_emi
|
return NULL; |
return NULL; |
#endif |
#endif |
|
|
buf = (sljit_ub*)ensure_buf(compiler, 2); | inst = (sljit_ub*)ensure_buf(compiler, 2); |
PTR_FAIL_IF(!buf); | PTR_FAIL_IF(!inst); |
|
|
*buf++ = 0; | *inst++ = 0; |
*buf++ = 1; | *inst++ = 1; |
|
|
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64) |
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64) |
if (reg == TMP_REGISTER && dst != SLJIT_UNUSED) |
if (reg == TMP_REGISTER && dst != SLJIT_UNUSED) |
Line 2671 SLJIT_API_FUNC_ATTRIBUTE struct sljit_const* sljit_emi
|
Line 2861 SLJIT_API_FUNC_ATTRIBUTE struct sljit_const* sljit_emi
|
SLJIT_API_FUNC_ATTRIBUTE void sljit_set_jump_addr(sljit_uw addr, sljit_uw new_addr) |
SLJIT_API_FUNC_ATTRIBUTE void sljit_set_jump_addr(sljit_uw addr, sljit_uw new_addr) |
{ |
{ |
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32) |
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32) |
*(sljit_w*)addr = new_addr - (addr + 4); | *(sljit_sw*)addr = new_addr - (addr + 4); |
#else |
#else |
*(sljit_uw*)addr = new_addr; |
*(sljit_uw*)addr = new_addr; |
#endif |
#endif |
} |
} |
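Worked example for the 32-bit branch (hypothetical addresses): addr points at the rel32 field itself, and the CPU adds the displacement to the address of the byte after that field, so if the field sits at 0x1000 and the jump should now reach 0x1050, the value stored is 0x1050 - (0x1000 + 4) = 0x4c. On x86-64 the jump was emitted as a mov r, imm64 followed by an indirect jump or call, so the absolute target is stored instead.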
|
|
SLJIT_API_FUNC_ATTRIBUTE void sljit_set_const(sljit_uw addr, sljit_w new_constant) | SLJIT_API_FUNC_ATTRIBUTE void sljit_set_const(sljit_uw addr, sljit_sw new_constant) |
{ |
{ |
*(sljit_w*)addr = new_constant; | *(sljit_sw*)addr = new_constant; |
} |
} |