Source: http://www.ncbi.nlm.nih.gov/IEB/ToolBox/CPP_DOC/doxyhtml/sljitNativeX86__common_8c_source.html

NCBI C++ ToolKit: src/util/regexp/sljit/sljitNativeX86_common.c Source File

/* sljit_get_platform_name(): */
    return "x86" SLJIT_CPUINFO;

#define TMP_REG1 (SLJIT_NUMBER_OF_REGISTERS + 2)
#define TMP_FREG (SLJIT_NUMBER_OF_FLOAT_REGISTERS + 1)

#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)

/* 32-bit configuration: register mapping tables (reg_map, then freg_map): */
    0, 0, 2, 1, 0, 0, 0, 0, 0, 0, 5, 7, 6, 4, 3
    0, 1, 2, 3, 4, 5, 6, 7, 0

#define CHECK_EXTRA_REGS(p, w, do) \
    if (p >= SLJIT_R3 && p <= SLJIT_S3) { \
        w = (2 * SSIZE_OF(sw)) + ((p) - SLJIT_R3) * SSIZE_OF(sw); \
        p = SLJIT_MEM1(SLJIT_SP); \
        /* ... */

/* 64-bit configuration: */
#define TMP_REG2 (SLJIT_NUMBER_OF_REGISTERS + 3)

/* 64-bit register mapping tables, in listing order: reg_map and reg_lmap
   (with separate variants for _WIN64), then freg_map and freg_lmap: */
    0, 0, 6, 7, 1, 8, 11, 10, 12, 5, 13, 14, 15, 3, 4, 2, 9
    0, 0, 6, 7, 1, 0, 3, 2, 4, 5, 5, 6, 7, 3, 4, 2, 1
    0, 0, 2, 8, 1, 11, 12, 5, 13, 14, 15, 7, 6, 3, 4, 9, 10
    0, 0, 2, 0, 1, 3, 4, 5, 5, 6, 7, 7, 6, 3, 4, 1, 2
    0, 0, 1, 2, 3, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 4
    0, 0, 1, 2, 3, 5, 6, 7, 0, 1, 2, 3, 4, 5, 6, 7, 4
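As a rough illustration (not part of this file), the tables above are what turn sljit's virtual register numbers into hardware register encodings. A minimal sketch of the lookup, with my_reg_map standing in for one of the tables:

    #include <stdint.h>

    /* Hypothetical copy of a mapping table: index = sljit register number,
       value = x86 register encoding used in ModRM/REX fields. */
    static const uint8_t my_reg_map[] = { 0, 0, 6, 7, 1, 8, 11, 10, 12, 5, 13, 14, 15, 3, 4, 2, 9 };

    /* Low 3 bits go into the ModRM byte; bit 3 selects a REX.B/REX.R extension. */
    static uint8_t modrm_low3(int sljit_reg)    { return my_reg_map[sljit_reg] & 0x7; }
    static int     needs_rex_bit(int sljit_reg) { return (my_reg_map[sljit_reg] >> 3) & 0x1; }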

/* Range of a sign-extended 32-bit ("halfword") immediate; two variants of
   the constants, selected by the configuration: */
#define HALFWORD_MAX 0x7fffffffl
#define HALFWORD_MIN -0x80000000l
/* ... */
#define HALFWORD_MAX 0x7fffffffll
#define HALFWORD_MIN -0x80000000ll

#define IS_HALFWORD(x) ((x) <= HALFWORD_MAX && (x) >= HALFWORD_MIN)
#define NOT_HALFWORD(x) ((x) > HALFWORD_MAX || (x) < HALFWORD_MIN)

#define CHECK_EXTRA_REGS(p, w, do)

#define U8(v) ((sljit_u8)(v))

/* Instruction-form flags passed to emit_x86_instruction(): */
#define EX86_BIN_INS    ((sljit_uw)0x000010)
#define EX86_SHIFT_INS  ((sljit_uw)0x000020)
#define EX86_BYTE_ARG   ((sljit_uw)0x000040)
#define EX86_HALF_ARG   ((sljit_uw)0x000080)
#define EX86_REX        ((sljit_uw)0x000100)
#define EX86_NO_REXW    ((sljit_uw)0x000200)
#define EX86_PREF_66    ((sljit_uw)0x000400)
#define EX86_PREF_F2    ((sljit_uw)0x000800)
#define EX86_PREF_F3    ((sljit_uw)0x001000)
#define EX86_SSE2_OP1   ((sljit_uw)0x002000)
#define EX86_SSE2_OP2   ((sljit_uw)0x004000)
#define EX86_SSE2       (EX86_SSE2_OP1 | EX86_SSE2_OP2)
#define EX86_VEX_EXT    ((sljit_uw)0x008000)
#define VEX_OP_0F38     ((sljit_uw)0x010000)
#define VEX_OP_0F3A     ((sljit_uw)0x020000)
#define VEX_SSE2_OPV    ((sljit_uw)0x040000)
#define VEX_AUTO_W      ((sljit_uw)0x080000)
#define VEX_W           ((sljit_uw)0x100000)
#define VEX_256         ((sljit_uw)0x200000)

#define EX86_SELECT_66(op) (((op) & SLJIT_32) ? 0 : EX86_PREF_66)
#define EX86_SELECT_F2_F3(op) (((op) & SLJIT_32) ? EX86_PREF_F3 : EX86_PREF_F2)

/* Instruction opcodes and /digit opcode extensions: */
#define ADD ( 0 << 3)
#define ADD_EAX_i32 0x05
#define ADD_r_rm 0x03
#define ADD_rm_r 0x01
#define ADDSD_x_xm 0x58
#define ADC ( 2 << 3)
#define ADC_EAX_i32 0x15
#define ADC_r_rm 0x13
#define ADC_rm_r 0x11
#define AND ( 4 << 3)
#define AND_EAX_i32 0x25
#define AND_r_rm 0x23
#define AND_rm_r 0x21
#define ANDPD_x_xm 0x54
#define BSR_r_rm ( 0xbd)
#define BSF_r_rm ( 0xbc)
#define BSWAP_r ( 0xc8)
#define CALL_i32 0xe8
#define CALL_rm ( 2 << 3)
#define CMOVE_r_rm ( 0x44)
#define CMP ( 7 << 3)
#define CMP_EAX_i32 0x3d
#define CMP_r_rm 0x3b
#define CMP_rm_r 0x39
#define CMPS_x_xm 0xc2
#define CMPXCHG_rm_r 0xb1
#define CMPXCHG_rm8_r 0xb0
#define CVTPD2PS_x_xm 0x5a
#define CVTPS2PD_x_xm 0x5a
#define CVTSI2SD_x_rm 0x2a
#define CVTTSD2SI_r_xm 0x2c
#define DIV ( 6 << 3)
#define DIVSD_x_xm 0x5e
#define EXTRACTPS_x_xm 0x17
#define INSERTPS_x_xm 0x21
#define IDIV ( 7 << 3)
#define IMUL ( 5 << 3)
#define IMUL_r_rm ( 0xaf)
#define IMUL_r_rm_i8 0x6b
#define IMUL_r_rm_i32 0x69
#define JMP_rm ( 4 << 3)
#define LZCNT_r_rm ( 0xbd)
#define MOV_r_rm 0x8b
#define MOV_r_i32 0xb8
#define MOV_rm_r 0x89
#define MOV_rm_i32 0xc7
#define MOV_rm8_i8 0xc6
#define MOV_rm8_r8 0x88
#define MOVAPS_x_xm 0x28
#define MOVAPS_xm_x 0x29
#define MOVD_x_rm 0x6e
#define MOVD_rm_x 0x7e
#define MOVDDUP_x_xm 0x12
#define MOVDQA_x_xm 0x6f
#define MOVDQA_xm_x 0x7f
#define MOVHLPS_x_x 0x12
#define MOVHPD_m_x 0x17
#define MOVHPD_x_m 0x16
#define MOVLHPS_x_x 0x16
#define MOVLPD_m_x 0x13
#define MOVLPD_x_m 0x12
#define MOVMSKPS_r_x ( 0x50)
#define MOVQ_x_xm ( 0x7e)
#define MOVSD_x_xm 0x10
#define MOVSD_xm_x 0x11
#define MOVSHDUP_x_xm 0x16
#define MOVSXD_r_rm 0x63
#define MOVSX_r_rm8 ( 0xbe)
#define MOVSX_r_rm16 ( 0xbf)
#define MOVUPS_x_xm 0x10
#define MOVZX_r_rm8 ( 0xb6)
#define MOVZX_r_rm16 ( 0xb7)
#define MUL ( 4 << 3)
#define MULSD_x_xm 0x59
#define NEG_rm ( 3 << 3)
#define NOT_rm ( 2 << 3)
#define OR_EAX_i32 0x0d
#define OR_rm8_r8 0x08
#define ORPD_x_xm 0x56
#define PACKSSWB_x_xm ( 0x63)
#define PAND_x_xm 0xdb
#define PCMPEQD_x_xm 0x76
#define PINSRB_x_rm_i8 0x20
#define PINSRW_x_rm_i8 0xc4
#define PINSRD_x_rm_i8 0x22
#define PEXTRB_rm_x_i8 0x14
#define PEXTRW_rm_x_i8 0x15
#define PEXTRD_rm_x_i8 0x16
#define PMOVMSKB_r_x ( 0xd7)
#define PMOVSXBD_x_xm 0x21
#define PMOVSXBQ_x_xm 0x22
#define PMOVSXBW_x_xm 0x20
#define PMOVSXDQ_x_xm 0x25
#define PMOVSXWD_x_xm 0x23
#define PMOVSXWQ_x_xm 0x24
#define PMOVZXBD_x_xm 0x31
#define PMOVZXBQ_x_xm 0x32
#define PMOVZXBW_x_xm 0x30
#define PMOVZXDQ_x_xm 0x35
#define PMOVZXWD_x_xm 0x33
#define PMOVZXWQ_x_xm 0x34
#define POR_x_xm 0xeb
#define PREFETCH 0x18
#define PSHUFB_x_xm 0x00
#define PSHUFD_x_xm 0x70
#define PSHUFLW_x_xm 0x70
#define PSRLDQ_x 0x73
#define PSLLD_x_i8 0x72
#define PSLLQ_x_i8 0x73
#define PUSH_i32 0x68
#define PUSH_rm ( 6 << 3)
#define PXOR_x_xm 0xef
#define ROL ( 0 << 3)
#define ROR ( 1 << 3)
#define RET_near 0xc3
#define SBB ( 3 << 3)
#define SBB_EAX_i32 0x1d
#define SBB_r_rm 0x1b
#define SBB_rm_r 0x19
#define SAR ( 7 << 3)
#define SHL ( 4 << 3)
#define SHR ( 5 << 3)
#define SHUFPS_x_xm 0xc6
#define SUB ( 5 << 3)
#define SUB_EAX_i32 0x2d
#define SUB_r_rm 0x2b
#define SUB_rm_r 0x29
#define SUBSD_x_xm 0x5c
#define TEST_EAX_i32 0xa9
#define TEST_rm_r 0x85
#define TZCNT_r_rm ( 0xbc)
#define UCOMISD_x_xm 0x2e
#define UNPCKLPD_x_xm 0x14
#define UNPCKLPS_x_xm 0x14
#define VBROADCASTSD_x_xm 0x19
#define VBROADCASTSS_x_xm 0x18
#define VEXTRACTF128_x_ym 0x19
#define VEXTRACTI128_x_ym 0x39
#define VINSERTF128_y_y_xm 0x18
#define VINSERTI128_y_y_xm 0x38
#define VPBROADCASTB_x_xm 0x78
#define VPBROADCASTD_x_xm 0x58
#define VPBROADCASTQ_x_xm 0x59
#define VPBROADCASTW_x_xm 0x79
#define VPERMPD_y_ym 0x01
#define VPERMQ_y_ym 0x00
#define XCHG_EAX_r 0x90
#define XCHG_r_rm 0x87
#define XOR ( 6 << 3)
#define XOR_EAX_i32 0x35
#define XOR_r_rm 0x33
#define XOR_rm_r 0x31
#define XORPD_x_xm 0x57

/* Prefix and opcode-group bytes: */
#define GROUP_0F 0x0f
#define GROUP_66 0x66
#define GROUP_F3 0xf3
#define GROUP_F7 0xf7
#define GROUP_FF 0xff
#define GROUP_BINARY_81 0x81
#define GROUP_BINARY_83 0x83
#define GROUP_SHIFT_1 0xd1
#define GROUP_SHIFT_N 0xc1
#define GROUP_SHIFT_CL 0xd3
#define GROUP_LOCK 0xf0

#define MOD_DISP8 0x40

#define INC_SIZE(s) (*inst++ = U8(s), compiler->size += (s))
#define PUSH_REG(r) (*inst++ = U8(PUSH_r + (r)))
#define POP_REG(r) (*inst++ = U8(POP_r + (r)))
#define RET() (*inst++ = RET_near)
#define RET_I16(n) (*inst++ = RET_i16, *inst++ = U8(n), *inst++ = 0)

/* Markers used in the intermediate instruction buffer: */
#define SLJIT_INST_LABEL 255
#define SLJIT_INST_JUMP 254
#define SLJIT_INST_MOV_ADDR 253
#define SLJIT_INST_CONST 252

/* CPU feature flags stored in cpu_feature_list: */
#define CPU_FEATURE_DETECTED 0x001
#if (defined SLJIT_DETECT_SSE2 && SLJIT_DETECT_SSE2)
#define CPU_FEATURE_SSE2 0x002
#endif
#define CPU_FEATURE_SSE41 0x004
#define CPU_FEATURE_LZCNT 0x008
#define CPU_FEATURE_TZCNT 0x010
#define CPU_FEATURE_CMOV 0x020
#define CPU_FEATURE_AVX 0x040
#define CPU_FEATURE_AVX2 0x080
#define CPU_FEATURE_OSXSAVE 0x100

/* Compiler-specific intrinsics for reading CPUID (fragments): */
    /* ... */
#include <cmnintrin.h>
#elif defined(_MSC_VER) && _MSC_VER >= 1400
    /* ... */

/* execute_cpu_id(), fragments: */
#if defined(_MSC_VER) && _MSC_VER >= 1400
    __cpuidex((int*)info, (int)info[0], (int)info[2]);

#elif defined(__GNUC__) || defined(__INTEL_COMPILER) || defined(__SUNPRO_C) || defined(__TINYC__)

    /* Inline-assembly variant; the SLJIT_CONFIG_X86_32 build addresses the
       info array through %esi, the 64-bit build through %rsi: */
    "movl (%%esi), %%eax\n"
    "movl 8(%%esi), %%ecx\n"
    /* ... cpuid ... */
    "movl %%eax, (%%esi)\n"
    "movl %%ebx, 4(%%esi)\n"
    "movl %%ecx, 8(%%esi)\n"
    "movl %%edx, 12(%%esi)\n"

    "movl (%%rsi), %%eax\n"
    "movl 8(%%rsi), %%ecx\n"
    /* ... cpuid ... */
    "movl %%eax, (%%rsi)\n"
    "movl %%ebx, 4(%%rsi)\n"
    "movl %%ecx, 8(%%rsi)\n"
    "movl %%edx, 12(%%rsi)\n"

    /* clobber lists of the two variants: */
    : "memory", "eax", "ecx", "edx", "esi"
    : "memory", "rax", "rbx", "rcx", "rdx", "rsi"
#endif

/* execute_get_xcr0_low() (reads XCR0, used together with OSXSAVE), fragments: */
#if defined(_MSC_VER) && _MSC_VER >= 1400
    /* ... */
#elif defined(__GNUC__) || defined(__INTEL_COMPILER) || defined(__SUNPRO_C) || defined(__TINYC__)
    "xorl %%ecx, %%ecx\n"
    /* ... xgetbv ... */
#endif

/* get_cpu_features(), fragments: CPUID result words (info[1] = EBX,
   info[2] = ECX, info[3] = EDX) are tested bit by bit and translated into
   the CPU_FEATURE_* flags above; bit positions follow the CPUID spec. */
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
    /* ... */
#endif
    if (info[1] & 0x20)         /* leaf 7, EBX bit 5: AVX2 */
        /* ... */
    if (info[2] & 0x80000)      /* leaf 1, ECX bit 19: SSE4.1 */
        /* ... */
    if (info[2] & 0x8000000)    /* ECX bit 27: OSXSAVE */
        /* ... */
    if (info[2] & 0x10000000)   /* ECX bit 28: AVX */
        /* ... */
#if (defined SLJIT_DETECT_SSE2 && SLJIT_DETECT_SSE2)
    if (info[3] & 0x4000000)    /* EDX bit 26: SSE2 */
        feature_list |= CPU_FEATURE_SSE2;
#endif
    if (info[3] & 0x8000)       /* EDX bit 15: CMOV */
        /* ... */

    info[0] = 0x80000001;       /* query the extended leaf */
    /* ... */
    if (info[2] & 0x20)         /* extended leaf, ECX bit 5: LZCNT/ABM */
        /* ... */
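For orientation only (this is not the file's code), the detection above amounts to reading CPUID words and masking individual bits. A hedged, minimal sketch using GCC/Clang's cpuid helper instead of the file's own execute_cpu_id():

    #include <cpuid.h>   /* __get_cpuid(), GCC/Clang only */

    static unsigned detect_features(void)
    {
        unsigned eax, ebx, ecx, edx, features = 0;

        if (__get_cpuid(1, &eax, &ebx, &ecx, &edx)) {
            if (edx & (1u << 26)) features |= 0x002;  /* CPU_FEATURE_SSE2    */
            if (edx & (1u << 15)) features |= 0x020;  /* CPU_FEATURE_CMOV    */
            if (ecx & (1u << 19)) features |= 0x004;  /* CPU_FEATURE_SSE41   */
            if (ecx & (1u << 27)) features |= 0x100;  /* CPU_FEATURE_OSXSAVE */
            if (ecx & (1u << 28)) features |= 0x040;  /* CPU_FEATURE_AVX     */
        }
        return features;
    }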

/* Jump-size detection (detect_near_jump_type / detect_far_jump_type) and
   the patching code in generate_jump_or_mov_addr(), fragments: */
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
    /* ... */
#endif
    if (jump->flags & JUMP_ADDR)
        /* ... */
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
    /* ... */
#endif
    } else if (short_jump) {
        /* ... */
        jump->flags |= PATCH_MB;
        /* ... */
        jump->flags |= PATCH_MW;
    }

#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
    if (flags & PATCH_MD) {
        /* ... */
    }
    if (flags & PATCH_MW) {
        /* ... */
    }
#endif
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
    if (flags & PATCH_MB) {
        /* ... */
    } else if (flags & PATCH_MW) {
        /* ... */
    }
#endif

/* reduce_code_size(): walks the recorded labels and jumps and subtracts the
   bytes saved whenever a jump fits a shorter encoding. Fragments: */
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
    /* ... */
#endif
#if (defined SLJIT_DEBUG && SLJIT_DEBUG)
    /* ... */
#endif
    jump = compiler->jumps;
    /* ... */
    next_label_size = SLJIT_GET_NEXT_SIZE(label);
    next_jump_addr = SLJIT_GET_NEXT_ADDRESS(jump);

    /* main loop: */
        next_min_addr = next_label_size;
        if (next_jump_addr < next_min_addr)
            next_min_addr = next_jump_addr;

        if (next_min_addr == SLJIT_MAX_ADDRESS)
            /* ... */

        if (next_min_addr == next_label_size) {
            label->size -= size_reduce;
            /* ... */
            next_label_size = SLJIT_GET_NEXT_SIZE(label);
        }

        if (next_min_addr != next_jump_addr)
            /* ... */

        if (!(jump->flags & JUMP_MOV_ADDR)) {
#if (defined SLJIT_DEBUG && SLJIT_DEBUG)
            size_reduce_max = size_reduce + (((jump->flags >> TYPE_SHIFT) < SLJIT_JUMP) ? CJUMP_MAX_SIZE : JUMP_MAX_SIZE);
#endif
            if (jump->flags & JUMP_ADDR) {
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
                if (jump->u.target <= 0xffffffffl)
                    /* ... */
#endif
            }
            /* ... */
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
            if (diff <= 0x7f + 2 && diff >= -0x80 + 2)
                size_reduce += JUMP_MAX_SIZE - 2;
            else if (diff <= HALFWORD_MAX + 5 && diff >= HALFWORD_MIN + 5)
                size_reduce += JUMP_MAX_SIZE - 5;
            /* ... */
            if (diff <= 0x7f + 2 && diff >= -0x80 + 2)
                size_reduce += CJUMP_MAX_SIZE - 2;
            else if (diff <= HALFWORD_MAX + 6 && diff >= HALFWORD_MIN + 6)
                size_reduce += CJUMP_MAX_SIZE - 6;
            /* ... */
            if (diff <= HALFWORD_MAX + 5 && diff >= HALFWORD_MIN + 5)
                size_reduce += JUMP_MAX_SIZE - 5;
#else
            if (diff <= 0x7f + 2 && diff >= -0x80 + 2)
                size_reduce += JUMP_MAX_SIZE - 2;
            /* ... */
            if (diff <= 0x7f + 2 && diff >= -0x80 + 2)
                size_reduce += CJUMP_MAX_SIZE - 2;
#endif
#if (defined SLJIT_DEBUG && SLJIT_DEBUG)
            jump->flags |= (size_reduce_max - size_reduce) << JUMP_SIZE_SHIFT;
#endif
        } else {
            /* JUMP_MOV_ADDR case: */
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
#if (defined SLJIT_DEBUG && SLJIT_DEBUG)
            size_reduce_max = size_reduce + 10;
#endif
            if (!(jump->flags & JUMP_ADDR)) {
                /* ... */
            } else if (jump->u.target <= 0xffffffffl)
                size_reduce += (jump->flags & MOV_ADDR_HI) ? 4 : 5;
#endif
#if (defined SLJIT_DEBUG && SLJIT_DEBUG)
            jump->flags |= (size_reduce_max - size_reduce) << JUMP_SIZE_SHIFT;
#endif
        }

        next_jump_addr = SLJIT_GET_NEXT_ADDRESS(jump);
    /* end of loop */

    compiler->size -= size_reduce;
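The -0x80..0x7f window tested above is the range of a rel8 jump. As an illustration (not the file's code), the size decision for an unconditional jump looks like this; the +2 in the checks above accounts for the length of the short form itself, since the displacement is taken from the end of the instruction:

    #include <stdint.h>

    /* EB rel8 is 2 bytes, E9 rel32 is 5 bytes; 'diff' is measured from the
       start of the jump instruction, as in the checks above. */
    static int jmp_encoded_size(int64_t diff)
    {
        if (diff <= 0x7f + 2 && diff >= -0x80 + 2)
            return 2;    /* short form: EB rel8  */
        return 5;        /* near form:  E9 rel32 */
    }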

/* sljit_generate_code(), fragments: */
#if (defined SLJIT_DEBUG && SLJIT_DEBUG)
    /* ... */
#endif
    CHECK_PTR(check_sljit_generate_code(compiler));
    /* ... */
    code = (sljit_u8*)allocate_executable_memory(compiler->size, options, exec_allocator_data, &executable_offset);
    /* ... */
    reverse_buf(compiler);
    /* ... */
    jump = compiler->jumps;
    const_ = compiler->consts;

    /* walk the intermediate instruction buffers: */
        buf_ptr = buf->memory;
        buf_end = buf_ptr + buf->used_size;
        /* ... */
            label->u.addr = (sljit_uw)SLJIT_ADD_EXEC_OFFSET(code_ptr, executable_offset);
        /* ... */
            const_ = const_->next;
        /* ... */
    } while (buf_ptr < buf_end);

    /* second pass over the jumps: */
    jump = compiler->jumps;
    /* ... */
    return (void*)code;

/* sljit_has_cpu_feature(), fragments: */
    switch (feature_type) {
#ifdef SLJIT_IS_FPU_AVAILABLE
    /* ... */
#elif (defined SLJIT_DETECT_SSE2 && SLJIT_DETECT_SSE2)
    /* ... */
#endif
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
    /* ... */
#endif
#if !(defined SLJIT_IS_FPU_AVAILABLE) || SLJIT_IS_FPU_AVAILABLE
    /* ... */
#endif
    case SLJIT_HAS_AVX2:
        /* ... */
    }

#define BINARY_OPCODE(opcode) (((opcode ## _EAX_i32) << 24) | ((opcode ## _r_rm) << 16) | ((opcode ## _rm_r) << 8) | (opcode))

#define BINARY_IMM32(op_imm, immw, arg, argw) \
    /* ... */ \
    inst = emit_x86_instruction(compiler, 1 | EX86_BIN_INS, SLJIT_IMM, immw, arg, argw); \
    /* ... */ \
    *(inst + 1) |= (op_imm); \
    /* ... */

#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)

#define BINARY_IMM(op_imm, op_mr, immw, arg, argw) \
    /* ... */ \
    if (IS_HALFWORD(immw) || compiler->mode32) { \
        BINARY_IMM32(op_imm, immw, arg, argw); \
    /* ... */ \
        FAIL_IF(emit_load_imm64(compiler, FAST_IS_REG(arg) ? TMP_REG2 : TMP_REG1, immw)); \
        inst = emit_x86_instruction(compiler, 1, FAST_IS_REG(arg) ? TMP_REG2 : TMP_REG1, 0, arg, argw); \
    /* ... */

#define BINARY_EAX_IMM(op_eax_imm, immw) \
    FAIL_IF(emit_do_imm32(compiler, (!compiler->mode32) ? REX_W : 0, (op_eax_imm), immw))

#else /* SLJIT_CONFIG_X86_32 */

#define BINARY_IMM(op_imm, op_mr, immw, arg, argw) \
    BINARY_IMM32(op_imm, immw, arg, argw)

#define BINARY_EAX_IMM(op_eax_imm, immw) \
    FAIL_IF(emit_do_imm(compiler, (op_eax_imm), immw))

#endif

#define EMIT_MOV(compiler, dst, dstw, src, srcw) \
    FAIL_IF(emit_mov(compiler, dst, dstw, src, srcw));

/* emit_endbranch(), fragments: */
#if (defined SLJIT_CONFIG_X86_CET && SLJIT_CONFIG_X86_CET)
    /* ... */
    inst = (sljit_u8*)ensure_buf(compiler, 1 + 4);
    /* ... */
#endif
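A hedged illustration of the decision the immediate-form macros above (BINARY_IMM / BINARY_EAX_IMM) feed into: x86 ALU instructions take a sign-extended 8-bit immediate with opcode 0x83 when the value fits, a 32-bit immediate with opcode 0x81 otherwise, and on x86-64 anything outside IS_HALFWORD() range must first be loaded into a register. This sketch is not the file's emitter, just the same decision in isolation:

    #include <stdint.h>

    /* 0 = needs a 64-bit register load first, otherwise the opcode byte. */
    static uint8_t alu_imm_opcode(int64_t imm)
    {
        if (imm != (int64_t)(int32_t)imm)
            return 0;      /* NOT_HALFWORD: load with emit_load_imm64-style path */
        if (imm >= -128 && imm <= 127)
            return 0x83;   /* GROUP_BINARY_83: imm8, sign-extended */
        return 0x81;       /* GROUP_BINARY_81: imm32 */
    }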

/* CET / shadow-stack support (emit_endbranch, cpu_has_shadow_stack,
   adjust_shadow_stack), fragments: */
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
    /* ... */
#endif
#if (defined SLJIT_CONFIG_X86_CET && SLJIT_CONFIG_X86_CET) && defined (__SHSTK__)
    /* ModRM byte: mod = 11, opcode extension /5, plus the mapped register */
    inst[2] = (0x3 << 6) | (0x5 << 3) | (reg_map[reg] & 0x7);
    /* ... */
    return _get_ssp() != 0;
    /* ... */
    sljit_u8 *inst, *jz_after_cmp_inst;
    /* ... */
    inst = (sljit_u8*)ensure_buf(compiler, 1 + 2);
    /* ... */
    size_jz_after_cmp_inst = compiler->size;
    jz_after_cmp_inst = inst;
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
    compiler->mode32 = 1;
#endif
    /* ... */
    inst = (sljit_u8*)ensure_buf(compiler, 1 + 2);
    /* ... */
    inst[1] = size_before_rdssp_inst - compiler->size;
    /* ... */
    *jz_after_cmp_inst = compiler->size - size_jz_after_cmp_inst;
#endif

/* Move and unary-operation emitters. Only scattered statements of these
   functions survive in this listing; the fragments below are grouped by the
   function they appear to come from (names from the cross-reference index). */

/* emit_mov() / emit_cmov_generic(): */
    if (FAST_IS_REG(src)) {
    /* ... */
    if (FAST_IS_REG(dst)) {
    /* ... */
    inst = (sljit_u8*)ensure_buf(compiler, 1 + 2);
    /* ... */
    EMIT_MOV(compiler, dst_reg, 0, src, srcw);

/* sljit_emit_op0(): */
    CHECK(check_sljit_emit_op0(compiler, op));
    /* ... */
    switch (GET_OPCODE(op)) {
    /* ... */
    op = GET_OPCODE(op);
    /* ... */
    size = (!compiler->mode32) ? 3 : 2;
    /* ... */
    if (!compiler->mode32)
        /* ... */

/* emit_mov_byte(): */
    dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;
    if ((dst & SLJIT_MEM) && FAST_IS_REG(src)) {
    /* ... */
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
    /* registers with map >= 4 (ESP/EBP/ESI/EDI) have no low-byte form: */
    if (FAST_IS_REG(src) && reg_map[src] >= 4) {
    /* ... */
#endif
    if (src == dst && !sign) {
        /* ... */
        *(inst + 1) |= AND;

/* emit_prefetch(): the opcode extension selects the locality hint */
    inst[2] |= (1 << 3);
    inst[2] |= (2 << 3);
    inst[2] |= (3 << 3);

/* emit_mov_half(): */
    dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;
    if ((dst & SLJIT_MEM) && FAST_IS_REG(src))
        /* ... */

/* emit_unary(): */
    if (dst == src && dstw == srcw) {
    /* ... */
    if (FAST_IS_REG(dst)) {
        EMIT_MOV(compiler, dst, 0, src, srcw);

/* emit_clz_ctz(): */
#if (defined SLJIT_CONFIG_X86_32 && SLJIT_CONFIG_X86_32)
static const sljit_sw emit_clz_arg = 32 + 31;
static const sljit_sw emit_ctz_arg = 32;
#endif
    dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;
    /* ... */
    max = is_clz ? (32 + 31) : 32;
    /* ... */
    *(inst + 1) |= XOR;
    /* ... */
    max = compiler->mode32 ? (32 + 31) : (64 + 63);
    max = compiler->mode32 ? 32 : 64;

/* emit_bswap(): */
    EMIT_MOV(compiler, dst_r, 0, src, srcw);
    /* ... */
    size = compiler->mode32 ? 16 : 48;

/* sljit_emit_op1(): */
    CHECK(check_sljit_emit_op1(compiler, op, dst, dstw, src, srcw));
    ADJUST_LOCAL_OFFSET(dst, dstw);
    ADJUST_LOCAL_OFFSET(src, srcw);
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
    compiler->mode32 = op_flags & SLJIT_32;
#endif
    op = GET_OPCODE(op);
    /* ... */
    if (FAST_IS_REG(src) && src == dst) {
        if (!TYPE_CAST_NEEDED(op))
            /* ... */
    }
    /* ... */
    return emit_mov(compiler, dst, dstw, src, srcw);
    /* ... */
    EMIT_MOV(compiler, dst, dstw, src, srcw);
    /* ... */
    return emit_bswap(compiler, op, dst, dstw, src, srcw);
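The reg_map[src] >= 4 test in the emit_mov_byte() fragment above reflects an x86-32 encoding rule: only the registers with encodings 0-3 (EAX, ECX, EDX, EBX) have low-byte forms that can be named without a REX prefix. A tiny standalone check of the same rule, purely as an illustration:

    /* 1 if the hardware register encoding has an addressable low byte
       (AL, CL, DL, BL) without REX, which is all that 32-bit mode offers. */
    static int has_low_byte_form_ia32(unsigned encoding)
    {
        return encoding <= 3;
    }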

/* Integer binary operations. Fragments, grouped by the helper they appear
   to come from (names from the cross-reference index). */

/* emit_cum_binary() / emit_non_cum_binary(): the packed op_types word built
   by BINARY_OPCODE() is unpacked into the individual opcode bytes. */
    sljit_u8 op_rm = U8((op_types >> 16) & 0xff);
    sljit_u8 op_mr = U8((op_types >> 8) & 0xff);
    /* ... */
    if (dst == src1 && dstw == src1w) {
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
        if ((dst == SLJIT_R0) && (src2w > 127 || src2w < -128) && (compiler->mode32 || IS_HALFWORD(src2w))) {
#else
        if ((dst == SLJIT_R0) && (src2w > 127 || src2w < -128)) {
#endif
            /* ... (short "EAX, imm32" form) */
        }
        /* ... */
        BINARY_IMM(op_imm, op_mr, src2w, dst, dstw);
        /* ... */
    } else if (FAST_IS_REG(dst)) {
        /* ... */
    } else if (FAST_IS_REG(src2)) {
        /* ... */
    }
    if (dst == src2 && dstw == src2w) {
        /* ... (mirrored handling of src1) */
        BINARY_IMM(op_imm, op_mr, src1w, dst, dstw);
        /* ... */
    }
    /* ... */
    if (FAST_IS_REG(dst) && dst != src2) {
        EMIT_MOV(compiler, dst, 0, src1, src1w);
        /* ... */
    }

/* emit_mul(): prefers the imm8 form of IMUL when the immediate fits. */
    if (dst_r == src1 && src2 != SLJIT_IMM) {
        /* ... */
    } else if (dst_r == src2 && src1 != SLJIT_IMM) {
        /* ... */
    }
    if (src1w <= 127 && src1w >= -128) {
        /* ... */
        inst = (sljit_u8*)ensure_buf(compiler, 1 + 4);
        /* ... */
    }
    EMIT_MOV(compiler, dst_r, 0, src2, src2w);
    /* ... */
    if (src2w <= 127 && src2w >= -128) {
        /* ... */
        inst = (sljit_u8*)ensure_buf(compiler, 1 + 4);
        /* ... */
    }
    EMIT_MOV(compiler, dst_r, 0, src1, src1w);
    /* ... */
    if (ADDRESSING_DEPENDS_ON(src2, dst_r))
        EMIT_MOV(compiler, dst_r, 0, src1, src1w);

/* emit_lea_binary() / emit_cmp_binary() / emit_test_binary(): */
    if (dst == src1 && dstw == src1w)
        /* ... */
    if (dst == src2 && dstw == src2w)
        /* ... */
    dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;
    if (FAST_IS_REG(src1)) {
        if (FAST_IS_REG(src2)) {
        /* ... */
    } else if (FAST_IS_REG(src2)) {
        /* ... */
    }
    /* ... */
    if (FAST_IS_REG(src2) && src1 != SLJIT_IMM) {
        /* ... */
    }

/* emit_shift(): shift counts either come from an immediate or have to be
   placed in SLJIT_PREF_SHIFT_REG (ECX). */
    if (src2 == SLJIT_IMM || src2 == SLJIT_PREF_SHIFT_REG) {
        if (dst == src1 && dstw == src1w) {
        /* ... */
    }
    if (dst == SLJIT_PREF_SHIFT_REG && src2 == SLJIT_PREF_SHIFT_REG) {
        /* ... */
    }
    if (FAST_IS_REG(dst)) {
        EMIT_MOV(compiler, dst, 0, src1, src1w);
        /* ... */
    }
    if (dst == SLJIT_PREF_SHIFT_REG) {
        /* ... */
        EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, src2, src2w);
    }
    if (FAST_IS_REG(dst) && dst != src2 && dst != TMP_REG1 && !ADDRESSING_DEPENDS_ON(src2, dst)) {
        EMIT_MOV(compiler, dst, 0, src1, src1w);
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
        mode32 = compiler->mode32;
        compiler->mode32 = 0;
        /* ... */
        compiler->mode32 = mode32;
#endif
        EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, src2, src2w);
    }

/* emit_shift_with_flags() and the sljit_emit_op2() dispatcher, fragments.
   The shift amount is masked to the operand width before emitting. */
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
    src2w &= compiler->mode32 ? 0x1f : 0x3f;
#endif
    /* ... */
    return emit_shift(compiler, mode, dst, dstw, src1, src1w, src2, src2w);
    /* ... */
    return emit_mov(compiler, dst, dstw, src1, src1w);
    /* ... */
    if (!FAST_IS_REG(dst))
        /* ... */
    if (FAST_IS_REG(dst))
        /* ... */

/* sljit_emit_op2(): */
    CHECK(check_sljit_emit_op2(compiler, op, 0, dst, dstw, src1, src1w, src2, src2w));
    ADJUST_LOCAL_OFFSET(dst, dstw);
    ADJUST_LOCAL_OFFSET(src1, src1w);
    ADJUST_LOCAL_OFFSET(src2, src2w);
    /* ... */
    switch (GET_OPCODE(op)) {
    /* ... */
    if (!HAS_FLAGS(op)) {
        /* ... */
        return compiler->error;
    }
    /* ... (each case forwards dst, dstw, src1, src1w, src2, src2w to the
       matching helper, e.g.) */
    return emit_mul(compiler, dst, dstw, src1, src1w, src2, src2w);
    /* ... */
    if (FAST_IS_REG(dst) && src2 == dst) {
        /* ... */
    }

/* sljit_emit_op2u(): */
    CHECK(check_sljit_emit_op2(compiler, op, 1, 0, 0, src1, src1w, src2, src2w));
    SLJIT_SKIP_CHECKS(compiler);
    /* ... */
    ADJUST_LOCAL_OFFSET(src1, src1w);
    ADJUST_LOCAL_OFFSET(src2, src2w);
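As a reminder of what the masking at the top of this group implements (a sketch, not the file's code): x86 shift instructions only honour the low 5 bits of the count for 32-bit operands and the low 6 bits for 64-bit operands, so the requested count can be reduced up front:

    static unsigned effective_shift_count(unsigned count, int is_64bit)
    {
        /* mirrors: src2w &= compiler->mode32 ? 0x1f : 0x3f; */
        return count & (is_64bit ? 0x3fu : 0x1fu);
    }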

/* sljit_emit_op2r(), sljit_emit_shift_into(), sljit_emit_op_src() and
   sljit_emit_op_dst(), fragments: */

/* sljit_emit_op2r(): */
    CHECK(check_sljit_emit_op2r(compiler, op, dst_reg, src1, src1w, src2, src2w));
    ADJUST_LOCAL_OFFSET(src1, src1w);
    ADJUST_LOCAL_OFFSET(src2, src2w);
    /* ... */
    switch (GET_OPCODE(op)) {
    /* ... */

/* sljit_emit_shift_into(): a funnel shift; when both sources are the same
   register it reduces to a rotate. */
    sljit_s32 is_rotate, is_left, move_src1;
    /* ... */
    CHECK(check_sljit_emit_shift_into(compiler, op, dst_reg, src1_reg, src2_reg, src3, src3w));
    ADJUST_LOCAL_OFFSET(src3, src3w);
    /* ... */
    src3w &= (op & SLJIT_32) ? 0x1f : 0x3f;
    /* ... */
    is_rotate = (src1_reg == src2_reg);
    /* ... */
    return emit_shift(compiler, is_left ? ROL : ROR, dst_reg, dstw, src1_reg, src1w, src3, src3w);
    /* ... */
    if (dst_reg == SLJIT_PREF_SHIFT_REG && src3 != SLJIT_IMM && (src3 != SLJIT_PREF_SHIFT_REG || src1_reg != SLJIT_PREF_SHIFT_REG)) {
        /* ... */
    } else if ((src1_reg & SLJIT_MEM) || src1_reg == SLJIT_PREF_SHIFT_REG) {
        EMIT_MOV(compiler, restore_sp4, 0, src1_reg, src1w);
        src1_reg = restore_sp4;
        /* ... */
        restore_sp4 = src1_reg;
    }
    if (src3 != SLJIT_PREF_SHIFT_REG)
        EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, src3, src3w);
    if (src2_reg == SLJIT_PREF_SHIFT_REG && src3 != SLJIT_IMM && src3 != SLJIT_PREF_SHIFT_REG) {
        /* ... */
    }
    if (dst_reg != src1_reg) {
        if (dst_reg != src3) {
            EMIT_MOV(compiler, dst_reg, 0, src1_reg, src1w);
        /* ... */
    }
    if (src3 != SLJIT_IMM && src3 != SLJIT_PREF_SHIFT_REG) {
        /* ... */
        EMIT_MOV(compiler, SLJIT_PREF_SHIFT_REG, 0, src3, src3w);
    }
    /* ... */
    if (src1_reg != dst_reg)
        return emit_mov(compiler, dst_reg, dstw, src1_reg, 0);

/* sljit_emit_op_src() / sljit_emit_op_dst(): */
    CHECK(check_sljit_emit_op_src(compiler, op, src, srcw));
    ADJUST_LOCAL_OFFSET(src, srcw);
    /* ... */
    CHECK(check_sljit_emit_op_dst(compiler, op, dst, dstw));
    ADJUST_LOCAL_OFFSET(dst, dstw);

/* sljit_get_register_index(), sljit_emit_op_custom(), the floating-point
   conversion/compare helpers and sljit_emit_fop1()/fop2()/fop2r(),
   fragments: */

    CHECK_REG_INDEX(check_sljit_get_register_index(type, reg));
    /* ... */
    CHECK(check_sljit_emit_op_custom(compiler, instruction, size));
    /* ... */
    inst[1] = op & 0xff;
    /* ... */
    inst[2] = op & 0xff;

/* conversion and compare helpers (e.g. sljit_emit_fop1_conv_sw_from_f64,
   sljit_emit_fop1_cmp): */
    dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
    compiler->mode32 = 0;
    /* ... */
    compiler->mode32 = 1;
#endif
    /* ... */
    switch (GET_FLAG_TYPE(op)) {
    /* ... */
    if (!FAST_IS_REG(src2)) {
    /* ... */
    if (!FAST_IS_REG(src1)) {
    /* ... */

/* sljit_emit_fop1(): */
    SELECT_FOP1_OPERATION_WITH_CHECKS(compiler, op, dst, dstw, src, srcw);
    /* ... */
    if (FAST_IS_REG(dst))
        /* ... */
    if (FAST_IS_REG(src))
        /* ... */
    dst_r = FAST_IS_REG(dst) ? dst : TMP_FREG;
    if (FAST_IS_REG(src)) {
    /* ... */
    if (FAST_IS_REG(dst)) {
        dst_r = (dst == src) ? TMP_FREG : dst;
    /* ... */
    switch (GET_OPCODE(op)) {
    /* ... */

/* sljit_emit_fop2() / sljit_emit_fop2r(): */
    CHECK(check_sljit_emit_fop2(compiler, op, dst, dstw, src1, src1w, src2, src2w));
    ADJUST_LOCAL_OFFSET(dst, dstw);
    ADJUST_LOCAL_OFFSET(src1, src1w);
    ADJUST_LOCAL_OFFSET(src2, src2w);
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
    compiler->mode32 = 1;
#endif
    if (FAST_IS_REG(dst)) {
    /* ... */
    } else if (dst != src2)
        /* ... */
    switch (GET_OPCODE(op)) {
    /* ... */
    CHECK(check_sljit_emit_fop2r(compiler, op, dst_freg, src1, src1w, src2, src2w));
    ADJUST_LOCAL_OFFSET(src1, src1w);
    ADJUST_LOCAL_OFFSET(src2, src2w);
    /* ... */
    if (dst_freg == src1) {
    /* ... */
    if (dst_freg != src2)
        /* ... */

/* sljit_emit_label(), sljit_emit_jump(), sljit_emit_ijump(),
   sljit_emit_op_flags() and sljit_emit_fselect(), fragments: */

    CHECK_PTR(check_sljit_emit_label(compiler));
    /* ... */
    set_label(label, compiler);
    /* ... */
    inst = (sljit_u8*)ensure_buf(compiler, 1);

    CHECK_PTR(check_sljit_emit_jump(compiler, type));
    /* ... */
    inst = (sljit_u8*)ensure_buf(compiler, 1);

    CHECK(check_sljit_emit_ijump(compiler, type, src, srcw));
    ADJUST_LOCAL_OFFSET(src, srcw);
    /* ... */
    set_jump(jump, compiler, (sljit_u32)(JUMP_ADDR | (type << TYPE_SHIFT)));
    /* ... */
    compiler->size += JUMP_MAX_SIZE;
    inst = (sljit_u8*)ensure_buf(compiler, 1);

/* sljit_emit_op_flags(): materializes a condition flag into a register
   (SETcc-based). */
    CHECK(check_sljit_emit_op_flags(compiler, op, dst, dstw, type));
    ADJUST_LOCAL_OFFSET(dst, dstw);
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
    if (GET_OPCODE(op) == SLJIT_OR && !GET_ALL_FLAGS(op) && FAST_IS_REG(dst)) {
        inst = (sljit_u8*)ensure_buf(compiler, 1 + 4 + 3);
        /* ... */
    }
    inst = (sljit_u8*)ensure_buf(compiler, 1 + 4 + 4);
    /* ... */
    compiler->mode32 = GET_OPCODE(op) != SLJIT_MOV;
    SLJIT_SKIP_CHECKS(compiler);
#else
    if (GET_OPCODE(op) < SLJIT_ADD && FAST_IS_REG(dst) && reg_map[dst] <= 4) {
        inst = (sljit_u8*)ensure_buf(compiler, 1 + 3 + 3);
        /* ... */
    }
    if (GET_OPCODE(op) == SLJIT_OR && !GET_ALL_FLAGS(op) && FAST_IS_REG(dst) && reg_map[dst] <= 4) {
        inst = (sljit_u8*)ensure_buf(compiler, 1 + 3 + 2);
        /* ... */
    }
    inst = (sljit_u8*)ensure_buf(compiler, 1 + 3 + 3);
    /* ... */
    SLJIT_SKIP_CHECKS(compiler);
#endif

/* sljit_emit_fselect(): */
    CHECK(check_sljit_emit_fselect(compiler, type, dst_freg, src1, src1w, src2_freg));
    ADJUST_LOCAL_OFFSET(src1, src1w);
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
    compiler->mode32 = 1;
#endif
    if (dst_freg != src2_freg) {
        if (dst_freg == src1) {
        /* ... */
    }
    inst = (sljit_u8*)ensure_buf(compiler, 1 + 2);

/* SIMD emitters (sljit_emit_simd_mov, _replicate, _lane_mov,
   _lane_replicate, _extend, _sign and _op2). Only scattered statements
   survive; representative fragments follow. */

/* sljit_emit_simd_mov(): */
    CHECK(check_sljit_emit_simd_mov(compiler, type, freg, srcdst, srcdstw));
    ADJUST_LOCAL_OFFSET(srcdst, srcdstw);
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
    compiler->mode32 = 1;
#endif
    /* ... */
    alignment = reg_size;
    /* ... */
    if (elem_size == 2 || elem_size == 3) {
        /* ... */
    }
    return emit_groupf(compiler, op, freg, srcdst, srcdstw);

/* sljit_emit_simd_replicate(): */
    CHECK(check_sljit_emit_simd_replicate(compiler, type, freg, src, srcw));
    ADJUST_LOCAL_OFFSET(src, srcw);
    /* ... */
    switch (elem_size) {
    /* ... */
    if (elem_size == 2 && freg != src) {
    /* ... */
    if (elem_size == 0) {
    /* ... */
    } else if (elem_size == 1) {
    /* ... */
    if (srcw == 0 || srcw == -1) {
    /* ... */
    if (!FAST_IS_REG(src)) {
    /* ... */
    if (use_vex && elem_size >= 2) {
    /* ... */

/* sljit_emit_simd_lane_mov(): lanes above the low 128 bits appear to be
   handled by splitting off the upper half of a 256-bit register first. */
    CHECK(check_sljit_emit_simd_lane_mov(compiler, type, freg, lane_index, srcdst, srcdstw));
    ADJUST_LOCAL_OFFSET(srcdst, srcdstw);
    if (reg_size == 5) {
        /* ... */
    } else if (reg_size != 4)
        /* ... */
    srcdst_orig = srcdst;
    srcdstw_orig = srcdstw;
    /* ... */
    if (lane_index == 0) {
        /* ... */
    }
    if (reg_size == 5 && lane_index >= (1 << (4 - elem_size))) {
        lane_index -= (1 << (4 - elem_size));
        /* ... */
    }

/* sljit_emit_simd_lane_replicate(): the shuffle immediate is built by
   duplicating the lane index into each 2-bit field. */
    byte = U8(src_lane_index);
    /* ... */
    byte = U8(byte | (byte << 2));
    return emit_byte(compiler, U8(byte | (byte << 4)));
    /* ... */
    if (src_lane_index >= 4) {
        src_lane_index -= 4;
    /* ... */
    return emit_byte(compiler, U8(src_lane_index == 0 ? 0x44 : 0xee));

/* sljit_emit_simd_extend(): */
    CHECK(check_sljit_emit_simd_extend(compiler, type, freg, src, srcw));
    ADJUST_LOCAL_OFFSET(src, srcw);
    /* ... */
    if (elem_size != 2 || elem2_size != 3)
        /* ... */
    switch (elem_size) {
    /* ... */

/* sljit_emit_simd_sign(): */
    CHECK(check_sljit_emit_simd_sign(compiler, type, freg, dst, dstw));
    ADJUST_LOCAL_OFFSET(dst, dstw);
    /* ... */
    dst_r = FAST_IS_REG(dst) ? dst : TMP_REG1;
    /* ... */
    if (elem_size == 1) {
    /* ... */

/* sljit_emit_simd_op2(): */
    CHECK(check_sljit_emit_simd_op2(compiler, type, dst_freg, src1_freg, src2_freg));
    /* ... */
    switch (SLJIT_SIMD_GET_OPCODE(type)) {
    /* ... */
    if (dst_freg != src1_freg) {
        if (dst_freg == src2_freg)
            src2_freg = src1_freg;
    /* ... */
    return emit_groupf(compiler, op, dst_freg, src_freg, 0);
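The byte | (byte << 2) pattern in the lane-replicate fragments above builds a 4x2-bit shuffle control byte. As a standalone illustration (not the file's code): broadcasting lane n of a four-lane vector with PSHUFD uses an immediate whose four 2-bit fields all contain n:

    #include <stdint.h>

    /* Shuffle control byte for "replicate 32-bit lane n into all four lanes". */
    static uint8_t pshufd_broadcast_imm(unsigned lane /* 0..3 */)
    {
        uint8_t b = (uint8_t)(lane & 0x3);
        b = (uint8_t)(b | (b << 2));
        return (uint8_t)(b | (b << 4));
    }
    /* lane 0 -> 0x00, lane 1 -> 0x55, lane 2 -> 0xaa, lane 3 -> 0xff */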

/* sljit_emit_atomic_load()/store(), sljit_get_local_base(),
   sljit_emit_const() and sljit_emit_mov_addr(), fragments: */

/* atomic load / store (note the CMPXCHG_rm_r and GROUP_LOCK bytes above): */
    CHECK(check_sljit_emit_atomic_load(compiler, op, dst_reg, mem_reg));
    /* ... */
    SLJIT_SKIP_CHECKS(compiler);
    /* ... */
    CHECK(check_sljit_emit_atomic_store(compiler, op, src_reg, mem_reg, temp_reg));
    /* ... */
    op = GET_OPCODE(op);
    /* ... */
    if (mem_reg == src_reg)
        /* ... */
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
    compiler->mode32 = 0;
#endif

/* sljit_get_local_base(): */
    CHECK(check_sljit_get_local_base(compiler, dst, dstw, offset));
    ADJUST_LOCAL_OFFSET(dst, dstw);
#if (defined SLJIT_CONFIG_X86_64 && SLJIT_CONFIG_X86_64)
    compiler->mode32 = 0;
#endif
    /* ... */
    return compiler->error;

/* sljit_emit_const(): */
    CHECK_PTR(check_sljit_emit_const(compiler, dst, dstw, init_value));
    ADJUST_LOCAL_OFFSET(dst, dstw);
    /* ... */
    set_const(const_, compiler);
    /* ... */
    reg = FAST_IS_REG(dst) ? dst : TMP_REG1;
    /* ... */
    inst = (sljit_u8*)ensure_buf(compiler, 1);

/* sljit_emit_mov_addr(): */
    CHECK_PTR(check_sljit_emit_mov_addr(compiler, dst, dstw));
    ADJUST_LOCAL_OFFSET(dst, dstw);
    /* ... */
    set_mov_addr(jump, compiler, 0);
    /* ... */
    reg = FAST_IS_REG(dst) ? dst : TMP_REG1;
    /* ... */
    jump->flags |= MOV_ADDR_HI;
    /* ... */
    inst = (sljit_u8*)ensure_buf(compiler, 1);
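For context only (an analogy, not the file's implementation): the atomic load/store pair above appears to provide a load-linked/store-conditional style contract on top of the compare-exchange opcode (CMPXCHG_rm_r) defined earlier, where the store succeeds only if memory still holds the previously loaded value. The same contract sketched in plain C with a compiler builtin standing in for the emitted LOCK CMPXCHG:

    #include <stdint.h>

    /* Returns 1 if the conditional store succeeded (memory still held
       'expected'), 0 otherwise. GCC/Clang builtin, for illustration only. */
    static int store_conditional(intptr_t *mem, intptr_t expected, intptr_t value)
    {
        return __atomic_compare_exchange_n(mem, &expected, value, 0,
                __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST);
    }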

Symbols referenced by this file, from the Doxygen cross-reference index:

#define CHECK_ERROR(name, s)

unsigned short int sljit_u16

signed short int sljit_s16

#define SLJIT_UNLIKELY(x)

#define SLJIT_API_FUNC_ATTRIBUTE

#define SLJIT_COMPILE_ASSERT(x, description)

#define SLJIT_MEMCPY(dest, src, len)

#define SLJIT_UNUSED_ARG(arg)

#define SLJIT_UPDATE_WX_FLAGS(from, to, enable_exec)

#define PTR_FAIL_IF_NULL(ptr)

#define PTR_FAIL_IF(expr)

#define FAIL_IF_NULL(ptr)

#define PTR_FAIL_WITH_EXEC_IF(ptr)

#define CHECK_ERROR_PTR()

#define SLJIT_UNORDERED_OR_LESS_EQUAL

#define SLJIT_SKIP_FRAMES_BEFORE_FAST_RETURN

#define SLJIT_SIMD_OP2_AND

#define SLJIT_ORDERED_LESS_EQUAL

#define SLJIT_CONV_F64_FROM_S32

#define SLJIT_FAST_RETURN

#define SLJIT_ATOMIC_NOT_STORED

#define SLJIT_UNORDERED_OR_GREATER

#define SLJIT_MEM2(r1, r2)

#define SLJIT_ORDERED_GREATER_EQUAL

#define SLJIT_PREFETCH_L3

#define SLJIT_SIG_GREATER_EQUAL

#define SLJIT_UNORDERED_OR_NOT_EQUAL

#define SLJIT_SIMD_EXTEND_SIGNED

#define SLJIT_PREFETCH_L1

#define SLJIT_SIMD_OP2_XOR

#define SLJIT_ORDERED_EQUAL

#define SLJIT_HAS_VIRTUAL_REGISTERS

#define SLJIT_ERR_UNSUPPORTED

#define SLJIT_UNORDERED_OR_LESS

#define SLJIT_ORDERED_GREATER

#define SLJIT_SIG_LESS_EQUAL

#define SLJIT_UNORDERED_OR_EQUAL

#define SLJIT_REWRITABLE_JUMP

#define SLJIT_SIMD_REG_512

#define SLJIT_NOT_OVERFLOW

#define SLJIT_F_NOT_EQUAL

#define SLJIT_F_GREATER_EQUAL

#define SLJIT_CONV_SW_FROM_F64

#define SLJIT_HAS_PREFETCH

#define SLJIT_SIG_GREATER

#define SLJIT_SIMD_LANE_ZERO

#define SLJIT_FLOAT_REGISTER

#define SLJIT_ATOMIC_STORED

#define SLJIT_GET_RETURN_ADDRESS

#define SLJIT_SIMD_OP2_OR

#define SLJIT_CONV_F64_FROM_SW

#define SLJIT_SIMD_LANE_SIGNED

#define SLJIT_GREATER_EQUAL

#define SLJIT_GP_REGISTER

#define SLJIT_SKIP_FRAMES_BEFORE_RETURN

#define SLJIT_SIMD_REG_256

#define SLJIT_ERR_COMPILED

#define SLJIT_HAS_COPY_F64

#define SLJIT_SIMD_REG_128

#define SLJIT_F_LESS_EQUAL

#define SLJIT_ORDERED_LESS

#define SLJIT_HAS_COPY_F32

#define SLJIT_CONV_F64_FROM_F32

#define SLJIT_PREFETCH_L2

#define SLJIT_PREFETCH_ONCE

#define SLJIT_ORDERED_NOT_EQUAL

#define SLJIT_UNORDERED_OR_GREATER_EQUAL

#define SLJIT_IS_FPU_AVAILABLE

static sljit_s32 skip_frames_before_return(struct sljit_compiler *compiler)

static sljit_s32 emit_vex_instruction(struct sljit_compiler *compiler, sljit_uw op, sljit_s32 a, sljit_s32 v, sljit_s32 b, sljit_sw immb)

static sljit_s32 emit_do_imm(struct sljit_compiler *compiler, sljit_u8 opcode, sljit_sw imm)

static sljit_s32 emit_fast_enter(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw)

static sljit_s32 sljit_emit_get_return_address(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw)

static sljit_u8 * emit_x86_instruction(struct sljit_compiler *compiler, sljit_uw size, sljit_s32 a, sljit_sw imma, sljit_s32 b, sljit_sw immb)

static sljit_s32 emit_fast_return(struct sljit_compiler *compiler, sljit_s32 src, sljit_sw srcw)

static sljit_s32 emit_load_imm64(struct sljit_compiler *compiler, sljit_s32 reg, sljit_sw imm)

static sljit_s32 emit_mov_int(struct sljit_compiler *compiler, sljit_s32 sign, sljit_s32 dst, sljit_sw dstw, sljit_s32 src, sljit_sw srcw)

static sljit_s32 emit_do_imm32(struct sljit_compiler *compiler, sljit_u8 rex, sljit_u8 opcode, sljit_sw imm)

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_flags(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 dst, sljit_sw dstw, sljit_s32 type)

#define CHECK_EXTRA_REGS(p, w, do)

static sljit_s32 emit_test_binary(struct sljit_compiler *compiler, sljit_s32 src1, sljit_sw src1w, sljit_s32 src2, sljit_sw src2w)

#define VPBROADCASTQ_x_xm

#define VPBROADCASTD_x_xm

static sljit_s32 emit_lea_binary(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw, sljit_s32 src1, sljit_sw src1w, sljit_s32 src2, sljit_sw src2w)

static sljit_s32 emit_mov(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw, sljit_s32 src, sljit_sw srcw)

#define VBROADCASTSS_x_xm

SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump * sljit_emit_jump(struct sljit_compiler *compiler, sljit_s32 type)

static const sljit_u8 freg_lmap[SLJIT_NUMBER_OF_FLOAT_REGISTERS+2]

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2r(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 dst_reg, sljit_s32 src1, sljit_sw src1w, sljit_s32 src2, sljit_sw src2w)

static sljit_s32 emit_groupf_ext(struct sljit_compiler *compiler, sljit_uw op, sljit_s32 dst, sljit_s32 src, sljit_sw srcw)

static sljit_s32 emit_non_cum_binary(struct sljit_compiler *compiler, sljit_u32 op_types, sljit_s32 dst, sljit_sw dstw, sljit_s32 src1, sljit_sw src1w, sljit_s32 src2, sljit_sw src2w)

#define BINARY_EAX_IMM(op_eax_imm, immw)

#define VPBROADCASTB_x_xm

static sljit_u8 * detect_far_jump_type(struct sljit_jump *jump, sljit_u8 *code_ptr)

static SLJIT_INLINE sljit_s32 emit_endbranch(struct sljit_compiler *compiler)

static sljit_s32 emit_cum_binary(struct sljit_compiler *compiler, sljit_u32 op_types, sljit_s32 dst, sljit_sw dstw, sljit_s32 src1, sljit_sw src1w, sljit_s32 src2, sljit_sw src2w)

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_has_cpu_feature(sljit_s32 feature_type)

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2u(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 src1, sljit_sw src1w, sljit_s32 src2, sljit_sw src2w)

static void execute_cpu_id(sljit_u32 info[4])

static sljit_s32 emit_groupf(struct sljit_compiler *compiler, sljit_uw op, sljit_s32 dst, sljit_s32 src, sljit_sw srcw)

static const sljit_u8 reg_lmap[SLJIT_NUMBER_OF_REGISTERS+4]

SLJIT_API_FUNC_ATTRIBUTE struct sljit_label * sljit_emit_label(struct sljit_compiler *compiler)

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_cmp_info(sljit_s32 type)

static sljit_s32 emit_prefetch(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 src, sljit_sw srcw)

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_local_base(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw, sljit_sw offset)

static sljit_s32 emit_unary(struct sljit_compiler *compiler, sljit_u8 opcode, sljit_s32 dst, sljit_sw dstw, sljit_s32 src, sljit_sw srcw)

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_atomic_load(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 dst_reg, sljit_s32 mem_reg)

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_get_register_index(sljit_s32 type, sljit_s32 reg)

static sljit_u32 execute_get_xcr0_low(void)

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_replicate(struct sljit_compiler *compiler, sljit_s32 type, sljit_s32 freg, sljit_s32 src, sljit_sw srcw)

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_op2(struct sljit_compiler *compiler, sljit_s32 type, sljit_s32 dst_freg, sljit_s32 src1_freg, sljit_s32 src2_freg)

static sljit_s32 emit_shift(struct sljit_compiler *compiler, sljit_u8 mode, sljit_s32 dst, sljit_sw dstw, sljit_s32 src1, sljit_sw src1w, sljit_s32 src2, sljit_sw src2w)

SLJIT_API_FUNC_ATTRIBUTE void sljit_set_const(sljit_uw addr, sljit_sw new_constant, sljit_sw executable_offset)

static void get_cpu_features(void)

static SLJIT_INLINE sljit_s32 cpu_has_shadow_stack(void)

#define BINARY_IMM(op_imm, op_mr, immw, arg, argw)

static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_f64_from_sw(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 dst, sljit_sw dstw, sljit_s32 src, sljit_sw srcw)

#define CPU_FEATURE_LZCNT

static sljit_s32 emit_mul(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw, sljit_s32 src1, sljit_sw src1w, sljit_s32 src2, sljit_sw src2w)

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_ijump(struct sljit_compiler *compiler, sljit_s32 type, sljit_s32 src, sljit_sw srcw)

static sljit_u32 * sse2_buffer

SLJIT_API_FUNC_ATTRIBUTE struct sljit_jump * sljit_emit_mov_addr(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw)

#define CPU_FEATURE_OSXSAVE

static sljit_s32 emit_clz_ctz(struct sljit_compiler *compiler, sljit_s32 is_clz, sljit_s32 dst, sljit_sw dstw, sljit_s32 src, sljit_sw srcw)

#define SLJIT_INST_MOV_ADDR

static SLJIT_INLINE void sljit_unaligned_store_s32(void *addr, sljit_s32 value)

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop2r(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 dst_freg, sljit_s32 src1, sljit_sw src1w, sljit_s32 src2, sljit_sw src2w)

static void generate_jump_or_mov_addr(struct sljit_jump *jump, sljit_sw executable_offset)

static const sljit_u8 reg_map[SLJIT_NUMBER_OF_REGISTERS+4]

static sljit_u8 get_jump_code(sljit_uw type)

static sljit_u8 * generate_mov_addr_code(struct sljit_jump *jump, sljit_u8 *code_ptr, sljit_u8 *code, sljit_sw executable_offset)

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_lane_mov(struct sljit_compiler *compiler, sljit_s32 type, sljit_s32 freg, sljit_s32 lane_index, sljit_s32 srcdst, sljit_sw srcdstw)

#define CPU_FEATURE_DETECTED

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op0(struct sljit_compiler *compiler, sljit_s32 op)

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop2(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 dst, sljit_sw dstw, sljit_s32 src1, sljit_sw src1w, sljit_s32 src2, sljit_sw src2w)

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fop1(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 dst, sljit_sw dstw, sljit_s32 src, sljit_sw srcw)

#define VEXTRACTF128_x_ym

#define EMIT_MOV(compiler, dst, dstw, src, srcw)

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_shift_into(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 dst_reg, sljit_s32 src1_reg, sljit_s32 src2_reg, sljit_s32 src3, sljit_sw src3w)

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_extend(struct sljit_compiler *compiler, sljit_s32 type, sljit_s32 freg, sljit_s32 src, sljit_sw srcw)

#define EX86_SELECT_66(op)

static sljit_s32 emit_simd_mov(struct sljit_compiler *compiler, sljit_s32 type, sljit_s32 dst_freg, sljit_s32 src_freg)

static const sljit_u8 freg_map[SLJIT_NUMBER_OF_FLOAT_REGISTERS+2]

#define VPBROADCASTW_x_xm

#define BINARY_OPCODE(opcode)

static SLJIT_INLINE sljit_s32 emit_sse2_store(struct sljit_compiler *compiler, sljit_s32 single, sljit_s32 dst, sljit_sw dstw, sljit_s32 src)

static sljit_u32 cpu_feature_list

#define VINSERTI128_y_y_xm

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_mov(struct sljit_compiler *compiler, sljit_s32 type, sljit_s32 freg, sljit_s32 srcdst, sljit_sw srcdstw)

#define VBROADCASTSD_x_xm

#define EX86_SELECT_F2_F3(op)

static void reduce_code_size(struct sljit_compiler *compiler)

static SLJIT_INLINE sljit_s32 adjust_shadow_stack(struct sljit_compiler *compiler, sljit_s32 src, sljit_sw srcw)

static sljit_u8 * detect_near_jump_type(struct sljit_jump *jump, sljit_u8 *code_ptr, sljit_u8 *code, sljit_sw executable_offset)

#define VINSERTF128_y_y_xm

#define VEXTRACTI128_x_ym

static SLJIT_INLINE void sljit_unaligned_store_sw(void *addr, sljit_sw value)

#define CPU_FEATURE_TZCNT

static sljit_s32 emit_mov_byte(struct sljit_compiler *compiler, sljit_s32 sign, sljit_s32 dst, sljit_sw dstw, sljit_s32 src, sljit_sw srcw)

static SLJIT_INLINE void sljit_unaligned_store_s16(void *addr, sljit_s16 value)

static sljit_u32 sse2_data[3+(4 *4)]

static sljit_s32 emit_cmp_binary(struct sljit_compiler *compiler, sljit_s32 src1, sljit_sw src1w, sljit_s32 src2, sljit_sw src2w)

#define CPU_FEATURE_SSE41

SLJIT_API_FUNC_ATTRIBUTE void sljit_set_jump_addr(sljit_uw addr, sljit_uw new_target, sljit_sw executable_offset)

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_dst(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 dst, sljit_sw dstw)

static sljit_s32 emit_shift_with_flags(struct sljit_compiler *compiler, sljit_u8 mode, sljit_s32 set_flags, sljit_s32 dst, sljit_sw dstw, sljit_s32 src1, sljit_sw src1w, sljit_s32 src2, sljit_sw src2w)

SLJIT_API_FUNC_ATTRIBUTE struct sljit_const * sljit_emit_const(struct sljit_compiler *compiler, sljit_s32 dst, sljit_sw dstw, sljit_sw init_value)

SLJIT_API_FUNC_ATTRIBUTE const char * sljit_get_platform_name(void)

static sljit_s32 emit_byte(struct sljit_compiler *compiler, sljit_u8 byte)

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_atomic_store(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 src_reg, sljit_s32 mem_reg, sljit_s32 temp_reg)

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_fselect(struct sljit_compiler *compiler, sljit_s32 type, sljit_s32 dst_freg, sljit_s32 src1, sljit_sw src1w, sljit_s32 src2_freg)

static sljit_s32 emit_bswap(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 dst, sljit_sw dstw, sljit_s32 src, sljit_sw srcw)

static sljit_s32 emit_mov_half(struct sljit_compiler *compiler, sljit_s32 sign, sljit_s32 dst, sljit_sw dstw, sljit_s32 src, sljit_sw srcw)

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_src(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 src, sljit_sw srcw)

static SLJIT_INLINE sljit_s32 sljit_emit_fop1_cmp(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 src1, sljit_sw src1w, sljit_s32 src2, sljit_sw src2w)

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_sign(struct sljit_compiler *compiler, sljit_s32 type, sljit_s32 freg, sljit_s32 dst, sljit_sw dstw)

static SLJIT_INLINE sljit_s32 emit_sse2_load(struct sljit_compiler *compiler, sljit_s32 single, sljit_s32 dst, sljit_s32 src, sljit_sw srcw)

static sljit_s32 emit_cmov_generic(struct sljit_compiler *compiler, sljit_s32 type, sljit_s32 dst_reg, sljit_s32 src, sljit_sw srcw)

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op2(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 dst, sljit_sw dstw, sljit_s32 src1, sljit_sw src1w, sljit_s32 src2, sljit_sw src2w)

static void init_compiler(void)

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op_custom(struct sljit_compiler *compiler, void *instruction, sljit_u32 size)

static SLJIT_INLINE sljit_s32 sljit_emit_fop1_conv_sw_from_f64(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 dst, sljit_sw dstw, sljit_s32 src, sljit_sw srcw)

SLJIT_API_FUNC_ATTRIBUTE void * sljit_generate_code(struct sljit_compiler *compiler, sljit_s32 options, void *exec_allocator_data)

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_op1(struct sljit_compiler *compiler, sljit_s32 op, sljit_s32 dst, sljit_sw dstw, sljit_s32 src, sljit_sw srcw)

SLJIT_API_FUNC_ATTRIBUTE sljit_s32 sljit_emit_simd_lane_replicate(struct sljit_compiler *compiler, sljit_s32 type, sljit_s32 freg, sljit_s32 src, sljit_s32 src_lane_index)

struct sljit_const * consts

sljit_sw executable_offset

struct sljit_jump * jumps

struct sljit_label * last_label

struct sljit_memory_fragment * buf

struct sljit_label * labels

struct sljit_const * next

union sljit_jump::@1235 u

struct sljit_label * label

union sljit_label::@1234 u

