/* NOTE(review): the two MSVC linker diagnostics below were pasted into this
   source file as bare text, which would break assembly.  They report that
   ffi64.obj could not resolve the two symbols *defined in this very file*,
   i.e. this .S file was not assembled/linked into ffi.lib for that build
   (MSVC cannot consume GAS syntax; a separate assembler or the MSVC-specific
   sources are needed).  Preserved here as a comment:
   1>ffi.lib(ffi64.obj) : error LNK2001: unresolved external symbol ffi_closure_unix64
   1>ffi.lib(ffi64.obj) : error LNK2001: unresolved external symbol ffi_call_unix64 */
/* ffi_closure_unix64 — closure entry stub (GAS/AT&T syntax, SysV AMD64).
   Reached from a closure trampoline with %r10 = address of the ffi_closure
   object.  Spills the six integer argument registers into the new frame,
   pulls cif/fun/user_data out of the closure (stored right after the
   trampoline code, at FFI_TRAMPOLINE_SIZE), and calls the C worker
   ffi_closure_unix64_inner.  The worker returns the FFI_TYPE of the result
   in %al, which selects an entry in L(load_table) that loads the result
   (staged in the red zone) into the proper ABI return register(s).
   NOTE(review): an SSE-saving sibling entry that also spills %xmm0-7 and
   falls into L(sse_entry1)/L(do_closure) is presumably defined elsewhere in
   this file — not visible in this chunk; confirm before editing.
   Frame-size/offset constants (ffi_closure_FS, ffi_closure_OFS_G, etc.)
   come from the including header, also outside this view.  */
C(ffi_closure_unix64):
L(UW8):
_CET_ENDBR
/* Allocate the closure frame: register-save area + rvalue scratch.  */
subq $ffi_closure_FS, %rsp
L(UW9):
/* cfi_adjust_cfa_offset(ffi_closure_FS) */
L(sse_entry1):
/* Spill the six SysV integer argument registers, in ABI order,
   into the reg_args area at ffi_closure_OFS_G(%rsp).  */
movq %rdi, ffi_closure_OFS_G+0x00(%rsp)
movq %rsi, ffi_closure_OFS_G+0x08(%rsp)
movq %rdx, ffi_closure_OFS_G+0x10(%rsp)
movq %rcx, ffi_closure_OFS_G+0x18(%rsp)
movq %r8, ffi_closure_OFS_G+0x20(%rsp)
movq %r9, ffi_closure_OFS_G+0x28(%rsp)
/* The closure's cif/fun/user_data fields follow the trampoline code;
   under x32 (__ILP32__) pointers are 4 bytes, hence the 32-bit loads
   and tighter offsets.  These become args 1-3 of the inner call.  */
#ifdef __ILP32__
movl FFI_TRAMPOLINE_SIZE(%r10), %edi /* Load cif */
movl FFI_TRAMPOLINE_SIZE+4(%r10), %esi /* Load fun */
movl FFI_TRAMPOLINE_SIZE+8(%r10), %edx /* Load user_data */
#else
movq FFI_TRAMPOLINE_SIZE(%r10), %rdi /* Load cif */
movq FFI_TRAMPOLINE_SIZE+8(%r10), %rsi /* Load fun */
movq FFI_TRAMPOLINE_SIZE+16(%r10), %rdx /* Load user_data */
#endif
L(do_closure):
/* Args 4-6 of ffi_closure_unix64_inner: rvalue scratch, the spilled
   register area (frame base), and the caller's stack arguments
   (just past our frame + return address).  */
leaq ffi_closure_OFS_RVALUE(%rsp), %rcx /* Load rvalue */
movq %rsp, %r8 /* Load reg_args */
leaq ffi_closure_FS+8(%rsp), %r9 /* Load argp */
call PLT(C(ffi_closure_unix64_inner))
/* Deallocate stack frame early; return value is now in redzone. */
addq $ffi_closure_FS, %rsp
L(UW10):
/* cfi_adjust_cfa_offset(-ffi_closure_FS) */
/* The first byte of the return value contains the FFI_TYPE. */
/* Dispatch: %al = FFI_TYPE of the result; out-of-range values abort.
   Each L(load_table) slot is 8 bytes (16 with CET, see below), so the
   type code indexes the table directly.  */
cmpb $UNIX64_RET_LAST, %al
movzbl %al, %r10d
leaq L(load_table)(%rip), %r11
ja L(la)
#ifdef __CET__
/* NB: Originally, each slot is 8 byte. 4 bytes of ENDBR64 +
4 bytes NOP padding double slot size to 16 bytes. */
addl %r10d, %r10d
#endif
leaq (%r11, %r10, 8), %r10
/* %rsi = staged return value, now sitting in the red zone.  */
leaq ffi_closure_RED_RVALUE(%rsp), %rsi
jmp *%r10
.balign 8
/* Return-value loader table: one fixed-size stub per FFI_TYPE,
   loading from (%rsi) into the SysV return registers.  The E() macro
   (defined outside this chunk) pads/asserts each slot's size.  */
L(load_table):
E(L(load_table), UNIX64_RET_VOID)
_CET_ENDBR
ret
E(L(load_table), UNIX64_RET_UINT8)
_CET_ENDBR
movzbl (%rsi), %eax
ret
E(L(load_table), UNIX64_RET_UINT16)
_CET_ENDBR
movzwl (%rsi), %eax
ret
E(L(load_table), UNIX64_RET_UINT32)
_CET_ENDBR
movl (%rsi), %eax
ret
E(L(load_table), UNIX64_RET_SINT8)
_CET_ENDBR
movsbl (%rsi), %eax
ret
E(L(load_table), UNIX64_RET_SINT16)
_CET_ENDBR
movswl (%rsi), %eax
ret
E(L(load_table), UNIX64_RET_SINT32)
_CET_ENDBR
movl (%rsi), %eax
ret
E(L(load_table), UNIX64_RET_INT64)
_CET_ENDBR
movq (%rsi), %rax
ret
E(L(load_table), UNIX64_RET_XMM32)
_CET_ENDBR
movd (%rsi), %xmm0
ret
E(L(load_table), UNIX64_RET_XMM64)
_CET_ENDBR
movq (%rsi), %xmm0
ret
E(L(load_table), UNIX64_RET_X87)
_CET_ENDBR
fldt (%rsi)
ret
E(L(load_table), UNIX64_RET_X87_2)
/* Two long-double halves; load in reverse so (%rsi) ends up in st(0).  */
_CET_ENDBR
fldt 16(%rsi)
fldt (%rsi)
ret
/* Two-eightbyte struct returns: load the second eightbyte here, then
   share the first-eightbyte load via L(l2)/L(l3).  */
E(L(load_table), UNIX64_RET_ST_XMM0_RAX)
_CET_ENDBR
movq 8(%rsi), %rax
jmp L(l3)
E(L(load_table), UNIX64_RET_ST_RAX_XMM0)
_CET_ENDBR
movq 8(%rsi), %xmm0
jmp L(l2)
E(L(load_table), UNIX64_RET_ST_XMM0_XMM1)
_CET_ENDBR
movq 8(%rsi), %xmm1
jmp L(l3)
E(L(load_table), UNIX64_RET_ST_RAX_RDX)
_CET_ENDBR
movq 8(%rsi), %rdx
L(l2):
/* First eightbyte in %rax (INTEGER class).  */
movq (%rsi), %rax
ret
.balign 8
L(l3):
/* First eightbyte in %xmm0 (SSE class).  */
movq (%rsi), %xmm0
ret
/* Unknown FFI_TYPE byte — internal error.  */
L(la): call PLT(C(abort))
L(UW11):
ENDF(C(ffi_closure_unix64))
/* ------------------------------------------------------------------ */
/* ffi_call_unix64 — outgoing-call trampoline (GAS/AT&T, SysV AMD64).
   Called from C (ffi_call) with, per the register usage below:
     %rdi = base of the prepared argument area (integer regs at 0x00-0x28,
            SSE regs at 0x30-0xa0, SSE-count at 0xb0, stack args from 0xb8),
     %rsi = byte offset from %rdi to the local frame base,
     %rdx = flags word (low byte = FFI_TYPE of the return value,
            high bits include the struct size, see UNIX64_SIZE_SHIFT),
     %rcx = raddr (where the caller wants the return value stored),
     %r8  = the target function to invoke.
   Builds a frame *inside the argument area* at %rdi+%rsi, loads the
   argument registers, calls the target, then stores its return value
   through a jump table selected by the FFI_TYPE byte.  */
C(ffi_call_unix64):
L(UW0):
_CET_ENDBR
movq (%rsp), %r10 /* Load return address. */
leaq (%rdi, %rsi), %rax /* Find local stack base. */
movq %rdx, (%rax) /* Save flags. */
movq %rcx, 8(%rax) /* Save raddr. */
movq %rbp, 16(%rax) /* Save old frame pointer. */
movq %r10, 24(%rax) /* Relocate return address. */
movq %rax, %rbp /* Finalize local stack frame. */
/* New stack frame based off rbp. This is a itty bit of unwind
trickery in that the CFA *has* changed. There is no easy way
to describe it correctly on entry to the function. Fortunately,
it doesn't matter too much since at all points we can correctly
unwind back to ffi_call. Note that the location to which we
moved the return address is (the new) CFA-8, so from the
perspective of the unwind info, it hasn't moved. */
L(UW1):
/* cfi_def_cfa(%rbp, 32) */
/* cfi_rel_offset(%rbp, 16) */
movq %rdi, %r10 /* Save a copy of the register area. */
movq %r8, %r11 /* Save a copy of the target fn. */
/* Load up all argument registers. */
movq (%r10), %rdi
movq 0x08(%r10), %rsi
movq 0x10(%r10), %rdx
movq 0x18(%r10), %rcx
movq 0x20(%r10), %r8
movq 0x28(%r10), %r9
/* %al = count of SSE argument registers in use — required by the ABI
   for variadic callees; zero lets us skip loading %xmm0-7 entirely.  */
movl 0xb0(%r10), %eax /* Set number of SSE registers. */
testl %eax, %eax
jnz L(load_sse)
L(ret_from_load_sse):
/* Deallocate the reg arg area, except for r10, then load via pop. */
leaq 0xb8(%r10), %rsp
popq %r10
/* Call the user function. */
call *%r11
/* Deallocate stack arg area; local stack frame in redzone. */
leaq 24(%rbp), %rsp
movq 0(%rbp), %rcx /* Reload flags. */
movq 8(%rbp), %rdi /* Reload raddr. */
movq 16(%rbp), %rbp /* Reload old frame pointer. */
L(UW2):
/* cfi_remember_state */
/* cfi_def_cfa(%rsp, 8) */
/* cfi_restore(%rbp) */
/* The first byte of the flags contains the FFI_TYPE. */
/* Dispatch: %cl = FFI_TYPE; each L(store_table) slot is 8 bytes
   (16 with CET); out-of-range values abort.  */
cmpb $UNIX64_RET_LAST, %cl
movzbl %cl, %r10d
leaq L(store_table)(%rip), %r11
ja L(sa)
#ifdef __CET__
/* NB: Originally, each slot is 8 byte. 4 bytes of ENDBR64 +
4 bytes NOP padding double slot size to 16 bytes. */
addl %r10d, %r10d
#endif
leaq (%r11, %r10, 8), %r10
/* Prep for the structure cases: scratch area in redzone. */
leaq -20(%rsp), %rsi
jmp *%r10
.balign 8
/* Return-value storer table: one fixed-size stub per FFI_TYPE,
   writing the callee's result to (%rdi) = raddr.  Integer types
   narrower than 64 bits are widened before the 8-byte store.  */
L(store_table):
E(L(store_table), UNIX64_RET_VOID)
_CET_ENDBR
ret
E(L(store_table), UNIX64_RET_UINT8)
_CET_ENDBR
movzbl %al, %eax
movq %rax, (%rdi)
ret
E(L(store_table), UNIX64_RET_UINT16)
_CET_ENDBR
movzwl %ax, %eax
movq %rax, (%rdi)
ret
E(L(store_table), UNIX64_RET_UINT32)
_CET_ENDBR
/* movl to self zero-extends the upper 32 bits of %rax.  */
movl %eax, %eax
movq %rax, (%rdi)
ret
E(L(store_table), UNIX64_RET_SINT8)
_CET_ENDBR
movsbq %al, %rax
movq %rax, (%rdi)
ret
E(L(store_table), UNIX64_RET_SINT16)
_CET_ENDBR
movswq %ax, %rax
movq %rax, (%rdi)
ret
E(L(store_table), UNIX64_RET_SINT32)
_CET_ENDBR
cltq
movq %rax, (%rdi)
ret
E(L(store_table), UNIX64_RET_INT64)
_CET_ENDBR
movq %rax, (%rdi)
ret
E(L(store_table), UNIX64_RET_XMM32)
_CET_ENDBR
movd %xmm0, (%rdi)
ret
E(L(store_table), UNIX64_RET_XMM64)
_CET_ENDBR
movq %xmm0, (%rdi)
ret
E(L(store_table), UNIX64_RET_X87)
_CET_ENDBR
fstpt (%rdi)
ret
E(L(store_table), UNIX64_RET_X87_2)
_CET_ENDBR
fstpt (%rdi)
fstpt 16(%rdi)
ret
/* Two-eightbyte struct returns: stage both eightbytes in the redzone
   scratch at %rsi, then rep movsb copies the struct's true size
   (flags >> UNIX64_SIZE_SHIFT, still in %ecx) to raddr.  */
E(L(store_table), UNIX64_RET_ST_XMM0_RAX)
_CET_ENDBR
movq %rax, 8(%rsi)
jmp L(s3)
E(L(store_table), UNIX64_RET_ST_RAX_XMM0)
_CET_ENDBR
movq %xmm0, 8(%rsi)
jmp L(s2)
E(L(store_table), UNIX64_RET_ST_XMM0_XMM1)
_CET_ENDBR
movq %xmm1, 8(%rsi)
jmp L(s3)
E(L(store_table), UNIX64_RET_ST_RAX_RDX)
_CET_ENDBR
movq %rdx, 8(%rsi)
L(s2):
/* First eightbyte came back in %rax.  */
movq %rax, (%rsi)
shrl $UNIX64_SIZE_SHIFT, %ecx
rep movsb
ret
.balign 8
L(s3):
/* First eightbyte came back in %xmm0.  */
movq %xmm0, (%rsi)
shrl $UNIX64_SIZE_SHIFT, %ecx
rep movsb
ret
/* Unknown FFI_TYPE byte — internal error.  */
L(sa): call PLT(C(abort))
/* Many times we can avoid loading any SSE registers at all.
It's not worth an indirect jump to load the exact set of
SSE registers needed; zero or all is a good compromise. */
.balign 2
L(UW3):
/* cfi_restore_state */
L(load_sse):
/* Out-of-line: load all eight SSE arg registers from the 16-byte
   aligned slots at 0x30..0xa0 in the register area.  */
movdqa 0x30(%r10), %xmm0
movdqa 0x40(%r10), %xmm1
movdqa 0x50(%r10), %xmm2
movdqa 0x60(%r10), %xmm3
movdqa 0x70(%r10), %xmm4
movdqa 0x80(%r10), %xmm5
movdqa 0x90(%r10), %xmm6
movdqa 0xa0(%r10), %xmm7
jmp L(ret_from_load_sse)
L(UW4):
ENDF(C(ffi_call_unix64))