github.com/mono/corert.git

author    Michal Strehovsky <michals@microsoft.com> 2018-05-12 18:30:54 +0300
committer Michal Strehovsky <michals@microsoft.com> 2018-05-12 18:30:54 +0300
commit    cecadf1808d028de68cddc54529feb526ca7854b (patch)
tree      990315ca2d926bf36eec780bc723ebc9bbbd0382 /src/Native
parent    ae10405baffdb976e81a376379e756d0d1656f95 (diff)
Move TypeLoader.Native to open
[tfs-changeset: 1699905]
Diffstat (limited to 'src/Native')
-rw-r--r--  src/Native/System.Private.TypeLoader.Native/amd64/ConstrainedCallSupportHelpers.asm  |  85
-rw-r--r--  src/Native/System.Private.TypeLoader.Native/amd64/MethodEntrypointStubs.asm          |  67
-rw-r--r--  src/Native/System.Private.TypeLoader.Native/amd64/VTableResolver.asm                 | 221
-rw-r--r--  src/Native/System.Private.TypeLoader.Native/arm/ConstrainedCallSupportHelpers.asm    |  78
-rw-r--r--  src/Native/System.Private.TypeLoader.Native/arm/MethodEntrypointStubs.asm            |   5
-rw-r--r--  src/Native/System.Private.TypeLoader.Native/arm/VTableResolver.asm                   | 163
-rw-r--r--  src/Native/System.Private.TypeLoader.Native/arm64/ConstrainedCallSupportHelpers.asm  |  67
-rw-r--r--  src/Native/System.Private.TypeLoader.Native/arm64/MethodEntrypointStubs.asm          |   5
-rw-r--r--  src/Native/System.Private.TypeLoader.Native/arm64/VTableResolver.asm                 | 155
-rw-r--r--  src/Native/System.Private.TypeLoader.Native/i386/ConstrainedCallSupportHelpers.asm   | 100
-rw-r--r--  src/Native/System.Private.TypeLoader.Native/i386/MethodEntrypointStubs.asm           |   5
-rw-r--r--  src/Native/System.Private.TypeLoader.Native/i386/VTableResolver.asm                  | 246
12 files changed, 1197 insertions(+), 0 deletions(-)
diff --git a/src/Native/System.Private.TypeLoader.Native/amd64/ConstrainedCallSupportHelpers.asm b/src/Native/System.Private.TypeLoader.Native/amd64/ConstrainedCallSupportHelpers.asm
new file mode 100644
index 000000000..4d22d914c
--- /dev/null
+++ b/src/Native/System.Private.TypeLoader.Native/amd64/ConstrainedCallSupportHelpers.asm
@@ -0,0 +1,85 @@
+;; Licensed to the .NET Foundation under one or more agreements.
+;; The .NET Foundation licenses this file to you under the MIT license.
+;; See the LICENSE file in the project root for more information.
+
+;; -----------------------------------------------------------------------------------------------------------
+;; #include "asmmacros.inc"
+;; -----------------------------------------------------------------------------------------------------------
+
+LEAF_ENTRY macro Name, Section
+ Section segment para 'CODE'
+ align 16
+ public Name
+ Name proc
+endm
+
+LEAF_END macro Name, Section
+ Name endp
+ Section ends
+endm
+
+; - TAILJMP_RAX: ("jmp rax") should be used for tailcalls, this emits an instruction
+; sequence which is recognized by the unwinder as a valid epilogue terminator
+TAILJMP_RAX TEXTEQU <DB 048h, 0FFh, 0E0h>
+POINTER_SIZE equ 08h
+
+;;
+;; Note: The "__jmpstub__" prefix is used to indicate to debugger
+;; that it must step-through this stub when it encounters it while
+;; stepping.
+;;
+
+;;
+;; __jmpstub__ConstrainedCallSupport_DerefThisAndCall_CommonCallingStub
+;;
+;; r10 - AddressOfAddressOfFunctionToCallAfterDereferencingThis
+;;
+LEAF_ENTRY __jmpstub__ConstrainedCallSupport_DerefThisAndCall_CommonCallingStub, _TEXT
+ mov rax, [r10] ; Tail jumps go through RAX, so copy function pointer there
+ mov rcx, [rcx] ; Dereference this to get the real this pointer
+ TAILJMP_RAX
+LEAF_END __jmpstub__ConstrainedCallSupport_DerefThisAndCall_CommonCallingStub, _TEXT
+
+;;
+;; void ConstrainedCallSupport_GetStubs(IntPtr *__jmpstub__ConstrainedCallSupport_DerefThisAndCall_CommonCallingStub,
+;; IntPtr *__jmpstub__ConstrainedCallSupport_DirectConstrainedCallCommonStub)
+;;
+LEAF_ENTRY ConstrainedCallSupport_GetStubs, _TEXT
+ lea rax, [__jmpstub__ConstrainedCallSupport_DerefThisAndCall_CommonCallingStub]
+ mov [rcx], rax
+ lea rax, [__jmpstub__ConstrainedCallSupport_DirectConstrainedCallCommonStub]
+ mov [rdx], rax
+ ret
+LEAF_END ConstrainedCallSupport_GetStubs, _TEXT
+
+;;
+;; __jmpstub__ConstrainedCallSupport_DirectConstrainedCallCommonStub
+;;
+;; struct ConstrainedCallDesc
+;; {
+;; ULONG_PTR ExactTarget;
+;; ULONG_PTR LookupFunc; // Put UniversalThunk here
+;; }
+;;
+;; struct CommonCallingStubInputData
+;; {
+;; ULONG_PTR ConstrainedCallDesc;
+;; ULONG_PTR DirectConstrainedCallResolver;
+;; }
+;;
+;; r10 - Points at CommonCallingStubInputData
+;;
+;;
+LEAF_ENTRY __jmpstub__ConstrainedCallSupport_DirectConstrainedCallCommonStub, _TEXT
+ mov r11, [r10] ; put ConstrainedCallDesc into r11 (Arg to LookupFunc/Temp for getting ExactTarget)
+ mov rax, [r11] ; put ExactTarget into rax
+ test rax, rax ; compare against null
+ jnz JumpToTarget ; if not null, we don't need to call helper to get result. Just jump
+ ; If we reach here, we need to use a universal thunk to call the LookupFunc
+ mov rax, [r11 + POINTER_SIZE] ; Get Universal thunk function pointer into rax
+ mov r10, [r10 + POINTER_SIZE] ; Put DirectConstrainedCallResolver into r10 for UniversalTransitionThunk call
+JumpToTarget:
+ TAILJMP_RAX
+LEAF_END __jmpstub__ConstrainedCallSupport_DirectConstrainedCallCommonStub, _TEXT
+
+end
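
The two structs in the comments above describe the data this stub walks at run time. As a rough illustration of the dispatch decision made by __jmpstub__ConstrainedCallSupport_DirectConstrainedCallCommonStub, here is a minimal C sketch (not part of the commit; PickConstrainedCallTarget and its parameters are hypothetical names, and the real stub does this with register loads and a tail jump):

    /* Illustrative C model of the DirectConstrainedCall common stub above. */
    #include <stdint.h>

    typedef uintptr_t ULONG_PTR;

    struct ConstrainedCallDesc
    {
        ULONG_PTR ExactTarget;   /* resolved target, or 0 if not yet resolved */
        ULONG_PTR LookupFunc;    /* universal transition thunk */
    };

    struct CommonCallingStubInputData
    {
        ULONG_PTR ConstrainedCallDesc;            /* points at a ConstrainedCallDesc */
        ULONG_PTR DirectConstrainedCallResolver;  /* resolver invoked through the universal thunk */
    };

    /* Decide where the stub transfers control; the real stub never returns here. */
    static ULONG_PTR PickConstrainedCallTarget(struct CommonCallingStubInputData *stubData,
                                               ULONG_PTR *resolverArg /* out: hidden argument for the universal thunk */)
    {
        struct ConstrainedCallDesc *desc =
            (struct ConstrainedCallDesc *)stubData->ConstrainedCallDesc;

        if (desc->ExactTarget != 0)
            return desc->ExactTarget;                 /* fast path: jump straight to the resolved method */

        *resolverArg = stubData->DirectConstrainedCallResolver;
        return desc->LookupFunc;                      /* slow path: go through the universal thunk */
    }
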
diff --git a/src/Native/System.Private.TypeLoader.Native/amd64/MethodEntrypointStubs.asm b/src/Native/System.Private.TypeLoader.Native/amd64/MethodEntrypointStubs.asm
new file mode 100644
index 000000000..f0c212b12
--- /dev/null
+++ b/src/Native/System.Private.TypeLoader.Native/amd64/MethodEntrypointStubs.asm
@@ -0,0 +1,67 @@
+;; Licensed to the .NET Foundation under one or more agreements.
+;; The .NET Foundation licenses this file to you under the MIT license.
+;; See the LICENSE file in the project root for more information.
+
+LEAF_ENTRY macro Name, Section
+ Section segment para 'CODE'
+ align 16
+ public Name
+ Name proc
+endm
+
+LEAF_END macro Name, Section
+ Name endp
+ Section ends
+endm
+
+.data
+
+g_methodEntrypointThunk qword 0 ; The method entrypoint thunk invoked through the universal transition
+g_universalTransition qword 0 ; The address of Redhawk's UniversalTransition thunk
+
+.code
+
+; - TAILJMP_RAX: ("jmp rax") should be used for tailcalls, this emits an instruction
+; sequence which is recognized by the unwinder as a valid epilogue terminator
+TAILJMP_RAX TEXTEQU <DB 048h, 0FFh, 0E0h>
+
+PointerSize equ 8
+
+;;
+;; __jmpstub__MethodEntrypointStubs_CommonCallingStub(?)
+;; Used when we need a method entrypoint stub that is not pre-generated
+;;
+;; r10 contains a pointer to a MethodEntryPointStubInfo
+;; struct MethodEntryPointStubInfo
+;; {
+;; IntPtr targetCodePointer;
+;; IntPtr MethodEntrypointStructPointer;
+;; };
+;;
+LEAF_ENTRY __jmpstub__MethodEntrypointStubs_CommonCallingStub, _TEXT
+ ;; r10 <- stub info
+ mov rax, [r10]
+ cmp rax, 0
+ je SLOW_PATH
+ mov rax, [r10]
+ TAILJMP_RAX
+SLOW_PATH:
+ mov r11, [r10 + 8]
+ mov r10, g_methodEntrypointThunk
+ mov rax, g_universalTransition
+ TAILJMP_RAX
+LEAF_END __jmpstub__MethodEntrypointStubs_CommonCallingStub, _TEXT
+
+;; Returns the size of the pre-generated thunks
+;; IntPtr MethodEntrypointStubs_SetupPointers(
+;; IntPtr universalTransition,
+;; IntPtr methodEntrypointThunk)
+;;
+LEAF_ENTRY MethodEntrypointStubs_SetupPointers, _TEXT
+ mov g_universalTransition, rcx
+ mov g_methodEntrypointThunk, rdx
+ lea rax, [__jmpstub__MethodEntrypointStubs_CommonCallingStub]
+ ret
+LEAF_END MethodEntrypointStubs_SetupPointers, _TEXT
+
+end
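
The method entrypoint stub above makes the same kind of two-way decision as the constrained-call stub: run the already-compiled code if targetCodePointer is set, otherwise fall back to the universal transition thunk with g_methodEntrypointThunk as the callee. A minimal C sketch of that decision (illustrative only; PickEntrypointTarget is a hypothetical name and the globals stand in for the .data cells defined in the file):

    /* Illustrative C model of __jmpstub__MethodEntrypointStubs_CommonCallingStub. */
    #include <stddef.h>

    typedef void *IntPtr;

    struct MethodEntryPointStubInfo
    {
        IntPtr targetCodePointer;              /* compiled entrypoint, or NULL until the method has code */
        IntPtr MethodEntrypointStructPointer;  /* handed to the entrypoint thunk on the slow path */
    };

    static IntPtr g_methodEntrypointThunk;  /* stored by MethodEntrypointStubs_SetupPointers */
    static IntPtr g_universalTransition;    /* Redhawk's UniversalTransition thunk */

    static IntPtr PickEntrypointTarget(struct MethodEntryPointStubInfo *info)
    {
        if (info->targetCodePointer != NULL)
            return info->targetCodePointer;   /* fast path: jump straight to the compiled code */

        /* Slow path: the real stub loads MethodEntrypointStructPointer and
           g_methodEntrypointThunk into scratch registers and tail-jumps to the
           universal transition thunk. */
        (void)info->MethodEntrypointStructPointer;
        (void)g_methodEntrypointThunk;
        return g_universalTransition;
    }
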
diff --git a/src/Native/System.Private.TypeLoader.Native/amd64/VTableResolver.asm b/src/Native/System.Private.TypeLoader.Native/amd64/VTableResolver.asm
new file mode 100644
index 000000000..8f540bdcc
--- /dev/null
+++ b/src/Native/System.Private.TypeLoader.Native/amd64/VTableResolver.asm
@@ -0,0 +1,221 @@
+;; Licensed to the .NET Foundation under one or more agreements.
+;; The .NET Foundation licenses this file to you under the MIT license.
+;; See the LICENSE file in the project root for more information.
+
+LEAF_ENTRY macro Name, Section
+ Section segment para 'CODE'
+ align 16
+ public Name
+ Name proc
+endm
+
+LEAF_END macro Name, Section
+ Name endp
+ Section ends
+endm
+
+.data
+
+g_vtableResolveCallback qword 0 ; The vtableresolve method
+g_universalTransition qword 0 ; The address of Redhawk's UniversalTransition thunk
+
+.code
+
+; - TAILJMP_RAX: ("jmp rax") should be used for tailcalls, this emits an instruction
+; sequence which is recognized by the unwinder as a valid epilogue terminator
+TAILJMP_RAX TEXTEQU <DB 048h, 0FFh, 0E0h>
+
+;;
+;; When an EEType is created, its vtable entries are initially filled with calls to the vtable thunk for the
+;; appropriate slot number. When a thunk is invoked it checks whether the slot has already been resolved. If so, it
+;; jumps straight to the method stored in the slot; otherwise it invokes the resolution callback (through the
+;; universal transition thunk), which updates the vtable slot and then makes the call.
+;;
+
+VTableThunkSize equ 30h
+;; TODO - do something similar to Redhawk's asmoffsets to compute the value at compile time
+EETypeVTableOffset equ 18h
+PointerSize equ 8
+
+;;
+;; __jmpstub__VTableResolver_CommonCallingStub(?)
+;; Used when we dynamically need a VTableResolver not pre-generated
+;;
+;; r10 contains a pointer to a VTableResolverStruct
+;; struct VTableResolverStruct
+;; {
+;; int offsetFromStartOfEETypePtr;
+;; IntPtr VTableThunkAddress;
+;; };
+;;
+LEAF_ENTRY __jmpstub__VTableResolver_CommonCallingStub, _TEXT
+ ;; r10 <- stub info
+ ;; rcx is the this pointer to the call being made
+ mov rax, [rcx]
+ ;; rax is the EEType pointer; add the VTableOffset + slot_number * pointer size to get the vtable entry
+ mov r11, [r10]
+ ;; r11 now has offset from start of EEType to interesting slot
+
+ ;; compare that value to the address of the thunk being executed; if the values are equal,
+ ;; call the resolver, otherwise call the method
+
+ mov rax, [rax + r11]
+ cmp rax, [r10 + 8]
+ je SLOW_DYNAMIC_STUB
+ TAILJMP_RAX
+SLOW_DYNAMIC_STUB:
+ mov r10, g_vtableResolveCallback
+ mov rax, g_universalTransition
+ TAILJMP_RAX
+LEAF_END __jmpstub__VTableResolver_CommonCallingStub, _TEXT
+
+;; Returns the size of the pre-generated thunks
+;; int VTableResolver_Init(IntPtr *__jmpstub__VTableResolverSlot0,
+;; IntPtr vtableResolveCallback,
+;; IntPtr universalTransition,
+;; int *slotCount)
+;;
+LEAF_ENTRY VTableResolver_Init, _TEXT
+ lea rax, [__jmpstub__VTableSlot0]
+ mov [rcx], rax
+ mov g_vtableResolveCallback, rdx
+ mov g_universalTransition, r8
+ mov rax, 100 ;; 100 Pregenerated Stubs
+ mov [r9], rax
+ mov rax, VTableThunkSize ;; Thunk size
+ ret
+LEAF_END VTableResolver_Init, _TEXT
+
+;; void* VTableResolver_GetCommonCallingStub()
+;; - Get the address of the common calling stub
+LEAF_ENTRY VTableResolver_GetCommonCallingStub, _TEXT
+ lea rax, [__jmpstub__VTableResolver_CommonCallingStub]
+ ret
+LEAF_END VTableResolver_GetCommonCallingStub, _TEXT
+
+VTableThunkDecl macro name, slot_number
+
+LEAF_ENTRY name, _TEXT
+ALIGN 16 ; The alignment here forces the thunks to be the same size, which allows them to be indexed
+ ;; rcx is the this pointer to the call being made
+ mov rax, [rcx]
+ mov r11, EETypeVTableOffset + slot_number * PointerSize
+ ;; rax is the EEType pointer; add the VTableOffset + slot_number * pointer size to get the vtable entry
+ ;; compare that value to the address of the thunk being executed; if the values are equal,
+ ;; call the resolver, otherwise call the method
+ mov rax, [rax + r11]
+ lea r10, name
+ cmp rax, r10
+ je SLOW
+ TAILJMP_RAX
+SLOW:
+ ;; r11 is already set to the EEType offset
+ mov r10, g_vtableResolveCallback
+ mov rax, g_universalTransition
+ TAILJMP_RAX
+LEAF_END name, _TEXT
+
+ endm
+
+VTableThunkDecl __jmpstub__VTableSlot0,0
+VTableThunkDecl __jmpstub__VTableSlot1,1
+VTableThunkDecl __jmpstub__VTableSlot2,2
+VTableThunkDecl __jmpstub__VTableSlot3,3
+VTableThunkDecl __jmpstub__VTableSlot4,4
+VTableThunkDecl __jmpstub__VTableSlot5,5
+VTableThunkDecl __jmpstub__VTableSlot6,6
+VTableThunkDecl __jmpstub__VTableSlot7,7
+VTableThunkDecl __jmpstub__VTableSlot8,8
+VTableThunkDecl __jmpstub__VTableSlot9,9
+VTableThunkDecl __jmpstub__VTableSlot10,10
+VTableThunkDecl __jmpstub__VTableSlot11,11
+VTableThunkDecl __jmpstub__VTableSlot12,12
+VTableThunkDecl __jmpstub__VTableSlot13,13
+VTableThunkDecl __jmpstub__VTableSlot14,14
+VTableThunkDecl __jmpstub__VTableSlot15,15
+VTableThunkDecl __jmpstub__VTableSlot16,16
+VTableThunkDecl __jmpstub__VTableSlot17,17
+VTableThunkDecl __jmpstub__VTableSlot18,18
+VTableThunkDecl __jmpstub__VTableSlot19,19
+VTableThunkDecl __jmpstub__VTableSlot20,20
+VTableThunkDecl __jmpstub__VTableSlot21,21
+VTableThunkDecl __jmpstub__VTableSlot22,22
+VTableThunkDecl __jmpstub__VTableSlot23,23
+VTableThunkDecl __jmpstub__VTableSlot24,24
+VTableThunkDecl __jmpstub__VTableSlot25,25
+VTableThunkDecl __jmpstub__VTableSlot26,26
+VTableThunkDecl __jmpstub__VTableSlot27,27
+VTableThunkDecl __jmpstub__VTableSlot28,28
+VTableThunkDecl __jmpstub__VTableSlot29,29
+VTableThunkDecl __jmpstub__VTableSlot30,30
+VTableThunkDecl __jmpstub__VTableSlot31,31
+VTableThunkDecl __jmpstub__VTableSlot32,32
+VTableThunkDecl __jmpstub__VTableSlot33,33
+VTableThunkDecl __jmpstub__VTableSlot34,34
+VTableThunkDecl __jmpstub__VTableSlot35,35
+VTableThunkDecl __jmpstub__VTableSlot36,36
+VTableThunkDecl __jmpstub__VTableSlot37,37
+VTableThunkDecl __jmpstub__VTableSlot38,38
+VTableThunkDecl __jmpstub__VTableSlot39,39
+VTableThunkDecl __jmpstub__VTableSlot40,40
+VTableThunkDecl __jmpstub__VTableSlot41,41
+VTableThunkDecl __jmpstub__VTableSlot42,42
+VTableThunkDecl __jmpstub__VTableSlot43,43
+VTableThunkDecl __jmpstub__VTableSlot44,44
+VTableThunkDecl __jmpstub__VTableSlot45,45
+VTableThunkDecl __jmpstub__VTableSlot46,46
+VTableThunkDecl __jmpstub__VTableSlot47,47
+VTableThunkDecl __jmpstub__VTableSlot48,48
+VTableThunkDecl __jmpstub__VTableSlot49,49
+VTableThunkDecl __jmpstub__VTableSlot50,50
+VTableThunkDecl __jmpstub__VTableSlot51,51
+VTableThunkDecl __jmpstub__VTableSlot52,52
+VTableThunkDecl __jmpstub__VTableSlot53,53
+VTableThunkDecl __jmpstub__VTableSlot54,54
+VTableThunkDecl __jmpstub__VTableSlot55,55
+VTableThunkDecl __jmpstub__VTableSlot56,56
+VTableThunkDecl __jmpstub__VTableSlot57,57
+VTableThunkDecl __jmpstub__VTableSlot58,58
+VTableThunkDecl __jmpstub__VTableSlot59,59
+VTableThunkDecl __jmpstub__VTableSlot60,60
+VTableThunkDecl __jmpstub__VTableSlot61,61
+VTableThunkDecl __jmpstub__VTableSlot62,62
+VTableThunkDecl __jmpstub__VTableSlot63,63
+VTableThunkDecl __jmpstub__VTableSlot64,64
+VTableThunkDecl __jmpstub__VTableSlot65,65
+VTableThunkDecl __jmpstub__VTableSlot66,66
+VTableThunkDecl __jmpstub__VTableSlot67,67
+VTableThunkDecl __jmpstub__VTableSlot68,68
+VTableThunkDecl __jmpstub__VTableSlot69,69
+VTableThunkDecl __jmpstub__VTableSlot70,70
+VTableThunkDecl __jmpstub__VTableSlot71,71
+VTableThunkDecl __jmpstub__VTableSlot72,72
+VTableThunkDecl __jmpstub__VTableSlot73,73
+VTableThunkDecl __jmpstub__VTableSlot74,74
+VTableThunkDecl __jmpstub__VTableSlot75,75
+VTableThunkDecl __jmpstub__VTableSlot76,76
+VTableThunkDecl __jmpstub__VTableSlot77,77
+VTableThunkDecl __jmpstub__VTableSlot78,78
+VTableThunkDecl __jmpstub__VTableSlot79,79
+VTableThunkDecl __jmpstub__VTableSlot80,80
+VTableThunkDecl __jmpstub__VTableSlot81,81
+VTableThunkDecl __jmpstub__VTableSlot82,82
+VTableThunkDecl __jmpstub__VTableSlot83,83
+VTableThunkDecl __jmpstub__VTableSlot84,84
+VTableThunkDecl __jmpstub__VTableSlot85,85
+VTableThunkDecl __jmpstub__VTableSlot86,86
+VTableThunkDecl __jmpstub__VTableSlot87,87
+VTableThunkDecl __jmpstub__VTableSlot88,88
+VTableThunkDecl __jmpstub__VTableSlot89,89
+VTableThunkDecl __jmpstub__VTableSlot90,90
+VTableThunkDecl __jmpstub__VTableSlot91,91
+VTableThunkDecl __jmpstub__VTableSlot92,92
+VTableThunkDecl __jmpstub__VTableSlot93,93
+VTableThunkDecl __jmpstub__VTableSlot94,94
+VTableThunkDecl __jmpstub__VTableSlot95,95
+VTableThunkDecl __jmpstub__VTableSlot96,96
+VTableThunkDecl __jmpstub__VTableSlot97,97
+VTableThunkDecl __jmpstub__VTableSlot98,98
+VTableThunkDecl __jmpstub__VTableSlot99,99
+
+end
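
Every slot thunk above follows the check described in the file header: read the EEType from the object, load the vtable slot, and either jump to the stored pointer or fall into the resolver path. A minimal C sketch of that check, using the amd64 constants from this file (illustrative only; ResolveSlot and its parameters are hypothetical names):

    /* Illustrative C model of one pre-generated VTable slot thunk. */
    #include <stdint.h>

    #define EETypeVTableOffset 0x18
    #define PointerSize        8

    /* Returns the code address the thunk would tail-jump to for a call through
       'slotNumber' on object 'thisPtr'; 'thunkAddress' is the thunk's own address. */
    static uintptr_t ResolveSlot(void *thisPtr, int slotNumber, uintptr_t thunkAddress,
                                 uintptr_t vtableResolveCallback)
    {
        uintptr_t eeType    = *(uintptr_t *)thisPtr;   /* first word of the object is the EEType */
        uintptr_t slotAddr  = eeType + EETypeVTableOffset + (uintptr_t)slotNumber * PointerSize;
        uintptr_t slotValue = *(uintptr_t *)slotAddr;

        if (slotValue == thunkAddress)
            return vtableResolveCallback;   /* slot still points back at the thunk: run the resolver,
                                               which updates the slot and then makes the call */
        return slotValue;                   /* slot already resolved: call the real method */
    }
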
diff --git a/src/Native/System.Private.TypeLoader.Native/arm/ConstrainedCallSupportHelpers.asm b/src/Native/System.Private.TypeLoader.Native/arm/ConstrainedCallSupportHelpers.asm
new file mode 100644
index 000000000..1f04e2a43
--- /dev/null
+++ b/src/Native/System.Private.TypeLoader.Native/arm/ConstrainedCallSupportHelpers.asm
@@ -0,0 +1,78 @@
+;; Licensed to the .NET Foundation under one or more agreements.
+;; The .NET Foundation licenses this file to you under the MIT license.
+;; See the LICENSE file in the project root for more information.
+
+#include "kxarm.h"
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; DATA SECTIONS ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ConstrainedCall Support Helpers ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+
+;;
+;; Note: The "__jmpstub__" prefix is used to indicate to debugger
+;; that it must step-through this stub when it encounters it while
+;; stepping.
+;;
+ ;;
+ ;; sp-4 - AddressOfAddressOfFunctionToCallAfterDereferencingThis
+ ;;
+ LEAF_ENTRY __jmpstub__ConstrainedCallSupport_DerefThisAndCall_CommonCallingStub
+ ldr r12, [sp, #-4]
+ ldr r12, [r12]
+ ldr r0, [r0]
+ bx r12
+ LEAF_END __jmpstub__ConstrainedCallSupport_DerefThisAndCall_CommonCallingStub
+
+;;
+;; void ConstrainedCallSupport_GetStubs(IntPtr *__jmpstub__ConstrainedCallSupport_DerefThisAndCall_CommonCallingStub,
+;; IntPtr *__jmpstub__ConstrainedCallSupport_DirectConstrainedCallCommonStub)
+;;
+ LEAF_ENTRY ConstrainedCallSupport_GetStubs
+ ldr r12, =__jmpstub__ConstrainedCallSupport_DerefThisAndCall_CommonCallingStub
+ str r12, [r0]
+ ldr r12, =__jmpstub__ConstrainedCallSupport_DirectConstrainedCallCommonStub
+ str r12, [r1]
+ bx lr
+ LEAF_END ConstrainedCallSupport_GetStubs
+
+;;
+;; __jmpstub__ConstrainedCallSupport_DirectConstrainedCallCommonStub
+;;
+;; struct ConstrainedCallDesc
+;; {
+;; ULONG_PTR ExactTarget;
+;; ULONG_PTR LookupFunc; // Put UniversalThunk here
+;; }
+;;
+;; struct CommonCallingStubInputData
+;; {
+;; ULONG_PTR ConstrainedCallDesc;
+;; ULONG_PTR DirectConstrainedCallResolver;
+;; }
+;;
+;; sp-4 - Points at CommonCallingStubInputData
+;;
+;;
+ LEAF_ENTRY __jmpstub__ConstrainedCallSupport_DirectConstrainedCallCommonStub
+ ldr r12, [sp, #-4] ; put CommonCallingStubInputData into r12 (Temp for getting ExactTarget)
+ ldr r12, [r12] ; put ConstrainedCallDesc into r12 (Temp for getting ExactTarget)
+ ldr r12, [r12] ; put ExactTarget into r12
+ cmp r12, 0 ; Is ExactTarget null?
+ beq NeedHelperCall ; if null use a helper call
+ bx r12 ; Otherwise tail-call the ExactTarget
+NeedHelperCall
+ ;; Setup arguments for UniversalThunk and call it.
+ ldr r12, [sp, #-4] ; put CommonCallingStubInputData into r12 (Temp for getting ConstrainedCallDesc)
+ ldr r12, [r12] ; put ConstrainedCallDesc into r12
+ str r12, [sp, #-8] ; put ConstrainedCallDesc into sp-8 (red zone location of custom calling convention for universal thunk)
+
+ ldr r12, [sp, #-4] ; put CommonCallingStubInputData into r12 (Temp for getting DirectConstrainedCallResolver)
+ ldr r12, [r12, #4] ; put DirectConstrainedCallResolver into r12
+ str r12, [sp, #-4] ; put DirectConstrainedCallResolver into sp-4 (red zone location of custom calling convention for universal thunk)
+
+ ldr r12, [sp, #-8] ; put ConstrainedCallDesc into r12 (Temp for getting ExactTarget)
+ ldr r12, [r12, #4] ; put LookupFunc into r12 (This should be universal thunk pointer)
+ bx r12 ; Tail-Call Universal thunk
+ LEAF_END __jmpstub__ConstrainedCallSupport_DirectConstrainedCallCommonStub
+
+ END
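
Unlike the amd64 version, the ARM stub above receives its hidden argument in the red zone just below sp rather than in a register, and it rewrites those two red-zone words before branching to the universal thunk. A minimal C sketch of that shuffle (illustrative only; ArmDirectConstrainedCallStub is a hypothetical name and the redZone array stands in for the two words at [sp, #-8] and [sp, #-4]):

    /* Illustrative model of the ARM red-zone shuffle performed above. */
    #include <stdint.h>

    typedef uintptr_t ULONG_PTR;

    struct ConstrainedCallDesc        { ULONG_PTR ExactTarget; ULONG_PTR LookupFunc; };
    struct CommonCallingStubInputData { ULONG_PTR ConstrainedCallDesc; ULONG_PTR DirectConstrainedCallResolver; };

    /* redZone[1] models [sp, #-4]; redZone[0] models [sp, #-8]. Returns the branch target. */
    static ULONG_PTR ArmDirectConstrainedCallStub(ULONG_PTR redZone[2])
    {
        struct CommonCallingStubInputData *stubData =
            (struct CommonCallingStubInputData *)redZone[1];      /* incoming [sp, #-4] */
        struct ConstrainedCallDesc *desc =
            (struct ConstrainedCallDesc *)stubData->ConstrainedCallDesc;

        if (desc->ExactTarget != 0)
            return desc->ExactTarget;                             /* tail-call the resolved target */

        /* Re-purpose the red zone as the universal thunk's custom calling convention:
           [sp, #-8] = ConstrainedCallDesc, [sp, #-4] = DirectConstrainedCallResolver. */
        redZone[0] = stubData->ConstrainedCallDesc;
        redZone[1] = stubData->DirectConstrainedCallResolver;
        return desc->LookupFunc;                                  /* branch to the universal thunk */
    }
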
diff --git a/src/Native/System.Private.TypeLoader.Native/arm/MethodEntrypointStubs.asm b/src/Native/System.Private.TypeLoader.Native/arm/MethodEntrypointStubs.asm
new file mode 100644
index 000000000..0e0cdcc5d
--- /dev/null
+++ b/src/Native/System.Private.TypeLoader.Native/arm/MethodEntrypointStubs.asm
@@ -0,0 +1,5 @@
+;; Licensed to the .NET Foundation under one or more agreements.
+;; The .NET Foundation licenses this file to you under the MIT license.
+;; See the LICENSE file in the project root for more information.
+
+ END
diff --git a/src/Native/System.Private.TypeLoader.Native/arm/VTableResolver.asm b/src/Native/System.Private.TypeLoader.Native/arm/VTableResolver.asm
new file mode 100644
index 000000000..36e244e93
--- /dev/null
+++ b/src/Native/System.Private.TypeLoader.Native/arm/VTableResolver.asm
@@ -0,0 +1,163 @@
+;; Licensed to the .NET Foundation under one or more agreements.
+;; The .NET Foundation licenses this file to you under the MIT license.
+;; See the LICENSE file in the project root for more information.
+
+#include "kxarm.h"
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; DATA SECTIONS ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+ DATAAREA
+
+g_vtableResolveCallback DCD 0 ; The vtableresolve method
+ EXPORT g_vtableResolveCallback
+g_universalTransition DCD 0 ; The address of Redhawk's UniversalTransition thunk
+
+#define VTableThunkSize 0x20
+;; TODO - do something similar to Redhawk's asmoffsets to compute the value at compile time
+#define EETypeVTableOffset 0x14
+#define PointerSize 4
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ConstrainedCall Support Helpers ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+
+ TEXTAREA
+;;
+;; Note: The "__jmpstub__" prefix is used to indicate to debugger
+;; that it must step-through this stub when it encounters it while
+;; stepping.
+;;
+
+;; Returns the size of the pre-generated thunks
+;; int VTableResolver_Init(IntPtr *__jmpstub__VTableResolverSlot0,
+;; IntPtr vtableResolveCallback,
+;; IntPtr universalTransition,
+;; int *slotCount)
+;;
+ LEAF_ENTRY VTableResolver_Init
+ ldr r12, __jmpstub__VTableSlot000
+ add r12, r12, 1 ; Add thumb bit
+ str r12, [r0]
+ ldr r12, =g_vtableResolveCallback
+ str r1, [r12]
+ ldr r12, =g_universalTransition
+ str r2, [r12]
+ mov r12, 100 ; This file defines 100 slot helpers
+ str r12, [r3]
+ mov r0, VTableThunkSize ; Each thunk is VTableThunkSize in bytes
+ bx lr
+ LEAF_END VTableResolver_Init
+
+;; void* VTableResolver_GetCommonCallingStub()
+;; - Get the address of the common calling stub
+ LEAF_ENTRY VTableResolver_GetCommonCallingStub
+ ldr r0, __jmpstub__VTableResolver_CommonCallingStub
+ bx lr
+ LEAF_END VTableResolver_GetCommonCallingStub
+
+;;
+;; __jmpstub__VTableResolver_CommonCallingStub(?)
+;; Used when we dynamically need a VTableResolver not pre-generated
+;;
+;; sp-4 contains a pointer to a VTableResolverStruct
+;; struct VTableResolverStruct
+;; {
+;; int offsetFromStartOfEETypePtr;
+;; IntPtr VTableThunkAddress;
+;; };
+;;
+ LEAF_ENTRY __jmpstub__VTableResolver_CommonCallingStub
+ ;; Custom calling convention:
+ ;; red zone has pointer to the VTableResolverStruct
+ ;; Copy red zone value into r12 so that the PROLOG_PUSH doesn't destroy it
+ PROLOG_NOP ldr r12, [sp, #-4]
+ PROLOG_PUSH {r3}
+ PROLOG_PUSH {r1-r2}
+ ldr r2, [r0]
+ ;; r2 is the EEType pointer; add the VTableOffset + slot_number * pointer size to get the vtable entry
+ ;; compare that value to the address of the thunk being executed; if the values are equal,
+ ;; call the resolver, otherwise call the method
+ ldr r1, [r12]
+ ;; r1 is the offset from start of EEType to interesting slot
+ ldr r3, [r2,r1]
+ ;; r3 is now the function pointer in the vtable
+ ldr r2,[r12,#4]
+ ;; r2 is now the address of the thunk that serves as the entry point for this particular instantiation
+ ;; of __jmpstub__VTableResolver_CommonCallingStub
+ cmp r2,r3
+ beq __jmpstub__JumpToVTableResolver
+ mov r12,r3 ; Move the target function pointer to r12
+ EPILOG_POP {r1,r2}
+ EPILOG_POP {r3}
+ EPILOG_BRANCH_REG r12
+ LEAF_END __jmpstub__VTableResolver_CommonCallingStub
+
+;; The slot thunks and the common calling stub branch here with r1 set to EETypeVTableOffset + ($slot_number * PointerSize)
+;; and with the caller's original r1, r2 and r3 values pushed on the stack
+ LEAF_ENTRY __jmpstub__JumpToVTableResolver
+ mov r3, r1
+ POP {r1,r2}
+ str r3, [sp, #-4] ; Store slot number into red zone at appropriate spot
+ POP {r3}
+ ldr r12, =g_vtableResolveCallback
+ ldr r12, [r12]
+ str r12, [sp, #-4] ; Store vtable resolve callback into red zone
+ ldr r12, =g_universalTransition
+ ldr r12, [r12]
+ bx r12
+ LEAF_END __jmpstub__JumpToVTableResolver
+
+
+ MACRO
+ VTableThunkDecl $name, $slot_number
+ ALIGN 16 ; The alignment here forces the thunks to be the same size, which allows them to be indexed
+ LEAF_ENTRY __jmpstub__$name
+ ;; r0 is the this pointer to the call being made
+ PUSH {r3}
+ PUSH {r1,r2} ; Push r1,r2
+ ldr r2, [r0]
+ ;; r2 is the EEType pointer; add the VTableOffset + slot_number * pointer size to get the vtable entry
+ ;; compare that value to the address of the thunk being executed; if the values are equal,
+ ;; call the resolver, otherwise call the method
+ mov r1, EETypeVTableOffset + ($slot_number * PointerSize)
+ ldr r3, [r2,r1]
+ ; r3 is now the function pointer in the vtable
+ ldr r2,=__jmpstub__$name
+ cmp r2,r3
+ beq JumpVTableResolver$name
+ mov r12,r3 ; Move the target function pointer to r12 before popping r1-r3. We used r3 instead of r12 here so that
+ ; we could use the 2-byte Thumb instructions and the whole thunk could fit in less than 32 bytes
+ POP {r1,r2,r3}
+ bx r12
+JumpVTableResolver$name
+ b __jmpstub__JumpToVTableResolver
+ LEAF_END __jmpstub__$name
+ MEND
+
+ MACRO
+ VTableThunkDeclTen $slotnumberDecimal
+ VTableThunkDecl VTableSlot$slotnumberDecimal0,$slotnumberDecimal0
+ VTableThunkDecl VTableSlot$slotnumberDecimal1,$slotnumberDecimal1
+ VTableThunkDecl VTableSlot$slotnumberDecimal2,$slotnumberDecimal2
+ VTableThunkDecl VTableSlot$slotnumberDecimal3,$slotnumberDecimal3
+ VTableThunkDecl VTableSlot$slotnumberDecimal4,$slotnumberDecimal4
+ VTableThunkDecl VTableSlot$slotnumberDecimal5,$slotnumberDecimal5
+ VTableThunkDecl VTableSlot$slotnumberDecimal6,$slotnumberDecimal6
+ VTableThunkDecl VTableSlot$slotnumberDecimal7,$slotnumberDecimal7
+ VTableThunkDecl VTableSlot$slotnumberDecimal8,$slotnumberDecimal8
+ VTableThunkDecl VTableSlot$slotnumberDecimal9,$slotnumberDecimal9
+ MEND
+
+ MACRO
+ VTableThunkDeclHundred $slotnumberPerHundred
+ VTableThunkDeclTen $slotnumberPerHundred0
+ VTableThunkDeclTen $slotnumberPerHundred1
+ VTableThunkDeclTen $slotnumberPerHundred2
+ VTableThunkDeclTen $slotnumberPerHundred3
+ VTableThunkDeclTen $slotnumberPerHundred4
+ VTableThunkDeclTen $slotnumberPerHundred5
+ VTableThunkDeclTen $slotnumberPerHundred6
+ VTableThunkDeclTen $slotnumberPerHundred7
+ VTableThunkDeclTen $slotnumberPerHundred8
+ VTableThunkDeclTen $slotnumberPerHundred9
+ MEND
+
+ VTableThunkDeclHundred 0
+
+ END
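
VTableResolver_Init hands back everything a caller needs to index the 100 pre-generated slot thunks: the address of slot 0 (with the Thumb bit already set on ARM), the slot count, and the fixed per-thunk size as the return value. A caller-side sketch of that contract in C (illustrative only; InitThunks and GetPregeneratedSlotThunk are hypothetical helpers, and the VTableResolver_Init prototype is reconstructed from the assembly comments):

    /* Caller-side sketch of the VTableResolver_Init contract. */
    #include <stddef.h>
    #include <stdint.h>

    /* Assumed prototype, reconstructed from the assembly comments above. */
    extern int VTableResolver_Init(void **slot0, void *vtableResolveCallback,
                                   void *universalTransition, int *slotCount);

    static void *g_slot0;       /* address of the first slot thunk (Thumb bit already set on ARM) */
    static int   g_slotCount;   /* 100 pre-generated thunks */
    static int   g_thunkSize;   /* fixed size of each thunk, returned by VTableResolver_Init */

    static void InitThunks(void *resolveCallback, void *universalTransition)
    {
        g_thunkSize = VTableResolver_Init(&g_slot0, resolveCallback,
                                          universalTransition, &g_slotCount);
    }

    /* Every thunk has the same size, so the thunk for slot N sits N * thunkSize bytes past slot 0. */
    static void *GetPregeneratedSlotThunk(int slot)
    {
        if (slot < 0 || slot >= g_slotCount)
            return NULL;   /* beyond the pre-generated range; use the common calling stub instead */
        return (void *)((uintptr_t)g_slot0 + (uintptr_t)slot * (uintptr_t)g_thunkSize);
    }
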
diff --git a/src/Native/System.Private.TypeLoader.Native/arm64/ConstrainedCallSupportHelpers.asm b/src/Native/System.Private.TypeLoader.Native/arm64/ConstrainedCallSupportHelpers.asm
new file mode 100644
index 000000000..f38b392c4
--- /dev/null
+++ b/src/Native/System.Private.TypeLoader.Native/arm64/ConstrainedCallSupportHelpers.asm
@@ -0,0 +1,67 @@
+;; Licensed to the .NET Foundation under one or more agreements.
+;; The .NET Foundation licenses this file to you under the MIT license.
+;; See the LICENSE file in the project root for more information.
+
+#include "ksarm64.h"
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; DATA SECTIONS ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+POINTER_SIZE equ 0x08
+
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ConstrainedCall Support Helpers ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+
+;;
+;; Note: The "__jmpstub__" prefix is used to indicate to debugger
+;; that it must step-through this stub when it encounters it while
+;; stepping.
+;;
+;; INPUT: xip0: AddressOfAddressOfFunctionToCallAfterDereferencingThis
+;;
+ LEAF_ENTRY __jmpstub__ConstrainedCallSupport_DerefThisAndCall_CommonCallingStub
+ ldr x12, [xip0] ; Load tail jump target
+ ldr x0, [x0] ; Dereference this to get real function pointer
+ ret x12
+ LEAF_END __jmpstub__ConstrainedCallSupport_DerefThisAndCall_CommonCallingStub
+
+;;
+;; void ConstrainedCallSupport_GetStubs(IntPtr *__jmpstub__ConstrainedCallSupport_DerefThisAndCall_CommonCallingStub,
+;; IntPtr *__jmpstub__ConstrainedCallSupport_DirectConstrainedCallCommonStub)
+;;
+ LEAF_ENTRY ConstrainedCallSupport_GetStubs
+ ldr x12, =__jmpstub__ConstrainedCallSupport_DerefThisAndCall_CommonCallingStub
+ str x12, [x0]
+ ldr x12, =__jmpstub__ConstrainedCallSupport_DirectConstrainedCallCommonStub
+ str x12, [x1]
+ ret
+ LEAF_END ConstrainedCallSupport_GetStubs
+
+;;
+;; __jmpstub__ConstrainedCallSupport_DirectConstrainedCallCommonStub
+;;
+;; struct ConstrainedCallDesc
+;; {
+;; ULONG_PTR ExactTarget;
+;; ULONG_PTR LookupFunc; // Put UniversalThunk here
+;; }
+;;
+;; struct CommonCallingStubInputData
+;; {
+;; ULONG_PTR ConstrainedCallDesc;
+;; ULONG_PTR DirectConstrainedCallResolver;
+;; }
+;;
+;; INPUT: xip0: Points at CommonCallingStubInputData
+;;
+;;
+ LEAF_ENTRY __jmpstub__ConstrainedCallSupport_DirectConstrainedCallCommonStub
+ ldr xip1, [xip0] ; put ConstrainedCallDesc into xip1 (Arg to LookupFunc/Temp for getting ExactTarget)
+ ldr x12, [xip1] ; put ExactTarget into x12
+ cbnz x12, JumpToTarget ; compare against null
+ ; If we reach here, we need to use a universal thunk to call the LookupFunc
+ ldr x12, [xip1, #POINTER_SIZE] ; Get Universal thunk function pointer into x12
+ ldr xip0, [xip0, #POINTER_SIZE] ; Put DirectConstrainedCallResolver into xip0 for UniversalTransitionThunk call
+JumpToTarget
+ ret x12
+ LEAF_END __jmpstub__ConstrainedCallSupport_DirectConstrainedCallCommonStub
+
+ END
diff --git a/src/Native/System.Private.TypeLoader.Native/arm64/MethodEntrypointStubs.asm b/src/Native/System.Private.TypeLoader.Native/arm64/MethodEntrypointStubs.asm
new file mode 100644
index 000000000..0e0cdcc5d
--- /dev/null
+++ b/src/Native/System.Private.TypeLoader.Native/arm64/MethodEntrypointStubs.asm
@@ -0,0 +1,5 @@
+;; Licensed to the .NET Foundation under one or more agreements.
+;; The .NET Foundation licenses this file to you under the MIT license.
+;; See the LICENSE file in the project root for more information.
+
+ END
diff --git a/src/Native/System.Private.TypeLoader.Native/arm64/VTableResolver.asm b/src/Native/System.Private.TypeLoader.Native/arm64/VTableResolver.asm
new file mode 100644
index 000000000..d22395850
--- /dev/null
+++ b/src/Native/System.Private.TypeLoader.Native/arm64/VTableResolver.asm
@@ -0,0 +1,155 @@
+;; Licensed to the .NET Foundation under one or more agreements.
+;; The .NET Foundation licenses this file to you under the MIT license.
+;; See the LICENSE file in the project root for more information.
+
+#include "kxarm64.h"
+
+ DATAAREA
+
+g_vtableResolveCallback DCQ 0 ; Address of virtual dispatch resolution callback
+g_universalTransition DCQ 0 ; Address of RhpUniversalTransition thunk
+
+VTableThunkSize EQU 0x20
+;; TODO - do something similar to Redhawk's asmoffsets to compute the value at compile time
+EETypeVTableOffset EQU 0x18
+PointerSize EQU 8
+
+ TEXTAREA
+
+;;
+;; When an EEType is created, its VTable entries are initially filled with calls to the VTableSlot thunks for the
+;; appropriate slot numbers. When a thunk is invoked, it checks whether the slot has already been resolved. If so, it
+;; jumps straight to the method stored in the slot; otherwise it invokes the dispatch resolution callback (through
+;; the universal transition thunk), which updates the VTable slot and then makes the call.
+;;
+
+;;
+;; Note: The "__jmpstub__" prefix is used to indicate to debugger
+;; that it must step-through this stub when it encounters it while
+;; stepping.
+;;
+
+ ;; int VTableResolver_Init(IntPtr *__jmpstub__VTableResolverSlot0,
+ ;; IntPtr vtableResolveCallback,
+ ;; IntPtr universalTransition,
+ ;; int *slotCount)
+ ;; Returns the size of the pre-generated thunks.
+ ;;
+ LEAF_ENTRY VTableResolver_Init
+ adr x9, __jmpstub__VTableSlot00
+ str x9, [x0]
+ ADDROF x10, g_vtableResolveCallback
+ str x1, [x10]
+ ADDROF x11, g_universalTransition
+ str x2, [x11]
+ mov x12, 100 ; This file defines 100 slot helpers
+ str x12, [x3]
+ mov x0, VTableThunkSize ; Each thunk is VTableThunkSize in bytes
+ ret
+ LEAF_END VTableResolver_Init
+
+ ;; void* VTableResolver_GetCommonCallingStub()
+ ;; Returns the address of the common calling stub.
+ ;;
+ LEAF_ENTRY VTableResolver_GetCommonCallingStub
+ adr x0, __jmpstub__VTableResolver_CommonCallingStub
+ ret
+ LEAF_END VTableResolver_GetCommonCallingStub
+
+ ;; __jmpstub__VTableResolver_CommonCallingStub(?)
+ ;; Used when we dynamically need a VTableResolver not pre-generated.
+ ;;
+ ;; xip0 contains a pointer to a VTableResolverStruct
+ ;; struct VTableResolverStruct
+ ;; {
+ ;; IntPtr offsetFromStartOfEETypePtr;
+ ;; IntPtr VTableThunkAddress;
+ ;; };
+ ;;
+ LEAF_ENTRY __jmpstub__VTableResolver_CommonCallingStub
+ ;; Load the EEType pointer and add (EETypeVTableOffset + $slot_number * PointerSize) to calculate the VTable slot
+ ;; address. Compare the pointer stored in the slot to the address of the thunk being executed. If the values are
+ ;; equal, call the dispatch resolution callback; otherwise, call the function pointer stored in the slot.
+
+ ;; x9 = EEType pointer (x0 is the "this" pointer for the call being made)
+ ldr x9, [x0]
+ ;; xip1 = slot offset relative to EEType
+ ldr xip1, [xip0]
+ ;; x10 = function pointer stored in the slot
+ ldr x10, [x9,xip1]
+ ;; x11 = address of this thunk
+ ldr x11, [xip0,#PointerSize]
+ ;; Compare two pointers
+ cmp x10, x11
+ ;; If the method is not resolved yet, resolve it first
+ beq __jmpstub__JumpToVTableResolver
+ ;; Otherwise, just call it
+ br x10
+ LEAF_END __jmpstub__VTableResolver_CommonCallingStub
+
+ ;; Calls the dispatch resolution callback with xip1 set to EETypeVTableOffset + ($slot_number * PointerSize)
+ LEAF_ENTRY __jmpstub__JumpToVTableResolver
+ ADDROF xip0, g_vtableResolveCallback
+ ldr xip0, [xip0]
+ ADDROF x9, g_universalTransition
+ ldr x9, [x9]
+ br x9
+ LEAF_END __jmpstub__JumpToVTableResolver
+
+ MACRO
+ VTableThunkDecl $name, $slot_number
+ ;; Force all thunks to be the same size, which allows them to be indexed
+ ALIGN 16
+ LEAF_ENTRY __jmpstub__$name
+ ;; Load the EEType pointer and add (EETypeVTableOffset + $slot_number * PointerSize) to calculate the VTable slot
+ ;; address. Compare the pointer stored in the slot to the address of the thunk being executed. If the values are
+ ;; equal, call the dispatch resolution callback; otherwise, call the function pointer stored in the slot.
+
+ ;; x9 = EEType pointer (x0 is the "this" pointer for the call being made)
+ ldr x9, [x0]
+ ;; xip1 = slot offset relative to EEType
+ mov xip1, EETypeVTableOffset + ($slot_number * PointerSize)
+ ;; x10 = function pointer stored in the slot
+ ldr x10, [x9,xip1]
+ ;; x11 = address of this thunk
+ adr x11, __jmpstub__$name
+ ;; Compare two pointers
+ cmp x10, x11
+ ;; If the method is not resolved yet, resolve it first
+ beq __jmpstub__JumpToVTableResolver
+ ;; Otherwise, just call it
+ br x10
+ LEAF_END __jmpstub__$name
+ MEND
+
+ MACRO
+ VTableThunkDeclTen $slotnumberDecimal
+ VTableThunkDecl VTableSlot$slotnumberDecimal0,$slotnumberDecimal0
+ VTableThunkDecl VTableSlot$slotnumberDecimal1,$slotnumberDecimal1
+ VTableThunkDecl VTableSlot$slotnumberDecimal2,$slotnumberDecimal2
+ VTableThunkDecl VTableSlot$slotnumberDecimal3,$slotnumberDecimal3
+ VTableThunkDecl VTableSlot$slotnumberDecimal4,$slotnumberDecimal4
+ VTableThunkDecl VTableSlot$slotnumberDecimal5,$slotnumberDecimal5
+ VTableThunkDecl VTableSlot$slotnumberDecimal6,$slotnumberDecimal6
+ VTableThunkDecl VTableSlot$slotnumberDecimal7,$slotnumberDecimal7
+ VTableThunkDecl VTableSlot$slotnumberDecimal8,$slotnumberDecimal8
+ VTableThunkDecl VTableSlot$slotnumberDecimal9,$slotnumberDecimal9
+ MEND
+
+ MACRO
+ VTableThunkDeclHundred $slotnumberPerHundred
+ VTableThunkDeclTen $slotnumberPerHundred0
+ VTableThunkDeclTen $slotnumberPerHundred1
+ VTableThunkDeclTen $slotnumberPerHundred2
+ VTableThunkDeclTen $slotnumberPerHundred3
+ VTableThunkDeclTen $slotnumberPerHundred4
+ VTableThunkDeclTen $slotnumberPerHundred5
+ VTableThunkDeclTen $slotnumberPerHundred6
+ VTableThunkDeclTen $slotnumberPerHundred7
+ VTableThunkDeclTen $slotnumberPerHundred8
+ VTableThunkDeclTen $slotnumberPerHundred9
+ MEND
+
+ VTableThunkDeclHundred ""
+
+ END
diff --git a/src/Native/System.Private.TypeLoader.Native/i386/ConstrainedCallSupportHelpers.asm b/src/Native/System.Private.TypeLoader.Native/i386/ConstrainedCallSupportHelpers.asm
new file mode 100644
index 000000000..8b3298180
--- /dev/null
+++ b/src/Native/System.Private.TypeLoader.Native/i386/ConstrainedCallSupportHelpers.asm
@@ -0,0 +1,100 @@
+;; Licensed to the .NET Foundation under one or more agreements.
+;; The .NET Foundation licenses this file to you under the MIT license.
+;; See the LICENSE file in the project root for more information.
+
+.586
+.model flat
+option casemap:none
+.code
+
+;; -----------------------------------------------------------------------------------------------------------
+;; standard macros
+;; -----------------------------------------------------------------------------------------------------------
+LEAF_ENTRY macro Name, Section
+ Section segment para 'CODE'
+ public Name
+ Name proc
+endm
+
+LEAF_END macro Name, Section
+ Name endp
+ Section ends
+endm
+
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; DATA SECTIONS ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ConstrainedCall Support Helpers ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+
+
+;;
+;; Note: The "__jmpstub__" prefix is used to indicate to debugger
+;; that it must step-through this stub when it encounters it while
+;; stepping.
+;;
+
+;;
+;; __jmpstub__ConstrainedCallSupport_DerefThisAndCall_CommonCallingStub
+;;
+;; eax - AddressOfAddressOfFunctionToCallAfterDereferencingThis
+;;
+LEAF_ENTRY __jmpstub__ConstrainedCallSupport_DerefThisAndCall_CommonCallingStub, _TEXT
+ mov eax, [eax] ; Get function pointer to call
+ mov ecx, [ecx] ; Dereference this to get the real this pointer
+ jmp eax
+LEAF_END __jmpstub__ConstrainedCallSupport_DerefThisAndCall_CommonCallingStub, _TEXT
+
+;;
+;; void ConstrainedCallSupport_GetStubs(IntPtr *__jmpstub__ConstrainedCallSupport_DerefThisAndCall_CommonCallingStub,
+;; IntPtr *__jmpstub__ConstrainedCallSupport_DirectConstrainedCallCommonStub)
+;;
+LEAF_ENTRY ConstrainedCallSupport_GetStubs, _TEXT
+ lea eax, [__jmpstub__ConstrainedCallSupport_DerefThisAndCall_CommonCallingStub]
+ mov ecx, [esp+04h]
+ mov [ecx], eax
+ lea eax, [__jmpstub__ConstrainedCallSupport_DirectConstrainedCallCommonStub]
+ mov ecx, [esp+08h]
+ mov [ecx], eax
+ retn 8h
+LEAF_END ConstrainedCallSupport_GetStubs, _TEXT
+
+;;
+;; __jmpstub__ConstrainedCallSupport_DirectConstrainedCallCommonStub
+;;
+;; struct ConstrainedCallDesc
+;; {
+;; ULONG_PTR ExactTarget;
+;; ULONG_PTR LookupFunc; // Put UniversalThunk here
+;; }
+;;
+;; struct CommonCallingStubInputData
+;; {
+;; ULONG_PTR ConstrainedCallDesc;
+;; ULONG_PTR DirectConstrainedCallResolver;
+;; }
+;;
+;; eax - Points at CommonCallingStubInputData
+;;
+;;
+LEAF_ENTRY __jmpstub__ConstrainedCallSupport_DirectConstrainedCallCommonStub, _TEXT
+;; eax points at CommonCallingStubInputData
+ push eax ; save eax
+ mov eax,[eax] ; put ConstrainedCallDesc in eax
+ mov eax,[eax] ; Load ExactTarget into eax
+ test eax,eax ; Check ExactTarget for null
+ jz NeedsHelperCall
+ add esp,4 ; Discard the saved eax so esp is back to its value at the start of the function
+ jmp eax ; TailCall exact target
+NeedsHelperCall:
+ pop eax ; Restore back to exact state that was present at start of function
+;; eax points at CommonCallingStubInputData
+ push ebp
+ mov ebp, esp
+ push [eax] ; First argument (ConstrainedCallDesc)
+ push [eax+4] ; Second argument (DirectConstrainedCallResolver)
+ mov eax,[eax] ; Load ConstrainedCallDesc into eax
+ mov eax,[eax+4] ; Load Universal Thunk address into eax
+ jmp eax
+LEAF_END __jmpstub__ConstrainedCallSupport_DirectConstrainedCallCommonStub, _TEXT
+
+end
diff --git a/src/Native/System.Private.TypeLoader.Native/i386/MethodEntrypointStubs.asm b/src/Native/System.Private.TypeLoader.Native/i386/MethodEntrypointStubs.asm
new file mode 100644
index 000000000..026e0aa6d
--- /dev/null
+++ b/src/Native/System.Private.TypeLoader.Native/i386/MethodEntrypointStubs.asm
@@ -0,0 +1,5 @@
+;; Licensed to the .NET Foundation under one or more agreements.
+;; The .NET Foundation licenses this file to you under the MIT license.
+;; See the LICENSE file in the project root for more information.
+
+end
\ No newline at end of file
diff --git a/src/Native/System.Private.TypeLoader.Native/i386/VTableResolver.asm b/src/Native/System.Private.TypeLoader.Native/i386/VTableResolver.asm
new file mode 100644
index 000000000..d81948578
--- /dev/null
+++ b/src/Native/System.Private.TypeLoader.Native/i386/VTableResolver.asm
@@ -0,0 +1,246 @@
+;; Licensed to the .NET Foundation under one or more agreements.
+;; The .NET Foundation licenses this file to you under the MIT license.
+;; See the LICENSE file in the project root for more information.
+
+.586
+.model flat
+option casemap:none
+
+;; -----------------------------------------------------------------------------------------------------------
+;; standard macros
+;; -----------------------------------------------------------------------------------------------------------
+LEAF_ENTRY macro Name, Section
+ Section segment para 'CODE'
+ public Name
+ Name proc
+endm
+
+LEAF_END macro Name, Section
+ Name endp
+ Section ends
+endm
+
+;;
+;; When an EEType is created, its vtable entries are initially filled with calls to the vtable thunk for the
+;; appropriate slot number. When a thunk is invoked it checks whether the slot has already been resolved. If so, it
+;; jumps straight to the method stored in the slot; otherwise it invokes the resolution callback (through the
+;; universal transition thunk), which updates the vtable slot and then makes the call.
+;;
+
+VTableThunkSize equ 20h
+;; TODO - do something similar to Redhawk's asmoffsets to compute the value at compile time
+EETypeVTableOffset equ 14h
+PointerSize equ 4
+
+.data
+
+g_vtableResolveCallback dword 0 ; The vtableresolve method
+g_universalTransition dword 0 ; The address of Redhawk's UniversalTransition thunk
+
+.code
+
+;;
+;; __jmpstub__VTableResolver_CommonCallingStub(?)
+;; Used when we dynamically need a VTableResolver not pre-generated
+;;
+;; eax contains a pointer to a VTableResolverStruct
+;; struct VTableResolverStruct
+;; {
+;; int offsetFromStartOfEETypePtr;
+;; IntPtr VTableThunkAddress;
+;; };
+;;
+LEAF_ENTRY __jmpstub__VTableResolver_CommonCallingStub, _TEXT
+ ;; eax <- stub info
+ push esi
+ push edi
+ mov esi, [eax] ; Get EEType offset into esi
+
+ mov edi, [eax + 4]
+ ;; edi now has specific address of the exact thunk being processed here
+
+ ;; ecx is the this pointer to the call being made
+ mov eax, [ecx]
+ ;; eax is the EEType pointer; add the VTableOffset + slot_number * pointer size to get the vtable entry
+
+ ;; compare that value to the address of the thunk being executed; if the values are equal,
+ ;; call the resolver, otherwise call the method
+
+ mov eax, [eax + esi]
+ cmp eax, edi
+ je SLOW_DYNAMIC_STUB
+ pop edi
+ pop esi
+ jmp eax
+SLOW_DYNAMIC_STUB:
+ ;; Capture EEType offset into eax
+ mov eax,esi
+
+ ;; Restore edi and esi so that we can set up the call into the universal transition thunk
+ pop edi
+ pop esi
+
+ jmp __jmpstub__JumpToVTableResolver
+LEAF_END __jmpstub__VTableResolver_CommonCallingStub, _TEXT
+
+;; Returns the size of the pre-generated thunks
+;; int VTableResolver_Init(IntPtr *__jmpstub__VTableResolverSlot0,
+;; IntPtr vtableResolveCallback,
+;; IntPtr universalTransition,
+;; int *slotCount)
+;;
+LEAF_ENTRY VTableResolver_Init, _TEXT
+ lea eax, dword ptr [__jmpstub__VTableSlot0]
+ mov ecx, [esp+04h]
+ mov [ecx], eax
+ mov ecx, [esp+08h]
+ mov g_vtableResolveCallback, ecx
+ mov ecx, [esp+0Ch]
+ mov g_universalTransition, ecx
+ mov ecx, [esp+10h]
+ mov eax, 100 ;; 100 Pregenerated thunks
+ mov [ecx], eax
+ mov eax, VTableThunkSize
+ ret
+LEAF_END VTableResolver_Init, _TEXT
+
+;; void* VTableResolver_GetCommonCallingStub()
+;; - Get the address of the common calling stub
+LEAF_ENTRY VTableResolver_GetCommonCallingStub, _TEXT
+ lea eax, [__jmpstub__VTableResolver_CommonCallingStub]
+ ret
+LEAF_END VTableResolver_GetCommonCallingStub, _TEXT
+
+
+;; The slot thunks and the common calling stub jump here with eax set to EETypeVTableOffset + (slot_number * PointerSize)
+LEAF_ENTRY __jmpstub__JumpToVTableResolver, _TEXT
+ push ebp
+ mov ebp, esp
+ push eax ; First argument
+ push g_vtableResolveCallback
+ jmp g_universalTransition
+LEAF_END __jmpstub__JumpToVTableResolver, _TEXT
+
+
+VTableThunkDecl macro name, slot_number
+
+ALIGN 16 ; The alignment here forces the thunks to be the same size, which allows them to be indexed
+LEAF_ENTRY name, _TEXT
+ ;; ecx is the this pointer to the call being made
+ mov eax, [ecx]
+ ;; eax is the EEType pointer; add the VTableOffset + slot_number * pointer size to get the vtable entry
+ ;; compare that value to the address of the thunk being executed; if the values are equal,
+ ;; call the resolver, otherwise call the method
+ mov eax, [eax + EETypeVTableOffset + slot_number * PointerSize]
+ cmp eax, name
+ je SLOW
+ jmp eax
+SLOW:
+ mov eax, EETypeVTableOffset + slot_number * PointerSize
+ jmp __jmpstub__JumpToVTableResolver
+LEAF_END name, _TEXT
+
+ endm
+
+VTableThunkDecl __jmpstub__VTableSlot0,0
+VTableThunkDecl __jmpstub__VTableSlot1,1
+VTableThunkDecl __jmpstub__VTableSlot2,2
+VTableThunkDecl __jmpstub__VTableSlot3,3
+VTableThunkDecl __jmpstub__VTableSlot4,4
+VTableThunkDecl __jmpstub__VTableSlot5,5
+VTableThunkDecl __jmpstub__VTableSlot6,6
+VTableThunkDecl __jmpstub__VTableSlot7,7
+VTableThunkDecl __jmpstub__VTableSlot8,8
+VTableThunkDecl __jmpstub__VTableSlot9,9
+VTableThunkDecl __jmpstub__VTableSlot10,10
+VTableThunkDecl __jmpstub__VTableSlot11,11
+VTableThunkDecl __jmpstub__VTableSlot12,12
+VTableThunkDecl __jmpstub__VTableSlot13,13
+VTableThunkDecl __jmpstub__VTableSlot14,14
+VTableThunkDecl __jmpstub__VTableSlot15,15
+VTableThunkDecl __jmpstub__VTableSlot16,16
+VTableThunkDecl __jmpstub__VTableSlot17,17
+VTableThunkDecl __jmpstub__VTableSlot18,18
+VTableThunkDecl __jmpstub__VTableSlot19,19
+VTableThunkDecl __jmpstub__VTableSlot20,20
+VTableThunkDecl __jmpstub__VTableSlot21,21
+VTableThunkDecl __jmpstub__VTableSlot22,22
+VTableThunkDecl __jmpstub__VTableSlot23,23
+VTableThunkDecl __jmpstub__VTableSlot24,24
+VTableThunkDecl __jmpstub__VTableSlot25,25
+VTableThunkDecl __jmpstub__VTableSlot26,26
+VTableThunkDecl __jmpstub__VTableSlot27,27
+VTableThunkDecl __jmpstub__VTableSlot28,28
+VTableThunkDecl __jmpstub__VTableSlot29,29
+VTableThunkDecl __jmpstub__VTableSlot30,30
+VTableThunkDecl __jmpstub__VTableSlot31,31
+VTableThunkDecl __jmpstub__VTableSlot32,32
+VTableThunkDecl __jmpstub__VTableSlot33,33
+VTableThunkDecl __jmpstub__VTableSlot34,34
+VTableThunkDecl __jmpstub__VTableSlot35,35
+VTableThunkDecl __jmpstub__VTableSlot36,36
+VTableThunkDecl __jmpstub__VTableSlot37,37
+VTableThunkDecl __jmpstub__VTableSlot38,38
+VTableThunkDecl __jmpstub__VTableSlot39,39
+VTableThunkDecl __jmpstub__VTableSlot40,40
+VTableThunkDecl __jmpstub__VTableSlot41,41
+VTableThunkDecl __jmpstub__VTableSlot42,42
+VTableThunkDecl __jmpstub__VTableSlot43,43
+VTableThunkDecl __jmpstub__VTableSlot44,44
+VTableThunkDecl __jmpstub__VTableSlot45,45
+VTableThunkDecl __jmpstub__VTableSlot46,46
+VTableThunkDecl __jmpstub__VTableSlot47,47
+VTableThunkDecl __jmpstub__VTableSlot48,48
+VTableThunkDecl __jmpstub__VTableSlot49,49
+VTableThunkDecl __jmpstub__VTableSlot50,50
+VTableThunkDecl __jmpstub__VTableSlot51,51
+VTableThunkDecl __jmpstub__VTableSlot52,52
+VTableThunkDecl __jmpstub__VTableSlot53,53
+VTableThunkDecl __jmpstub__VTableSlot54,54
+VTableThunkDecl __jmpstub__VTableSlot55,55
+VTableThunkDecl __jmpstub__VTableSlot56,56
+VTableThunkDecl __jmpstub__VTableSlot57,57
+VTableThunkDecl __jmpstub__VTableSlot58,58
+VTableThunkDecl __jmpstub__VTableSlot59,59
+VTableThunkDecl __jmpstub__VTableSlot60,60
+VTableThunkDecl __jmpstub__VTableSlot61,61
+VTableThunkDecl __jmpstub__VTableSlot62,62
+VTableThunkDecl __jmpstub__VTableSlot63,63
+VTableThunkDecl __jmpstub__VTableSlot64,64
+VTableThunkDecl __jmpstub__VTableSlot65,65
+VTableThunkDecl __jmpstub__VTableSlot66,66
+VTableThunkDecl __jmpstub__VTableSlot67,67
+VTableThunkDecl __jmpstub__VTableSlot68,68
+VTableThunkDecl __jmpstub__VTableSlot69,69
+VTableThunkDecl __jmpstub__VTableSlot70,70
+VTableThunkDecl __jmpstub__VTableSlot71,71
+VTableThunkDecl __jmpstub__VTableSlot72,72
+VTableThunkDecl __jmpstub__VTableSlot73,73
+VTableThunkDecl __jmpstub__VTableSlot74,74
+VTableThunkDecl __jmpstub__VTableSlot75,75
+VTableThunkDecl __jmpstub__VTableSlot76,76
+VTableThunkDecl __jmpstub__VTableSlot77,77
+VTableThunkDecl __jmpstub__VTableSlot78,78
+VTableThunkDecl __jmpstub__VTableSlot79,79
+VTableThunkDecl __jmpstub__VTableSlot80,80
+VTableThunkDecl __jmpstub__VTableSlot81,81
+VTableThunkDecl __jmpstub__VTableSlot82,82
+VTableThunkDecl __jmpstub__VTableSlot83,83
+VTableThunkDecl __jmpstub__VTableSlot84,84
+VTableThunkDecl __jmpstub__VTableSlot85,85
+VTableThunkDecl __jmpstub__VTableSlot86,86
+VTableThunkDecl __jmpstub__VTableSlot87,87
+VTableThunkDecl __jmpstub__VTableSlot88,88
+VTableThunkDecl __jmpstub__VTableSlot89,89
+VTableThunkDecl __jmpstub__VTableSlot90,90
+VTableThunkDecl __jmpstub__VTableSlot91,91
+VTableThunkDecl __jmpstub__VTableSlot92,92
+VTableThunkDecl __jmpstub__VTableSlot93,93
+VTableThunkDecl __jmpstub__VTableSlot94,94
+VTableThunkDecl __jmpstub__VTableSlot95,95
+VTableThunkDecl __jmpstub__VTableSlot96,96
+VTableThunkDecl __jmpstub__VTableSlot97,97
+VTableThunkDecl __jmpstub__VTableSlot98,98
+VTableThunkDecl __jmpstub__VTableSlot99,99
+
+end
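
Across all four architectures, the dynamically created thunks differ from the pre-generated ones only in where the slot offset and thunk address come from: the common calling stub reads both out of a VTableResolverStruct instead of having them baked into its code. A closing C sketch of that dynamic case (illustrative only; CommonStubTarget is a hypothetical name):

    /* Illustrative sketch of the dynamic case handled by __jmpstub__VTableResolver_CommonCallingStub. */
    #include <stdint.h>

    typedef void *IntPtr;

    /* From the assembly comments: each dynamically created thunk carries one of these. */
    struct VTableResolverStruct
    {
        int    offsetFromStartOfEETypePtr;  /* EETypeVTableOffset + slot * PointerSize */
        IntPtr VTableThunkAddress;          /* address of that thunk, used to detect an unresolved slot */
    };

    /* The common stub reads both fields from the struct instead of having the slot
       offset and its own address baked into the code, so one body serves every slot. */
    static uintptr_t CommonStubTarget(void *thisPtr, struct VTableResolverStruct *info,
                                      uintptr_t vtableResolveCallback)
    {
        uintptr_t eeType    = *(uintptr_t *)thisPtr;
        uintptr_t slotValue = *(uintptr_t *)(eeType + (uintptr_t)info->offsetFromStartOfEETypePtr);

        if (slotValue == (uintptr_t)info->VTableThunkAddress)
            return vtableResolveCallback;   /* unresolved: go resolve (and patch) the slot */
        return slotValue;                   /* resolved: call the method stored in the slot */
    }
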