1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209
|
%def bincmp(condition=""):
    /*
     * Generic two-operand compare-and-branch operation. Provide a "condition"
     * fragment that specifies the comparison to perform: it is pasted in as
     * an ARM condition-code suffix on the branch (e.g. "eq", "lt").
     *
     * A taken branch funnels into MterpCommonTakenBranchNoFlags with the
     * signed branch offset, in 16-bit code units, in rINST. On the not-taken
     * path we first check whether the JIT has requested an OSR re-entry
     * check (rPROFILE == JIT_CHECK_OSR) before advancing to the next
     * instruction.
     *
     * For: if-eq, if-ne, if-lt, if-ge, if-gt, if-le
     */
    /* if-cmp vA, vB, +CCCC */
    mov r1, rINST, lsr #12 @ r1<- B (high nibble of the register byte)
    ubfx r0, rINST, #8, #4 @ r0<- A (low nibble of the register byte)
    GET_VREG r3, r1 @ r3<- vB
    GET_VREG r0, r0 @ r0<- vA (r0 reused: index in, value out)
    FETCH_S rINST, 1 @ rINST<- branch offset, in code units (sign-extended)
    cmp r0, r3 @ compare (vA, vB); sets flags for b${condition}
    b${condition} MterpCommonTakenBranchNoFlags
    cmp rPROFILE, #JIT_CHECK_OSR @ possible OSR re-entry?
    beq .L_check_not_taken_osr
    FETCH_ADVANCE_INST 2 @ not taken: skip the 2-code-unit instruction
    GET_INST_OPCODE ip @ extract opcode from rINST
    GOTO_OPCODE ip @ jump to next instruction
%def zcmp(condition=""):
    /*
     * Generic one-operand compare-against-zero-and-branch operation. Provide
     * a "condition" fragment that specifies the comparison to perform: it is
     * pasted in as an ARM condition-code suffix on the branch (e.g. "eq",
     * "lt" — note lt/ge/gt/le are the signed condition codes).
     *
     * A taken branch funnels into MterpCommonTakenBranchNoFlags with the
     * signed branch offset, in 16-bit code units, in rINST. On the not-taken
     * path we first check whether the JIT has requested an OSR re-entry
     * check before advancing to the next instruction.
     *
     * for: if-eqz, if-nez, if-ltz, if-gez, if-gtz, if-lez
     */
    /* if-cmp vAA, +BBBB */
    mov r0, rINST, lsr #8 @ r0<- AA
    GET_VREG r0, r0 @ r0<- vAA (r0 reused: index in, value out)
    FETCH_S rINST, 1 @ rINST<- branch offset, in code units (sign-extended)
    cmp r0, #0 @ compare (vAA, 0); sets flags for b${condition}
    b${condition} MterpCommonTakenBranchNoFlags
    cmp rPROFILE, #JIT_CHECK_OSR @ possible OSR re-entry?
    beq .L_check_not_taken_osr
    FETCH_ADVANCE_INST 2 @ not taken: skip the 2-code-unit instruction
    GET_INST_OPCODE ip @ extract opcode from rINST
    GOTO_OPCODE ip @ jump to next instruction
%def op_goto():
    /*
     * Unconditional branch, 8-bit offset.
     *
     * The branch distance is a signed code-unit offset, which we need to
     * double to get a byte offset; the doubling is done by the common
     * branch code, so here we only sign-extend the AA byte into rINST.
     */
    /* goto +AA */
    sbfx rINST, rINST, #8, #8 @ rINST<- ssssssAA (sign-extended)
    b MterpCommonTakenBranchNoFlags
%def op_goto_16():
    /*
     * Unconditional branch, 16-bit offset.
     *
     * The branch distance is a signed code-unit offset, which we need to
     * double to get a byte offset; the doubling is done by the common
     * branch code. FETCH_S sign-extends the 16-bit operand into rINST.
     */
    /* goto/16 +AAAA */
    FETCH_S rINST, 1 @ rINST<- ssssAAAA (sign-extended)
    b MterpCommonTakenBranchNoFlags
%def op_goto_32():
    /*
     * Unconditional branch, 32-bit offset.
     *
     * The branch distance is a signed code-unit offset, which we need to
     * double to get a byte offset.
     *
     * Unlike most opcodes, this one is allowed to branch to itself, so
     * our "backward branch" test must be "<=0" instead of "<0". The
     * flag-setting "orrs" assembles the 32-bit offset AND sets N/Z from
     * it, and we branch to MterpCommonTakenBranch (NOT the NoFlags
     * variant), which presumably consumes those flags for that test.
     * (NOTE(review): an older comment here claimed "adds" was used to set
     * the V bit; the code actually uses orrs, which does not affect V.)
     */
    /* goto/32 +AAAAAAAA */
    FETCH r0, 1 @ r0<- aaaa (lo)
    FETCH r3, 2 @ r3<- AAAA (hi)
    orrs rINST, r0, r3, lsl #16 @ rINST<- AAAAaaaa; sets N/Z flags
    b MterpCommonTakenBranch
%def op_if_eq():
    /* if-eq vA, vB, +CCCC: branch when vA == vB */
% bincmp(condition="eq")
%def op_if_eqz():
    /* if-eqz vAA, +BBBB: branch when vAA == 0 */
% zcmp(condition="eq")
%def op_if_ge():
    /* if-ge vA, vB, +CCCC: branch when vA >= vB (signed) */
% bincmp(condition="ge")
%def op_if_gez():
    /* if-gez vAA, +BBBB: branch when vAA >= 0 (signed) */
% zcmp(condition="ge")
%def op_if_gt():
    /* if-gt vA, vB, +CCCC: branch when vA > vB (signed) */
% bincmp(condition="gt")
%def op_if_gtz():
    /* if-gtz vAA, +BBBB: branch when vAA > 0 (signed) */
% zcmp(condition="gt")
%def op_if_le():
    /* if-le vA, vB, +CCCC: branch when vA <= vB (signed) */
% bincmp(condition="le")
%def op_if_lez():
    /* if-lez vAA, +BBBB: branch when vAA <= 0 (signed) */
% zcmp(condition="le")
%def op_if_lt():
    /* if-lt vA, vB, +CCCC: branch when vA < vB (signed) */
% bincmp(condition="lt")
%def op_if_ltz():
    /* if-ltz vAA, +BBBB: branch when vAA < 0 (signed) */
% zcmp(condition="lt")
%def op_if_ne():
    /* if-ne vA, vB, +CCCC: branch when vA != vB */
% bincmp(condition="ne")
%def op_if_nez():
    /* if-nez vAA, +BBBB: branch when vAA != 0 */
% zcmp(condition="ne")
%def op_packed_switch(func="MterpDoPackedSwitch"):
    /*
     * Handle a packed-switch or sparse-switch instruction. In both cases
     * we decode it and hand it off to a helper function: the helper gets
     * a pointer to the switch payload (PC + BBBBbbbb*2) and the test
     * value vAA, and returns the branch offset in code units.
     *
     * We don't really expect backward branches in a switch statement, but
     * they're perfectly legal, so we check for them here.
     *
     * for: packed-switch, sparse-switch
     */
    /* op vAA, +BBBB */
    FETCH r0, 1 @ r0<- bbbb (lo)
    FETCH r1, 2 @ r1<- BBBB (hi)
    mov r3, rINST, lsr #8 @ r3<- AA
    orr r0, r0, r1, lsl #16 @ r0<- BBBBbbbb
    GET_VREG r1, r3 @ r1<- vAA
    add r0, rPC, r0, lsl #1 @ r0<- PC + BBBBbbbb*2 (payload address)
    bl $func @ r0<- code-unit branch offset
    movs rINST, r0 @ rINST<- offset; "movs" also sets N/Z for the helper
    b MterpCommonTakenBranch @ flags-consuming variant (see op_goto_32)
%def op_return():
    /*
     * Return a 32-bit value.
     *
     * Issues the constructor memory fence, then performs a suspend check
     * if the thread has a suspend-or-checkpoint request pending, and
     * finally jumps to MterpReturn with the result in r0/r1 (r1, the high
     * word of the 64-bit result slot, zeroed; cf. op_return_wide).
     *
     * for: return, return-object
     */
    /* op vAA */
    .extern MterpThreadFenceForConstructor
    bl MterpThreadFenceForConstructor
    ldr lr, [rSELF, #THREAD_FLAGS_OFFSET] @ lr used as scratch: we never "ret", we exit via MterpReturn
    mov r0, rSELF
    ands lr, #THREAD_SUSPEND_OR_CHECKPOINT_REQUEST @ sets Z when no request pending
    blne MterpSuspendCheck @ (self) -- conditional call, only if a flag was set
    mov r2, rINST, lsr #8 @ r2<- AA
    GET_VREG r0, r2 @ r0<- vAA
    mov r1, #0 @ r1<- 0 (high word of result)
    b MterpReturn
%def op_return_object():
    /* return-object vAA: same code path as return (see op_return) */
% op_return()
%def op_return_void():
    /*
     * Return from a void method.
     *
     * Same shape as op_return — constructor fence, then a conditional
     * suspend check — but the r0/r1 result pair is simply zeroed.
     */
    /* return-void */
    .extern MterpThreadFenceForConstructor
    bl MterpThreadFenceForConstructor
    ldr lr, [rSELF, #THREAD_FLAGS_OFFSET] @ lr used as scratch; we exit via MterpReturn
    mov r0, rSELF
    ands lr, #THREAD_SUSPEND_OR_CHECKPOINT_REQUEST @ sets Z when no request pending
    blne MterpSuspendCheck @ (self) -- conditional call, only if a flag was set
    mov r0, #0 @ zero result pair
    mov r1, #0
    b MterpReturn
%def op_return_void_no_barrier():
    /*
     * Return from a void method, without the constructor fence that
     * op_return_void issues. Otherwise identical: conditional suspend
     * check, then MterpReturn with a zeroed r0/r1 result pair.
     */
    ldr lr, [rSELF, #THREAD_FLAGS_OFFSET] @ lr used as scratch; we exit via MterpReturn
    mov r0, rSELF
    ands lr, #THREAD_SUSPEND_OR_CHECKPOINT_REQUEST @ sets Z when no request pending
    blne MterpSuspendCheck @ (self) -- conditional call, only if a flag was set
    mov r0, #0 @ zero result pair
    mov r1, #0
    b MterpReturn
%def op_return_wide():
    /*
     * Return a 64-bit value.
     *
     * Same shape as op_return — constructor fence, then a conditional
     * suspend check — but both halves of the result pair are loaded from
     * the register pair vAA/vAA+1.
     */
    /* return-wide vAA */
    .extern MterpThreadFenceForConstructor
    bl MterpThreadFenceForConstructor
    ldr lr, [rSELF, #THREAD_FLAGS_OFFSET] @ lr used as scratch; we exit via MterpReturn
    mov r0, rSELF
    ands lr, #THREAD_SUSPEND_OR_CHECKPOINT_REQUEST @ sets Z when no request pending
    blne MterpSuspendCheck @ (self) -- conditional call, only if a flag was set
    mov r2, rINST, lsr #8 @ r2<- AA
    VREG_INDEX_TO_ADDR r2, r2 @ r2<- &fp[AA]
    GET_VREG_WIDE_BY_ADDR r0, r1, r2 @ r0/r1 <- vAA/vAA+1
    b MterpReturn
%def op_sparse_switch():
    /* sparse-switch vAA, +BBBB: shares op_packed_switch, with the sparse helper */
% op_packed_switch(func="MterpDoSparseSwitch")
%def op_throw():
    /*
     * Throw an exception object in the current thread.
     *
     * A null vAA is rerouted to the NullPointerException path; otherwise
     * the object is stored as the thread's pending exception and control
     * transfers to the common exception handler.
     */
    /* throw vAA */
    EXPORT_PC @ exception paths need an up-to-date dex PC
    mov r2, rINST, lsr #8 @ r2<- AA
    GET_VREG r1, r2 @ r1<- vAA (exception object)
    cmp r1, #0 @ null object?
    beq common_errNullObject @ yes, throw an NPE instead
    str r1, [rSELF, #THREAD_EXCEPTION_OFFSET] @ thread->exception<- obj
    b MterpException
|