%def op_check_cast():
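   /* check-cast vAA, class@BBBB */
   // On a cache hit, r1 holds the resolved class and vA holds the object to
   // check. A null reference passes trivially; otherwise
   // art_quick_check_instance_of(obj, class) performs the check and throws on
   // failure. With the GC marking (rMR != 0), the cached class is first run
   // through the read barrier at label 4; a cache miss resolves the class via
   // the runtime at label 3 and loops back to 1.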
   // Fast-path which gets the class from thread-local cache.
   EXPORT_PC
   FETCH_FROM_THREAD_CACHE r1, 3f
   cmp     rMR, #0
   bne     4f
1:
   lsr     r2, rINST, #8               // r2<- A
   GET_VREG r0, r2                     // r0<- vA (object)
   cmp     r0, #0
   beq     2f
   bl      art_quick_check_instance_of
2:
   FETCH_ADVANCE_INST 2
   GET_INST_OPCODE ip
   GOTO_OPCODE ip
3:
   mov     r0, rSELF
   ldr     r1, [sp]
   mov     r2, rPC
   bl      nterp_get_class_or_allocate_object
   mov     r1, r0
   b       1b
4:
   bl      art_quick_read_barrier_mark_reg01
   b       1b

%def op_instance_of():
   /* instance-of vA, vB, class@CCCC */
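   // Writes 1 into vA if vB holds an instance of the cached class, 0
   // otherwise. A null reference in vB yields 0 (r0 is already zero when the
   // branch to 2 is taken); otherwise artInstanceOfFromCode(obj, class)
   // returns the result in r0.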
   // Fast-path which gets the class from thread-local cache.
   EXPORT_PC
   FETCH_FROM_THREAD_CACHE r1, 3f
   cmp     rMR, #0
   bne     4f
1:
   lsr     r2, rINST, #12              // r2<- B
   GET_VREG r0, r2                     // r0<- vB (object)
   cmp     r0, #0
   beq     2f
   bl      artInstanceOfFromCode
2:
   ubfx    r1, rINST, #8, #4           // r1<- A
   SET_VREG r0, r1
   FETCH_ADVANCE_INST 2
   GET_INST_OPCODE ip
   GOTO_OPCODE ip
3:
   mov     r0, rSELF
   ldr     r1, [sp]
   mov     r2, rPC
   bl      nterp_get_class_or_allocate_object
   mov     r1, r0
   b       1b
4:
   bl      art_quick_read_barrier_mark_reg01
   b       1b

%def op_iget_boolean():
%  op_iget(load="ldrb", wide="0", is_object="0")

%def op_iget_byte():
%  op_iget(load="ldrsb", wide="0", is_object="0")

%def op_iget_char():
%  op_iget(load="ldrh", wide="0", is_object="0")

%def op_iget_short():
%  op_iget(load="ldrsh", wide="0", is_object="0")

%def op_iget(load="ldr", wide="0", is_object="0"):
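   // Shared fast path for all iget variants: `load` is the load instruction,
   // `wide` selects the 64-bit path and `is_object` adds a read barrier on the
   // loaded reference. The thread-local cache yields the field offset in r0;
   // the value is read from the object in vB at that offset and stored in vA.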
%  slow_path = add_helper(lambda: op_iget_slow_path(load, wide, is_object))
   // Fast-path which gets the field from thread-local cache.
   FETCH_FROM_THREAD_CACHE r0, ${slow_path}
.L${opcode}_resume:
   lsr     r2, rINST, #12              // r2<- B
   GET_VREG r3, r2                     // r3<- object we're operating on
   ubfx    r2, rINST, #8, #4           // r2<- A
   cmp     r3, #0
   beq     common_errNullObject        // object was null
   .if $wide
   add     r3, r3, r0
   ldrd    r0, r1, [r3]
   CLEAR_SHADOW_PAIR r2, ip, lr
   VREG_INDEX_TO_ADDR r2, r2
   SET_VREG_WIDE_BY_ADDR r0, r1, r2    // fp[A] <- value
   .elseif $is_object
   $load   r0, [r3, r0]
   cmp     rMR, #0
   bne     .L${opcode}_read_barrier
.L${opcode}_resume_after_read_barrier:
   SET_VREG_OBJECT r0, r2              // fp[A] <- value
   .else
   $load   r0, [r3, r0]
   SET_VREG r0, r2                     // fp[A] <- value
   .endif
   FETCH_ADVANCE_INST 2
   GET_INST_OPCODE ip
   GOTO_OPCODE ip
   .if $is_object
.L${opcode}_read_barrier:
   bl      art_quick_read_barrier_mark_reg00
   b       .L${opcode}_resume_after_read_barrier
   .endif

%def op_iget_slow_path(load, wide, is_object):
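   // Out-of-line path: resolve the field offset through the runtime. A
   // non-negative result is a regular field and we jump back to the fast
   // path; otherwise the volatile marker is cleared and the load is followed
   // by a `dmb ish` barrier to order the volatile access.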
   mov     r0, rSELF
   ldr     r1, [sp]
   mov     r2, rPC
   mov     r3, #0
   EXPORT_PC
   bl      nterp_get_instance_field_offset
   cmp     r0, #0
   bge     .L${opcode}_resume
   CLEAR_INSTANCE_VOLATILE_MARKER r0
   lsr     r2, rINST, #12              // r2<- B
   GET_VREG r3, r2                     // r3<- object we're operating on
   ubfx    r2, rINST, #8, #4           // r2<- A
   cmp     r3, #0
   beq     common_errNullObject        // object was null
   .if $wide
   add     ip, r3, r0
   ATOMIC_LOAD64 ip, r0, r1, r3, .L${opcode}_slow_path_atomic_load
   dmb     ish
   CLEAR_SHADOW_PAIR r2, ip, lr
   VREG_INDEX_TO_ADDR r2, r2
   SET_VREG_WIDE_BY_ADDR r0, r1, r2    // fp[A] <- value
   .else
   $load   r0, [r3, r0]
   dmb     ish
   .if $is_object
   cmp     rMR, #0
   bne     .L${opcode}_read_barrier
   SET_VREG_OBJECT r0, r2              // fp[A] <- value
   .else
   SET_VREG r0, r2                     // fp[A] <- value
   .endif
   .endif
   FETCH_ADVANCE_INST 2
   GET_INST_OPCODE ip
   GOTO_OPCODE ip

%def op_iget_wide():
%  op_iget(load="ldr", wide="1", is_object="0")

%def op_iget_object():
%  op_iget(load="ldr", wide="0", is_object="1")

%def op_iput_boolean():
%  op_iput(store="strb", wide="0", is_object="0")

%def op_iput_byte():
%  op_iput(store="strb", wide="0", is_object="0")

%def op_iput_char():
%  op_iput(store="strh", wide="0", is_object="0")

%def op_iput_short():
%  op_iput(store="strh", wide="0", is_object="0")

%def op_iput(store="str", wide="0", is_object="0"):
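   // Shared fast path for all iput variants: `store` is the store instruction,
   // `wide` selects the 64-bit path and `is_object` requests the GC write
   // barrier. For narrow stores the value (vA) is loaded into r4 before the
   // cache lookup; the cached field offset comes back in r0.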
   // Share slow paths for boolean and byte (strb) and slow paths for char and short (strh).
   // It does not matter to which `.L${opcode}_resume` the slow path returns.
%  slow_path = "nterp_op_iput_helper_" + store + wide + is_object
%  add_helper(lambda: op_iput_slow_path(store, wide, is_object), slow_path)
   .if !$wide
   ubfx    r4, rINST, #8, #4           // r4<- A
   GET_VREG r4, r4                     // r4 <- v[A]
   .endif
   // Fast-path which gets the field from thread-local cache.
   FETCH_FROM_THREAD_CACHE r0, ${slow_path}
.L${opcode}_resume:
   lsr     r1, rINST, #12              // r1<- B
   GET_VREG r1, r1                     // vB (object we're operating on)
   cmp     r1, #0
   beq     common_errNullObject
   .if $wide
   ubfx    r4, rINST, #8, #4           // r4<- A
   VREG_INDEX_TO_ADDR r4, r4
   GET_VREG_WIDE_BY_ADDR r2, r3, r4    // r2:r3 <- fp[A] (value)
   add     r1, r1, r0
   strd    r2, r3, [r1]
   .else
   $store  r4, [r1, r0]
   WRITE_BARRIER_IF_OBJECT $is_object, r4, r1, .L${opcode}_skip_write_barrier, r0
   .endif
   FETCH_ADVANCE_INST 2
   GET_INST_OPCODE ip
   GOTO_OPCODE ip

%def op_iput_slow_path(store, wide, is_object):
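   // Out-of-line path: resolve the field offset through the runtime. For
   // object stores the value is also passed to the runtime (r3) and reloaded
   // afterwards, as the GC may have moved it during the call. Volatile stores
   // are bracketed by `dmb ish` barriers.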
   mov     r0, rSELF
   ldr     r1, [sp]
   mov     r2, rPC
   .if $is_object
   mov     r3, r4
   .else
   mov     r3, #0
   .endif
   EXPORT_PC
   bl      nterp_get_instance_field_offset
   .if $is_object
   // Reload the value as it may have moved.
   ubfx    r4, rINST, #8, #4           // r4<- A
   GET_VREG r4, r4                     // r4 <- v[A]
   .endif
   cmp     r0, #0
   bge     .L${opcode}_resume
   CLEAR_INSTANCE_VOLATILE_MARKER r0
   .if $wide
   lsr     r4, rINST, #12              // r4<- B
   ubfx    r1, rINST, #8, #4           // r1<- A
   GET_VREG r4, r4                     // vB (object we're operating on)
   cmp     r4, #0
   beq     common_errNullObject
   VREG_INDEX_TO_ADDR r1, r1
   GET_VREG_WIDE_BY_ADDR r2, r3, r1
   add     ip, r4, r0
   dmb     ish
   ATOMIC_STORE64 ip, r2, r3, r0, r1, .L${opcode}_slow_path_atomic_store
   dmb     ish
   .else
   lsr     r1, rINST, #12              // r1<- B
   GET_VREG r1, r1                     // vB (object we're operating on)
   cmp     r1, #0
   beq     common_errNullObject
   dmb     ish
   $store  r4, [r1, r0]
   dmb     ish
   WRITE_BARRIER_IF_OBJECT $is_object, r4, r1, .L${opcode}_slow_path_skip_write_barrier, r0
   .endif
   FETCH_ADVANCE_INST 2
   GET_INST_OPCODE ip
   GOTO_OPCODE ip

%def op_iput_wide():
%  op_iput(store="str", wide="1", is_object="0")

%def op_iput_object():
%  op_iput(store="str", wide="0", is_object="1")

%def op_sget_boolean():
%  op_sget(load="ldrb", wide="0", is_object="0")

%def op_sget_byte():
%  op_sget(load="ldrsb", wide="0", is_object="0")

%def op_sget_char():
%  op_sget(load="ldrh", wide="0", is_object="0")

%def op_sget_short():
%  op_sget(load="ldrsh", wide="0", is_object="0")

%def op_sget(load="ldr", wide="0", is_object="0"):
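   // Shared fast path for all sget variants. The thread-local cache yields the
   // ArtField pointer in r0; the field offset and the declaring class are
   // loaded from it, and the declaring class is run through the read barrier
   // when the GC is marking (rMR != 0).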
%  slow_path = add_helper(lambda: op_sget_slow_path(load, wide, is_object))
   // Fast-path which gets the field from thread-local cache.
   FETCH_FROM_THREAD_CACHE r0, ${slow_path}
.L${opcode}_resume:
   ldr     r1, [r0, #ART_FIELD_OFFSET_OFFSET]
   lsr     r2, rINST, #8               // r2 <- A
   ldr     r0, [r0, #ART_FIELD_DECLARING_CLASS_OFFSET]
   cmp     rMR, #0
   bne     .L${opcode}_read_barrier
.L${opcode}_resume_after_read_barrier:
   .if $wide
   add     r0, r0, r1
   ldrd    r0, r1, [r0]
   CLEAR_SHADOW_PAIR r2, ip, lr
   VREG_INDEX_TO_ADDR r2, r2
   SET_VREG_WIDE_BY_ADDR r0, r1, r2    // fp[A] <- value
   .elseif $is_object
   $load   r0, [r0, r1]
   // No need to check the marking register, we know it's not set here.
.L${opcode}_after_reference_load:
   SET_VREG_OBJECT r0, r2              // fp[A] <- value
   .else
   $load   r0, [r0, r1]
   SET_VREG r0, r2                     // fp[A] <- value
   .endif
   FETCH_ADVANCE_INST 2
   GET_INST_OPCODE ip
   GOTO_OPCODE ip
.L${opcode}_read_barrier:
   bl      art_quick_read_barrier_mark_reg00
   .if $is_object
   ldr     r0, [r0, r1]
.L${opcode}_mark_after_load:
   // Here, we know the marking register is set.
   bl      art_quick_read_barrier_mark_reg00
   b       .L${opcode}_after_reference_load
   .else
   b       .L${opcode}_resume_after_read_barrier
   .endif

%def op_sget_slow_path(load="ldr", wide="0", is_object="0"):
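   // Out-of-line path: nterp_get_static_field returns the ArtField pointer,
   // with the low bit set for a volatile field (cleared below). The volatile
   // case reloads the offset and declaring class here and orders the load
   // with a `dmb ish` barrier.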
   mov     r0, rSELF
   ldr     r1, [sp]
   mov     r2, rPC
   mov     r3, #0
   EXPORT_PC
   bl      nterp_get_static_field
   tst     r0, #1
   beq     .L${opcode}_resume
   CLEAR_STATIC_VOLATILE_MARKER r0
   ldr     r1, [r0, #ART_FIELD_OFFSET_OFFSET]
   lsr     r2, rINST, #8               // r2 <- A
   ldr     r0, [r0, #ART_FIELD_DECLARING_CLASS_OFFSET]
   cmp     rMR, #0
   bne     .L${opcode}_slow_path_read_barrier
.L${opcode}_slow_path_resume_after_read_barrier:
   .if $wide
   add     ip, r0, r1
   ATOMIC_LOAD64 ip, r0, r1, r3, .L${opcode}_slow_path_atomic_load
   dmb     ish
   CLEAR_SHADOW_PAIR r2, ip, lr
   VREG_INDEX_TO_ADDR r2, r2
   SET_VREG_WIDE_BY_ADDR r0, r1, r2    // fp[A] <- value
   .else
   $load   r0, [r0, r1]
   dmb     ish
   .if $is_object
   cmp     rMR, #0
   bne     .L${opcode}_mark_after_load
   SET_VREG_OBJECT r0, r2              // fp[A] <- value
   .else
   SET_VREG r0, r2                     // fp[A] <- value
   .endif
   .endif
   FETCH_ADVANCE_INST 2
   GET_INST_OPCODE ip
   GOTO_OPCODE ip
.L${opcode}_slow_path_read_barrier:
   bl      art_quick_read_barrier_mark_reg00
   b       .L${opcode}_slow_path_resume_after_read_barrier

%def op_sget_wide():
%  op_sget(load="ldr", wide="1", is_object="0")

%def op_sget_object():
%  op_sget(load="ldr", wide="0", is_object="1")

%def op_sput_boolean():
%  op_sput(store="strb", wide="0", is_object="0")

%def op_sput_byte():
%  op_sput(store="strb", wide="0", is_object="0")

%def op_sput_char():
%  op_sput(store="strh", wide="0", is_object="0")

%def op_sput_short():
%  op_sput(store="strh", wide="0", is_object="0")

%def op_sput(store="str", wide="0", is_object="0"):
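   // Shared fast path for all sput variants. For narrow stores the value (vA)
   // is loaded into r4 before the cache lookup. The cached ArtField pointer
   // provides the field offset and the declaring class, which is also the
   // holder passed to the write barrier for object stores.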
   // Share slow paths for boolean and byte (strb) and slow paths for char and short (strh).
   // It does not matter to which `.L${opcode}_resume` the slow path returns.
%  slow_path = "nterp_op_sput_helper_" + store + wide + is_object
%  add_helper(lambda: op_sput_slow_path(store, wide, is_object), slow_path)
   .if !$wide
   lsr     r4, rINST, #8               // r4 <- A
   GET_VREG r4, r4                     // r4 <- v[A]
   .endif
   // Fast-path which gets the field from thread-local cache.
   FETCH_FROM_THREAD_CACHE r0, ${slow_path}
.L${opcode}_resume:
   ldr     r1, [r0, #ART_FIELD_OFFSET_OFFSET]
   ldr     r0, [r0, #ART_FIELD_DECLARING_CLASS_OFFSET]
   cmp     rMR, #0
   bne     .L${opcode}_read_barrier
.L${opcode}_resume_after_read_barrier:
   .if $wide
   lsr     r2, rINST, #8               // r2 <- A
   VREG_INDEX_TO_ADDR r2, r2
   GET_VREG_WIDE_BY_ADDR r2, r3, r2    // r2:r3 <- fp[A] (value)
   add     r0, r0, r1
   strd    r2, r3, [r0]
   .else
   $store  r4, [r0, r1]
   WRITE_BARRIER_IF_OBJECT $is_object, r4, r0, .L${opcode}_skip_write_barrier, r1
   .endif
   FETCH_ADVANCE_INST 2
   GET_INST_OPCODE ip
   GOTO_OPCODE ip
.L${opcode}_read_barrier:
   bl      art_quick_read_barrier_mark_reg00
   b       .L${opcode}_resume_after_read_barrier

%def op_sput_slow_path(store, wide, is_object):
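   // Out-of-line path: resolve the ArtField through the runtime; the low bit
   // of the returned pointer flags a volatile field. For object stores the
   // value is also passed to the runtime (r3) and reloaded afterwards, as the
   // GC may have moved it during the call. Volatile stores are bracketed by
   // `dmb ish` barriers.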
   mov     r0, rSELF
   ldr     r1, [sp]
   mov     r2, rPC
   .if $is_object
   mov     r3, r4
   .else
   mov     r3, #0
   .endif
   EXPORT_PC
   bl      nterp_get_static_field
   .if $is_object
   // Reload the value as it may have moved.
   lsr     r4, rINST, #8               // r4 <- A
   GET_VREG r4, r4                     // r4 <- v[A]
   .endif
   tst     r0, #1
   beq     .L${opcode}_resume
   CLEAR_STATIC_VOLATILE_MARKER r0
   ldr     r1, [r0, #ART_FIELD_OFFSET_OFFSET]
   ldr     r0, [r0, #ART_FIELD_DECLARING_CLASS_OFFSET]
   cmp     rMR, #0
   bne     .L${opcode}_slow_path_read_barrier
.L${opcode}_slow_path_resume_after_read_barrier:
   .if $wide
   lsr     r2, rINST, #8               // r2 <- A
   VREG_INDEX_TO_ADDR r2, r2
   GET_VREG_WIDE_BY_ADDR r2, r3, r2
   add     ip, r0, r1
   dmb     ish
   ATOMIC_STORE64 ip, r2, r3, r0, r1, .L${opcode}_slow_path_atomic_store
   dmb     ish
   .else
   dmb     ish
   $store  r4, [r0, r1]
   dmb     ish
   WRITE_BARRIER_IF_OBJECT $is_object, r4, r0, .L${opcode}_slow_path_skip_write_barrier, r1
   .endif
   FETCH_ADVANCE_INST 2
   GET_INST_OPCODE ip
   GOTO_OPCODE ip
.L${opcode}_slow_path_read_barrier:
   bl      art_quick_read_barrier_mark_reg00
   b       .L${opcode}_slow_path_resume_after_read_barrier

%def op_sput_wide():
%  op_sput(store="str", wide="1", is_object="0")

%def op_sput_object():
%  op_sput(store="str", wide="0", is_object="1")

%def op_new_instance():
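   // Allocates a new instance of the cached class and stores it in vA. With
   // the GC marking, the class is first run through the read barrier at label
   // 3. Allocation goes through the thread's object-allocation entrypoint; on
   // a cache miss, label 2 calls into the runtime and the result in r0 is
   // stored directly into vA at label 1.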
   EXPORT_PC
   // Fast-path which gets the class from thread-local cache.
   FETCH_FROM_THREAD_CACHE r0, 2f
   cmp     rMR, #0
   bne     3f
4:
   ldr     lr, [rSELF, #THREAD_ALLOC_OBJECT_ENTRYPOINT_OFFSET]
   blx     lr
1:
   lsr     r1, rINST, #8               // r1 <- A
   SET_VREG_OBJECT r0, r1              // fp[A] <- value
   FETCH_ADVANCE_INST 2
   GET_INST_OPCODE ip
   GOTO_OPCODE ip
2:
   mov     r0, rSELF
   ldr     r1, [sp]
   mov     r2, rPC
   bl      nterp_get_class_or_allocate_object
   b       1b
3:
   bl      art_quick_read_barrier_mark_reg00
   b       4b
