/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "jni_compiler.h"

#include <algorithm>
#include <fstream>
#include <ios>
#include <memory>
#include <vector>

#include "art_method.h"
#include "base/arena_allocator.h"
#include "base/arena_containers.h"
#include "base/enums.h"
#include "base/logging.h"  // For VLOG.
#include "base/macros.h"
#include "base/malloc_arena_pool.h"
#include "base/memory_region.h"
#include "base/utils.h"
#include "calling_convention.h"
#include "class_linker.h"
#include "dex/dex_file-inl.h"
#include "driver/compiler_options.h"
#include "dwarf/debug_frame_opcode_writer.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "jni/jni_env_ext.h"
#include "thread.h"
#include "utils/arm/managed_register_arm.h"
#include "utils/arm64/managed_register_arm64.h"
#include "utils/assembler.h"
#include "utils/jni_macro_assembler.h"
#include "utils/managed_register.h"
#include "utils/x86/managed_register_x86.h"

#define __ jni_asm->

namespace art {

template <PointerSize kPointerSize>
static void CopyParameter(JNIMacroAssembler<kPointerSize>* jni_asm,
                          ManagedRuntimeCallingConvention* mr_conv,
                          JniCallingConvention* jni_conv);
template <PointerSize kPointerSize>
static void SetNativeParameter(JNIMacroAssembler<kPointerSize>* jni_asm,
                               JniCallingConvention* jni_conv,
                               ManagedRegister in_reg);

template <PointerSize kPointerSize>
static std::unique_ptr<JNIMacroAssembler<kPointerSize>> GetMacroAssembler(
    ArenaAllocator* allocator, InstructionSet isa, const InstructionSetFeatures* features) {
  return JNIMacroAssembler<kPointerSize>::Create(allocator, isa, features);
}

enum class JniEntrypoint {
  kStart,
  kEnd
};

template <PointerSize kPointerSize>
static ThreadOffset<kPointerSize> GetJniEntrypointThreadOffset(JniEntrypoint which,
                                                               bool reference_return,
                                                               bool is_synchronized,
                                                               bool is_fast_native) {
  if (which == JniEntrypoint::kStart) {  // JniMethodStart
    ThreadOffset<kPointerSize> jni_start =
        is_synchronized
            ? QUICK_ENTRYPOINT_OFFSET(kPointerSize, pJniMethodStartSynchronized)
            : (is_fast_native
                   ? QUICK_ENTRYPOINT_OFFSET(kPointerSize, pJniMethodFastStart)
                   : QUICK_ENTRYPOINT_OFFSET(kPointerSize, pJniMethodStart));

    return jni_start;
  } else {  // JniMethodEnd
    ThreadOffset<kPointerSize> jni_end(-1);
    if (reference_return) {
      // Pass result.
      jni_end = is_synchronized
          ? QUICK_ENTRYPOINT_OFFSET(kPointerSize, pJniMethodEndWithReferenceSynchronized)
          : (is_fast_native
                 ? QUICK_ENTRYPOINT_OFFSET(kPointerSize, pJniMethodFastEndWithReference)
                 : QUICK_ENTRYPOINT_OFFSET(kPointerSize, pJniMethodEndWithReference));
    } else {
      jni_end = is_synchronized
          ? QUICK_ENTRYPOINT_OFFSET(kPointerSize, pJniMethodEndSynchronized)
          : (is_fast_native
                 ? QUICK_ENTRYPOINT_OFFSET(kPointerSize, pJniMethodFastEnd)
                 : QUICK_ENTRYPOINT_OFFSET(kPointerSize, pJniMethodEnd));
    }

    return jni_end;
  }
}


// Generate the JNI bridge for the given method; general contract:
// - Arguments are in the managed runtime format, either on stack or in
//   registers; a reference to the method object is supplied as part of this
//   convention.
//
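// For orientation, a hedged example (hypothetical method, not taken from this
// file): a declaration such as
//
//     class Widget { native String render(int flags, Object ctx); }
//
// has the shorty "LIL" (return type first, then parameters), and the bridge
// emitted below ends up invoking a native function shaped roughly like
//
//     jobject Java_Widget_render(JNIEnv* env, jobject thiz, jint flags, jobject ctx);
//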
template <PointerSize kPointerSize>
static JniCompiledMethod ArtJniCompileMethodInternal(const CompilerOptions& compiler_options,
                                                     uint32_t access_flags,
                                                     uint32_t method_idx,
                                                     const DexFile& dex_file) {
  const bool is_native = (access_flags & kAccNative) != 0;
  CHECK(is_native);
  const bool is_static = (access_flags & kAccStatic) != 0;
  const bool is_synchronized = (access_flags & kAccSynchronized) != 0;
  const char* shorty = dex_file.GetMethodShorty(dex_file.GetMethodId(method_idx));
  InstructionSet instruction_set = compiler_options.GetInstructionSet();
  const InstructionSetFeatures* instruction_set_features =
      compiler_options.GetInstructionSetFeatures();

  // i.e. if the method was annotated with @FastNative.
  const bool is_fast_native = (access_flags & kAccFastNative) != 0u;

  // i.e. if the method was annotated with @CriticalNative.
  const bool is_critical_native = (access_flags & kAccCriticalNative) != 0u;
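
  // For illustration only (hypothetical declaration, not part of this file):
  //
  //     @dalvik.annotation.optimization.CriticalNative
  //     static native long nativeHash(long ptr);
  //
  // would be loaded with kAccCriticalNative set; its native counterpart
  // receives neither a JNIEnv* nor a jclass, which is why most of the stub
  // below is skipped for it.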

  VLOG(jni) << "JniCompile: Method :: "
            << dex_file.PrettyMethod(method_idx, /* with_signature= */ true)
            << " :: access_flags = " << std::hex << access_flags << std::dec;

  if (UNLIKELY(is_fast_native)) {
    VLOG(jni) << "JniCompile: Fast native method detected :: "
              << dex_file.PrettyMethod(method_idx, /* with_signature= */ true);
  }

  if (UNLIKELY(is_critical_native)) {
    VLOG(jni) << "JniCompile: Critical native method detected :: "
              << dex_file.PrettyMethod(method_idx, /* with_signature= */ true);
  }

  if (kIsDebugBuild) {
    // Don't allow both @FastNative and @CriticalNative. They are mutually exclusive.
    if (UNLIKELY(is_fast_native && is_critical_native)) {
      LOG(FATAL) << "JniCompile: Method cannot be both @CriticalNative and @FastNative "
                 << dex_file.PrettyMethod(method_idx, /* with_signature= */ true);
    }

    // @CriticalNative - extra checks:
    // -- Don't allow virtual criticals
    // -- Don't allow synchronized criticals
    // -- Don't allow any objects as parameter or return value
    if (UNLIKELY(is_critical_native)) {
      CHECK(is_static)
          << "@CriticalNative functions cannot be virtual since that would "
          << "require passing a reference parameter (this), which is illegal "
          << dex_file.PrettyMethod(method_idx, /* with_signature= */ true);
      CHECK(!is_synchronized)
          << "@CriticalNative functions cannot be synchronized since that would "
          << "require passing a (class and/or this) reference parameter, which is illegal "
          << dex_file.PrettyMethod(method_idx, /* with_signature= */ true);
      for (size_t i = 0; i < strlen(shorty); ++i) {
        CHECK_NE(Primitive::kPrimNot, Primitive::GetType(shorty[i]))
            << "@CriticalNative methods must not have references in their signature "
            << dex_file.PrettyMethod(method_idx, /* with_signature= */ true);
      }
    }
  }

  MallocArenaPool pool;
  ArenaAllocator allocator(&pool);

  // Calling conventions used to iterate over parameters to the method.
  std::unique_ptr<JniCallingConvention> main_jni_conv =
      JniCallingConvention::Create(&allocator,
                                   is_static,
                                   is_synchronized,
                                   is_critical_native,
                                   shorty,
                                   instruction_set);
  bool reference_return = main_jni_conv->IsReturnAReference();

  std::unique_ptr<ManagedRuntimeCallingConvention> mr_conv(
      ManagedRuntimeCallingConvention::Create(
          &allocator, is_static, is_synchronized, shorty, instruction_set));

  // Calling conventions to call into the JNI method "end", possibly passing a
  // returned reference, the method and the current thread.
  const char* jni_end_shorty;
  if (reference_return && is_synchronized) {
    jni_end_shorty = "ILL";
  } else if (reference_return) {
    jni_end_shorty = "IL";
  } else if (is_synchronized) {
    jni_end_shorty = "VL";
  } else {
    jni_end_shorty = "V";
  }
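
  // For reference, the shorty chosen above pairs with the entrypoint selected by
  // GetJniEntrypointThreadOffset() roughly as follows (Thread* is always passed
  // as the trailing argument, and the @FastNative variants substitute where that
  // helper says so):
  //
  //   "ILL" -> pJniMethodEndWithReferenceSynchronized(result, cookie, lock object)
  //   "IL"  -> pJniMethodEndWithReference(result, cookie)
  //   "VL"  -> pJniMethodEndSynchronized(cookie, lock object)
  //   "V"   -> pJniMethodEnd(cookie)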

  std::unique_ptr<JniCallingConvention> end_jni_conv(
      JniCallingConvention::Create(&allocator,
                                   is_static,
                                   is_synchronized,
                                   is_critical_native,
                                   jni_end_shorty,
                                   instruction_set));

  // Assembler that holds generated instructions.
  std::unique_ptr<JNIMacroAssembler<kPointerSize>> jni_asm =
      GetMacroAssembler<kPointerSize>(&allocator, instruction_set, instruction_set_features);
  jni_asm->cfi().SetEnabled(compiler_options.GenerateAnyDebugInfo());
  jni_asm->SetEmitRunTimeChecksInDebugMode(compiler_options.EmitRunTimeChecksInDebugMode());

  // 1. Build the frame saving all callee saves, Method*, and PC return address.
  //    For @CriticalNative, this includes space for out args, otherwise just the managed frame.
  const size_t managed_frame_size = main_jni_conv->FrameSize();
  const size_t main_out_arg_size = main_jni_conv->OutFrameSize();
  size_t current_frame_size = is_critical_native ? main_out_arg_size : managed_frame_size;
  ManagedRegister method_register =
      is_critical_native ? ManagedRegister::NoRegister() : mr_conv->MethodRegister();
  ArrayRef<const ManagedRegister> callee_save_regs = main_jni_conv->CalleeSaveRegisters();
  __ BuildFrame(current_frame_size, method_register, callee_save_regs);
  DCHECK_EQ(jni_asm->cfi().GetCurrentCFAOffset(), static_cast<int>(current_frame_size));
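
  // A rough sketch of the frame just built (stack grows downwards; the exact
  // layout is ISA-specific and owned by the calling convention, so treat this
  // as an approximation rather than a contract):
  //
  //     | caller's frame                |
  //     | return address / callee saves |
  //     | spill slots for managed args  |  <- filled in just below
  //     | ArtMethod*                    |  <- SP after BuildFrame()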

  if (LIKELY(!is_critical_native)) {
    // Spill all register arguments.
    // TODO: Pass these in a single call to let the assembler use multi-register stores.
    // TODO: Spill native stack args straight to their stack locations (adjust SP earlier).
    mr_conv->ResetIterator(FrameOffset(current_frame_size));
    for (; mr_conv->HasNext(); mr_conv->Next()) {
      if (mr_conv->IsCurrentParamInRegister()) {
        size_t size = mr_conv->IsCurrentParamALongOrDouble() ? 8u : 4u;
        __ Store(mr_conv->CurrentParamStackOffset(), mr_conv->CurrentParamRegister(), size);
      }
    }

    // 2. Write out the end of the quick frames.
    __ StoreStackPointerToThread(Thread::TopOfManagedStackOffset<kPointerSize>());

    // NOTE: @CriticalNative does not need to store the stack pointer to the thread
    // because garbage collections are disabled within the execution of a
    // @CriticalNative method.
    // (TODO: We could probably disable it for @FastNative too.)
  }  // if (!is_critical_native)

  // 3. Move frame down to allow space for outgoing args.
  size_t current_out_arg_size = main_out_arg_size;
  if (UNLIKELY(is_critical_native)) {
    DCHECK_EQ(main_out_arg_size, current_frame_size);
  } else {
    __ IncreaseFrameSize(main_out_arg_size);
    current_frame_size += main_out_arg_size;
  }

  // 4. Check if we need to go to the slow path to emit the read barrier for the
  //    declaring class in the method for a static call.
  //    Skip this for @CriticalNative because we're not passing a `jclass` to the native method.
  std::unique_ptr<JNIMacroLabel> jclass_read_barrier_slow_path;
  std::unique_ptr<JNIMacroLabel> jclass_read_barrier_return;
  if (kUseReadBarrier && is_static && !is_critical_native) {
    jclass_read_barrier_slow_path = __ CreateLabel();
    jclass_read_barrier_return = __ CreateLabel();

    // Check if gc_is_marking is set -- if it's not, we don't need a read barrier.
    __ TestGcMarking(jclass_read_barrier_slow_path.get(), JNIMacroUnaryCondition::kNotZero);

    // If marking, the slow path returns after the check.
    __ Bind(jclass_read_barrier_return.get());
  }

  // 5. Call into the appropriate JniMethodStart, passing Thread* so that the
  //    transition out of Runnable can occur. The result is the saved JNI local
  //    state that is restored by the exit call. We abuse the JNI calling
  //    convention here, which is guaranteed to support passing two pointer
  //    arguments.
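  //
  //    The entrypoints targeted here look approximately like this (assuming the
  //    declarations in entrypoints/quick/quick_entrypoints.h; abbreviated sketch):
  //
  //        extern uint32_t JniMethodStart(Thread* self);
  //        extern uint32_t JniMethodStartSynchronized(jobject to_lock, Thread* self);
  //
  //    The returned uint32_t is the saved local reference cookie saved below.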
  constexpr size_t cookie_size = JniCallingConvention::SavedLocalReferenceCookieSize();
  ManagedRegister saved_cookie_register = ManagedRegister::NoRegister();
  if (LIKELY(!is_critical_native)) {
    // Skip this for @CriticalNative methods. They do not call JniMethodStart.
    ThreadOffset<kPointerSize> jni_start(
        GetJniEntrypointThreadOffset<kPointerSize>(JniEntrypoint::kStart,
                                                   reference_return,
                                                   is_synchronized,
                                                   is_fast_native).SizeValue());
    main_jni_conv->ResetIterator(FrameOffset(main_out_arg_size));
    if (is_synchronized) {
      // Pass object for locking.
      if (is_static) {
        // Pass the pointer to the method's declaring class as the first argument.
        DCHECK_EQ(ArtMethod::DeclaringClassOffset().SizeValue(), 0u);
        SetNativeParameter(jni_asm.get(), main_jni_conv.get(), method_register);
      } else {
        // TODO: Use the register that still holds the `this` reference.
        mr_conv->ResetIterator(FrameOffset(current_frame_size));
        FrameOffset this_offset = mr_conv->CurrentParamStackOffset();
        if (main_jni_conv->IsCurrentParamOnStack()) {
          FrameOffset out_off = main_jni_conv->CurrentParamStackOffset();
          __ CreateJObject(out_off, this_offset, /*null_allowed=*/ false);
        } else {
          ManagedRegister out_reg = main_jni_conv->CurrentParamRegister();
          __ CreateJObject(out_reg,
                           this_offset,
                           ManagedRegister::NoRegister(),
                           /*null_allowed=*/ false);
        }
      }
      main_jni_conv->Next();
    }
    if (main_jni_conv->IsCurrentParamInRegister()) {
      __ GetCurrentThread(main_jni_conv->CurrentParamRegister());
      __ Call(main_jni_conv->CurrentParamRegister(), Offset(jni_start));
    } else {
      __ GetCurrentThread(main_jni_conv->CurrentParamStackOffset());
      __ CallFromThread(jni_start);
    }
    method_register = ManagedRegister::NoRegister();  // Method register is clobbered.
    if (is_synchronized) {  // Check for exceptions from monitor enter.
      __ ExceptionPoll(main_out_arg_size);
    }

    // Save the local reference cookie returned by JniMethodStart in the
    // convention's saved-cookie register.
    saved_cookie_register = main_jni_conv->SavedLocalReferenceCookieRegister();
    __ Move(saved_cookie_register, main_jni_conv->IntReturnRegister(), cookie_size);
  }

  // 6. Fill arguments.
  if (UNLIKELY(is_critical_native)) {
    ArenaVector<ArgumentLocation> src_args(allocator.Adapter());
    ArenaVector<ArgumentLocation> dest_args(allocator.Adapter());
    // Move the method pointer to the hidden argument register.
    size_t pointer_size = static_cast<size_t>(kPointerSize);
    dest_args.push_back(ArgumentLocation(main_jni_conv->HiddenArgumentRegister(), pointer_size));
    src_args.push_back(ArgumentLocation(mr_conv->MethodRegister(), pointer_size));
    // Move normal arguments to their locations.
    mr_conv->ResetIterator(FrameOffset(current_frame_size));
    main_jni_conv->ResetIterator(FrameOffset(main_out_arg_size));
    for (; mr_conv->HasNext(); mr_conv->Next(), main_jni_conv->Next()) {
      DCHECK(main_jni_conv->HasNext());
      size_t size = mr_conv->IsCurrentParamALongOrDouble() ? 8u : 4u;
      src_args.push_back(mr_conv->IsCurrentParamInRegister()
          ? ArgumentLocation(mr_conv->CurrentParamRegister(), size)
          : ArgumentLocation(mr_conv->CurrentParamStackOffset(), size));
      dest_args.push_back(main_jni_conv->IsCurrentParamInRegister()
          ? ArgumentLocation(main_jni_conv->CurrentParamRegister(), size)
          : ArgumentLocation(main_jni_conv->CurrentParamStackOffset(), size));
    }
    DCHECK(!main_jni_conv->HasNext());
    __ MoveArguments(ArrayRef<ArgumentLocation>(dest_args), ArrayRef<ArgumentLocation>(src_args));
  } else {
    // Iterate over arguments, placing values from the managed calling convention
    // into the convention required for a native call (shuffling). For references,
    // place a pointer to the spilled reference after checking whether the
    // reference is null (a null reference must be passed as a plain null, not as
    // a pointer to the spill slot).
    // Note: we do this prior to materializing the JNIEnv* and static's jclass to
    // give as many free registers for the shuffle as possible.
    mr_conv->ResetIterator(FrameOffset(current_frame_size));
    uint32_t args_count = 0;
    while (mr_conv->HasNext()) {
      args_count++;
      mr_conv->Next();
    }

    // Do a backward pass over arguments, so that the generated code will be "mov
    // R2, R3; mov R1, R2" instead of "mov R1, R2; mov R2, R3."
    // TODO: A reverse iterator to improve readability.
    // TODO: This is currently useless as all archs spill args when building the frame.
    //       To avoid the full spilling, we would have to do one pass before BuildFrame()
    //       to determine which arg registers are clobbered before they are needed.
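    //
    // Concrete illustration of the hazard (writing "mov dst, src"): with the
    // JNIEnv* slot prepended, the managed arg in R1 may need to move to R2 and
    // the one in R2 to R3. Emitting "mov R2, R1" first would clobber R2 before
    // "mov R3, R2" reads it; visiting the last argument first emits
    // "mov R3, R2; mov R2, R1", which never reads an already-written register.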
    for (uint32_t i = 0; i < args_count; ++i) {
      mr_conv->ResetIterator(FrameOffset(current_frame_size));
      main_jni_conv->ResetIterator(FrameOffset(main_out_arg_size));

      // Skip the extra JNI parameters for now.
      main_jni_conv->Next();  // Skip JNIEnv*.
      if (is_static) {
        main_jni_conv->Next();  // Skip Class for now.
      }
      // Skip to the argument we're interested in.
      for (uint32_t j = 0; j < args_count - i - 1; ++j) {
        mr_conv->Next();
        main_jni_conv->Next();
      }
      CopyParameter(jni_asm.get(), mr_conv.get(), main_jni_conv.get());
    }

    // 7. For a static method, create the jclass argument as a pointer to the
    //    method's declaring class.
    if (is_static) {
      main_jni_conv->ResetIterator(FrameOffset(main_out_arg_size));
      main_jni_conv->Next();  // Skip JNIEnv*.
      // Load reference to the method's declaring class. The method register has been
      // clobbered by the above call, so we need to load the method from the stack.
      FrameOffset method_offset =
          FrameOffset(current_out_arg_size + mr_conv->MethodStackOffset().SizeValue());
      DCHECK_EQ(ArtMethod::DeclaringClassOffset().SizeValue(), 0u);
      if (main_jni_conv->IsCurrentParamOnStack()) {
        FrameOffset out_off = main_jni_conv->CurrentParamStackOffset();
        __ Copy(out_off, method_offset, static_cast<size_t>(kPointerSize));
        // TODO(x86): Get hold of the register used to copy the method pointer,
        // so that we can use it also for loading the method entrypoint below.
      } else {
        ManagedRegister out_reg = main_jni_conv->CurrentParamRegister();
        __ Load(out_reg, method_offset, static_cast<size_t>(kPointerSize));
        // Reuse the register also for loading the method entrypoint below.
        method_register = out_reg;
      }
    }
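
    // Note on the jclass trick above: because DeclaringClassOffset() is 0, the
    // ArtMethod* itself doubles as a pointer to the declaring-class reference,
    // so copying the method pointer yields, in effect, a valid jclass (a
    // pointer to a GC-visible reference slot).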

    // Set the iterator back to the incoming Method*.
    main_jni_conv->ResetIterator(FrameOffset(main_out_arg_size));

    // 8. Create the 1st argument, the JNI environment ptr, in the register
    //    that will hold the local indirect reference table.
    if (main_jni_conv->IsCurrentParamInRegister()) {
      ManagedRegister jni_env = main_jni_conv->CurrentParamRegister();
      __ LoadRawPtrFromThread(jni_env, Thread::JniEnvOffset<kPointerSize>());
    } else {
      FrameOffset jni_env = main_jni_conv->CurrentParamStackOffset();
      __ CopyRawPtrFromThread(jni_env, Thread::JniEnvOffset<kPointerSize>());
    }
  }

  // 9. Plant the call to the native code associated with the method.
  MemberOffset jni_entrypoint_offset =
      ArtMethod::EntryPointFromJniOffset(InstructionSetPointerSize(instruction_set));
  if (UNLIKELY(is_critical_native)) {
    if (main_jni_conv->UseTailCall()) {
      __ Jump(main_jni_conv->HiddenArgumentRegister(), jni_entrypoint_offset);
    } else {
      __ Call(main_jni_conv->HiddenArgumentRegister(), jni_entrypoint_offset);
    }
  } else {
    if (method_register.IsRegister()) {
      __ Call(method_register, jni_entrypoint_offset);
    } else {
      __ Call(FrameOffset(current_out_arg_size + mr_conv->MethodStackOffset().SizeValue()),
              jni_entrypoint_offset);
    }
  }
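
  // On arm64, for instance, the register-based call above comes out roughly as
  // the following sketch (register choice illustrative, not the exact emission):
  //
  //     ldr xN, [<method register>, #jni_entrypoint_offset]
  //     blr xN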

  // 10. Fix differences in result widths.
  if (main_jni_conv->RequiresSmallResultTypeExtension()) {
    DCHECK(main_jni_conv->HasSmallReturnType());
    CHECK(!is_critical_native || !main_jni_conv->UseTailCall());
    if (main_jni_conv->GetReturnType() == Primitive::kPrimByte ||
        main_jni_conv->GetReturnType() == Primitive::kPrimShort) {
      __ SignExtend(main_jni_conv->ReturnRegister(),
                    Primitive::ComponentSize(main_jni_conv->GetReturnType()));
    } else {
      CHECK(main_jni_conv->GetReturnType() == Primitive::kPrimBoolean ||
            main_jni_conv->GetReturnType() == Primitive::kPrimChar);
      __ ZeroExtend(main_jni_conv->ReturnRegister(),
                    Primitive::ComponentSize(main_jni_conv->GetReturnType()));
    }
  }
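
  // On arm64, each case above typically reduces to a single instruction on the
  // return register, e.g. (illustrative sketch):
  //
  //     sxtb w0, w0   // SignExtend for kPrimByte
  //     uxth w0, w0   // ZeroExtend for kPrimChar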

  // 11. Process the return value.
  bool spill_return_value = main_jni_conv->SpillsReturnValue();
  FrameOffset return_save_location =
      spill_return_value ? main_jni_conv->ReturnValueSaveLocation() : FrameOffset(0);
  if (spill_return_value) {
    DCHECK(!is_critical_native);
    // For normal JNI, store the return value on the stack because the call to
    // JniMethodEnd will clobber the return value. It will be restored in (13).
    CHECK_LT(return_save_location.Uint32Value(), current_frame_size);
    __ Store(return_save_location,
             main_jni_conv->ReturnRegister(),
             main_jni_conv->SizeOfReturnValue());
  } else if (UNLIKELY(is_critical_native) && main_jni_conv->SizeOfReturnValue() != 0) {
    // For @CriticalNative only, move the JNI return register into the managed
    // return register (if they don't match).
    ManagedRegister jni_return_reg = main_jni_conv->ReturnRegister();
    ManagedRegister mr_return_reg = mr_conv->ReturnRegister();

    // Check if the JNI return register matches the managed return register.
    // Only if they differ do we have to do anything about it.
    // Otherwise the return value is already in the right place when we return.
    if (!jni_return_reg.Equals(mr_return_reg)) {
      CHECK(!main_jni_conv->UseTailCall());
      // This is typically only necessary on ARM32 due to native being softfloat
      // while managed is hardfloat.
      // -- For example VMOV {r0, r1} -> D0; VMOV r0 -> S0.
      __ Move(mr_return_reg, jni_return_reg, main_jni_conv->SizeOfReturnValue());
    } else if (jni_return_reg.IsNoRegister() && mr_return_reg.IsNoRegister()) {
      // If the return value is passed on the stack for some reason, check that
      // the sizes match.
      CHECK_EQ(main_jni_conv->SizeOfReturnValue(), mr_conv->SizeOfReturnValue());
    }
  }

  if (LIKELY(!is_critical_native)) {
    // Increase the frame size for out args if needed by the end_jni_conv.
    const size_t end_out_arg_size = end_jni_conv->OutFrameSize();
    if (end_out_arg_size > current_out_arg_size) {
      size_t out_arg_size_diff = end_out_arg_size - current_out_arg_size;
      current_out_arg_size = end_out_arg_size;
      __ IncreaseFrameSize(out_arg_size_diff);
      current_frame_size += out_arg_size_diff;
      return_save_location = FrameOffset(return_save_location.SizeValue() + out_arg_size_diff);
    }
    end_jni_conv->ResetIterator(FrameOffset(end_out_arg_size));

    // 12. Call JniMethodEnd.
    ThreadOffset<kPointerSize> jni_end(
        GetJniEntrypointThreadOffset<kPointerSize>(JniEntrypoint::kEnd,
                                                   reference_return,
                                                   is_synchronized,
                                                   is_fast_native).SizeValue());
    if (reference_return) {
      // Pass result.
      SetNativeParameter(jni_asm.get(), end_jni_conv.get(), end_jni_conv->ReturnRegister());
      end_jni_conv->Next();
    }
    // Pass saved local reference state.
    if (end_jni_conv->IsCurrentParamOnStack()) {
      FrameOffset out_off = end_jni_conv->CurrentParamStackOffset();
      __ Store(out_off, saved_cookie_register, cookie_size);
    } else {
      ManagedRegister out_reg = end_jni_conv->CurrentParamRegister();
      __ Move(out_reg, saved_cookie_register, cookie_size);
    }
    end_jni_conv->Next();
    if (is_synchronized) {
      // Pass object for unlocking.
      if (is_static) {
        // Load reference to the method's declaring class. The method register has been
        // clobbered by the above call, so we need to load the method from the stack.
        FrameOffset method_offset =
            FrameOffset(current_out_arg_size + mr_conv->MethodStackOffset().SizeValue());
        DCHECK_EQ(ArtMethod::DeclaringClassOffset().SizeValue(), 0u);
        if (end_jni_conv->IsCurrentParamOnStack()) {
          FrameOffset out_off = end_jni_conv->CurrentParamStackOffset();
          __ Copy(out_off, method_offset, static_cast<size_t>(kPointerSize));
        } else {
          ManagedRegister out_reg = end_jni_conv->CurrentParamRegister();
          __ Load(out_reg, method_offset, static_cast<size_t>(kPointerSize));
        }
      } else {
        mr_conv->ResetIterator(FrameOffset(current_frame_size));
        FrameOffset this_offset = mr_conv->CurrentParamStackOffset();
        if (end_jni_conv->IsCurrentParamOnStack()) {
          FrameOffset out_off = end_jni_conv->CurrentParamStackOffset();
          __ CreateJObject(out_off, this_offset, /*null_allowed=*/ false);
        } else {
          ManagedRegister out_reg = end_jni_conv->CurrentParamRegister();
          __ CreateJObject(out_reg,
                           this_offset,
                           ManagedRegister::NoRegister(),
                           /*null_allowed=*/ false);
        }
      }
      end_jni_conv->Next();
    }
    if (end_jni_conv->IsCurrentParamInRegister()) {
      __ GetCurrentThread(end_jni_conv->CurrentParamRegister());
      __ Call(end_jni_conv->CurrentParamRegister(), Offset(jni_end));
    } else {
      __ GetCurrentThread(end_jni_conv->CurrentParamStackOffset());
      __ CallFromThread(jni_end);
    }

    // 13. Reload the return value.
    if (spill_return_value) {
      __ Load(mr_conv->ReturnRegister(), return_save_location, mr_conv->SizeOfReturnValue());
    }
  }  // if (!is_critical_native)

  // 14. Move the frame up now that we're done with the out arg space.
  //     For @CriticalNative, the out args are removed together with the frame in RemoveFrame().
  if (LIKELY(!is_critical_native)) {
    __ DecreaseFrameSize(current_out_arg_size);
    current_frame_size -= current_out_arg_size;
  }

  // 15. Process pending exceptions from the JNI call or monitor exit.
  //     @CriticalNative methods do not need an exception poll in the stub.
  if (LIKELY(!is_critical_native)) {
    __ ExceptionPoll(/* stack_adjust= */ 0);
  }

  // 16. Remove activation - we need to restore callee save registers since the GC
  //     may have changed them.
  DCHECK_EQ(jni_asm->cfi().GetCurrentCFAOffset(), static_cast<int>(current_frame_size));
  if (LIKELY(!is_critical_native) || !main_jni_conv->UseTailCall()) {
    // We expect the compiled method to possibly be suspended during its
    // execution, except in the case of a @CriticalNative method.
    bool may_suspend = !is_critical_native;
    __ RemoveFrame(current_frame_size, callee_save_regs, may_suspend);
    DCHECK_EQ(jni_asm->cfi().GetCurrentCFAOffset(), static_cast<int>(current_frame_size));
  }

  // 17. Read barrier slow path for the declaring class in the method for a static call.
  //     Skip this for @CriticalNative because we're not passing a `jclass` to the native method.
  if (kUseReadBarrier && is_static && !is_critical_native) {
    __ Bind(jclass_read_barrier_slow_path.get());

    // We do the marking check after adjusting for outgoing arguments. That ensures that
    // we have space available for at least two params in case we need to pass the read
    // barrier parameters on the stack (only x86). But that means we must adjust the CFI
    // offset accordingly, as it does not include the outgoing args after `RemoveFrame()`.
    if (main_out_arg_size != 0) {
      // Note: The DW_CFA_def_cfa_offset emitted by `RemoveFrame()` above
      // is useless when it is immediately overridden here, but avoiding
      // it adds a lot of code complexity for minimal gain.
      jni_asm->cfi().AdjustCFAOffset(main_out_arg_size);
    }

    // We enter the slow path with the method register unclobbered.
    method_register = mr_conv->MethodRegister();

    // Construct the slow path for the read barrier:
    //
    // Call into the runtime's ReadBarrierJni and have it fix up
    // the object address if it was moved.

    ThreadOffset<kPointerSize> read_barrier = QUICK_ENTRYPOINT_OFFSET(kPointerSize,
                                                                      pReadBarrierJni);
    main_jni_conv->ResetIterator(FrameOffset(main_out_arg_size));
    // Pass the pointer to the method's declaring class as the first argument.
    DCHECK_EQ(ArtMethod::DeclaringClassOffset().SizeValue(), 0u);
    SetNativeParameter(jni_asm.get(), main_jni_conv.get(), method_register);
    main_jni_conv->Next();
    // Pass the current thread as the second argument and call.
    if (main_jni_conv->IsCurrentParamInRegister()) {
      __ GetCurrentThread(main_jni_conv->CurrentParamRegister());
      __ Call(main_jni_conv->CurrentParamRegister(), Offset(read_barrier));
    } else {
      __ GetCurrentThread(main_jni_conv->CurrentParamStackOffset());
      __ CallFromThread(read_barrier);
    }
    if (is_synchronized) {
      // Reload the method pointer in the slow path because it is needed
      // as an argument to `JniMethodStartSynchronized`.
      __ Load(method_register,
              FrameOffset(main_out_arg_size + mr_conv->MethodStackOffset().SizeValue()),
              static_cast<size_t>(kPointerSize));
    }

    // Return to the main path.
    __ Jump(jclass_read_barrier_return.get());

    // Undo the CFI offset adjustment at the start of the slow path.
    if (main_out_arg_size != 0) {
      jni_asm->cfi().AdjustCFAOffset(-main_out_arg_size);
    }
  }

  // 18. Finalize code generation.
  __ FinalizeCode();
  size_t cs = __ CodeSize();
  std::vector<uint8_t> managed_code(cs);
  MemoryRegion code(&managed_code[0], managed_code.size());
  __ FinalizeInstructions(code);

  return JniCompiledMethod(instruction_set,
                           std::move(managed_code),
                           managed_frame_size,
                           main_jni_conv->CoreSpillMask(),
                           main_jni_conv->FpSpillMask(),
                           ArrayRef<const uint8_t>(*jni_asm->cfi().data()));
}

// Copy a single parameter from the managed to the JNI calling convention.
template <PointerSize kPointerSize>
static void CopyParameter(JNIMacroAssembler<kPointerSize>* jni_asm,
                          ManagedRuntimeCallingConvention* mr_conv,
                          JniCallingConvention* jni_conv) {
  // We spilled all registers, so use stack locations.
  // TODO: Move args in registers for @CriticalNative.
  bool input_in_reg = false;  // mr_conv->IsCurrentParamInRegister();
  bool output_in_reg = jni_conv->IsCurrentParamInRegister();
  FrameOffset spilled_reference_offset(0);
  bool null_allowed = false;
  bool ref_param = jni_conv->IsCurrentParamAReference();
  CHECK(!ref_param || mr_conv->IsCurrentParamAReference());
  if (output_in_reg) {  // Output shouldn't straddle registers and stack.
    CHECK(!jni_conv->IsCurrentParamOnStack());
  } else {
    CHECK(jni_conv->IsCurrentParamOnStack());
  }
  // References are spilled to the caller's reserved out vreg area.
  if (ref_param) {
    null_allowed = mr_conv->IsCurrentArgPossiblyNull();
    // Compute the spilled reference offset. Note that null is spilled but the jobject
    // passed to the native code must be null (not a pointer into the spilled value
    // as with regular references).
    spilled_reference_offset = mr_conv->CurrentParamStackOffset();
    // Check that the spilled reference offset is in the spill area in the caller's frame.
    CHECK_GT(spilled_reference_offset.Uint32Value(), mr_conv->GetDisplacement().Uint32Value());
  }
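
  // What the CreateJObject() calls below compute, as a pseudocode sketch:
  //
  //     jobject arg = (spilled reference != null) ? &spill_slot : nullptr;
  //
  // i.e. the native code receives a pointer to the spill slot for a non-null
  // reference, and a plain null for a null one (when null_allowed).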
  if (input_in_reg && output_in_reg) {
    ManagedRegister in_reg = mr_conv->CurrentParamRegister();
    ManagedRegister out_reg = jni_conv->CurrentParamRegister();
    if (ref_param) {
      __ CreateJObject(out_reg, spilled_reference_offset, in_reg, null_allowed);
    } else {
      if (!mr_conv->IsCurrentParamOnStack()) {
        // Regular non-straddling move.
        __ Move(out_reg, in_reg, mr_conv->CurrentParamSize());
      } else {
        UNIMPLEMENTED(FATAL);  // We currently don't expect to see this case.
      }
    }
  } else if (!input_in_reg && !output_in_reg) {
    FrameOffset out_off = jni_conv->CurrentParamStackOffset();
    if (ref_param) {
      __ CreateJObject(out_off, spilled_reference_offset, null_allowed);
    } else {
      FrameOffset in_off = mr_conv->CurrentParamStackOffset();
      size_t param_size = mr_conv->CurrentParamSize();
      CHECK_EQ(param_size, jni_conv->CurrentParamSize());
      __ Copy(out_off, in_off, param_size);
    }
  } else if (!input_in_reg && output_in_reg) {
    FrameOffset in_off = mr_conv->CurrentParamStackOffset();
    ManagedRegister out_reg = jni_conv->CurrentParamRegister();
    // Check that incoming stack arguments are above the current stack frame.
    CHECK_GT(in_off.Uint32Value(), mr_conv->GetDisplacement().Uint32Value());
    if (ref_param) {
      __ CreateJObject(out_reg,
                       spilled_reference_offset,
                       ManagedRegister::NoRegister(),
                       null_allowed);
    } else {
      size_t param_size = mr_conv->CurrentParamSize();
      CHECK_EQ(param_size, jni_conv->CurrentParamSize());
      __ Load(out_reg, in_off, param_size);
    }
  } else {
    CHECK(input_in_reg && !output_in_reg);
    ManagedRegister in_reg = mr_conv->CurrentParamRegister();
    FrameOffset out_off = jni_conv->CurrentParamStackOffset();
    // Check that the outgoing argument is within the frame part dedicated to out args.
    CHECK_LT(out_off.Uint32Value(), jni_conv->GetDisplacement().Uint32Value());
    if (ref_param) {
      // TODO: Recycle the value in in_reg rather than reload from the spill slot.
      __ CreateJObject(out_off, spilled_reference_offset, null_allowed);
    } else {
      size_t param_size = mr_conv->CurrentParamSize();
      CHECK_EQ(param_size, jni_conv->CurrentParamSize());
      if (!mr_conv->IsCurrentParamOnStack()) {
        // Regular non-straddling store.
        __ Store(out_off, in_reg, param_size);
      } else {
        // Store where the input straddles registers and the stack.
        CHECK_EQ(param_size, 8u);
        FrameOffset in_off = mr_conv->CurrentParamStackOffset();
        __ StoreSpanning(out_off, in_reg, in_off);
      }
    }
  }
}

template <PointerSize kPointerSize>
static void SetNativeParameter(JNIMacroAssembler<kPointerSize>* jni_asm,
                               JniCallingConvention* jni_conv,
                               ManagedRegister in_reg) {
  if (jni_conv->IsCurrentParamOnStack()) {
    FrameOffset dest = jni_conv->CurrentParamStackOffset();
    __ StoreRawPtr(dest, in_reg);
  } else {
    if (!jni_conv->CurrentParamRegister().Equals(in_reg)) {
      __ Move(jni_conv->CurrentParamRegister(), in_reg, jni_conv->CurrentParamSize());
    }
  }
}

JniCompiledMethod ArtQuickJniCompileMethod(const CompilerOptions& compiler_options,
                                           uint32_t access_flags,
                                           uint32_t method_idx,
                                           const DexFile& dex_file) {
  if (Is64BitInstructionSet(compiler_options.GetInstructionSet())) {
    return ArtJniCompileMethodInternal<PointerSize::k64>(
        compiler_options, access_flags, method_idx, dex_file);
  } else {
    return ArtJniCompileMethodInternal<PointerSize::k32>(
        compiler_options, access_flags, method_idx, dex_file);
  }
}

}  // namespace art