/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "jni_macro_assembler_x86.h"

#include "base/casts.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "thread.h"
#include "utils/assembler.h"

namespace art {
namespace x86 {

static Register GetScratchRegister() {
  // ECX is an argument register on entry and gets spilled in BuildFrame().
  // After that, we can use it as a scratch register.
  return ECX;
}

// Slowpath entered when Thread::Current()->_exception is non-null.
class X86ExceptionSlowPath final : public SlowPath {
 public:
  explicit X86ExceptionSlowPath(size_t stack_adjust) : stack_adjust_(stack_adjust) {}
  void Emit(Assembler* sp_asm) override;
 private:
  const size_t stack_adjust_;
};

static dwarf::Reg DWARFReg(Register reg) {
  return dwarf::Reg::X86Core(static_cast<int>(reg));
}

constexpr size_t kFramePointerSize = 4;

static constexpr size_t kNativeStackAlignment = 16;
static_assert(kNativeStackAlignment == kStackAlignment);

#define __ asm_.

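// Frame layout built below, from higher to lower addresses (a sketch):
//   return address               (pushed by the caller)
//   spilled callee-save GPRs
//   local area of |adjust| bytes
//   ArtMethod*                   (only if |method_reg| is given; ends up at ESP)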
void X86JNIMacroAssembler::BuildFrame(size_t frame_size,
                                      ManagedRegister method_reg,
                                      ArrayRef<const ManagedRegister> spill_regs) {
  DCHECK_EQ(CodeSize(), 0U);  // Nothing emitted yet.
  cfi().SetCurrentCFAOffset(4);  // Return address on stack.
  if (frame_size == kFramePointerSize) {
    // For @CriticalNative tail call.
    CHECK(method_reg.IsNoRegister());
    CHECK(spill_regs.empty());
  } else if (method_reg.IsNoRegister()) {
    CHECK_ALIGNED(frame_size, kNativeStackAlignment);
  } else {
    CHECK_ALIGNED(frame_size, kStackAlignment);
  }
  int gpr_count = 0;
  for (int i = spill_regs.size() - 1; i >= 0; --i) {
    Register spill = spill_regs[i].AsX86().AsCpuRegister();
    __ pushl(spill);
    gpr_count++;
    cfi().AdjustCFAOffset(kFramePointerSize);
    cfi().RelOffset(DWARFReg(spill), 0);
  }

  // The frame adjustment excludes the return address (already pushed by the
  // caller) and the ArtMethod* slot (pushed below, if any).
  int32_t adjust = frame_size - gpr_count * kFramePointerSize -
                   kFramePointerSize /*return address*/ -
                   (method_reg.IsRegister() ? kFramePointerSize /*method*/ : 0u);
  if (adjust != 0) {
    __ addl(ESP, Immediate(-adjust));
    cfi().AdjustCFAOffset(adjust);
  }
  if (method_reg.IsRegister()) {
    __ pushl(method_reg.AsX86().AsCpuRegister());
    cfi().AdjustCFAOffset(kFramePointerSize);
  }
  DCHECK_EQ(static_cast<size_t>(cfi().GetCurrentCFAOffset()), frame_size);
}

void X86JNIMacroAssembler::RemoveFrame(size_t frame_size,
                                       ArrayRef<const ManagedRegister> spill_regs,
                                       bool may_suspend ATTRIBUTE_UNUSED) {
  CHECK_ALIGNED(frame_size, kNativeStackAlignment);
  cfi().RememberState();
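  // Tear the frame down in reverse order: drop the local area and the ArtMethod*
  // slot, pop the callee-save registers, then return.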
  // -kFramePointerSize for ArtMethod*.
  int adjust = frame_size - spill_regs.size() * kFramePointerSize - kFramePointerSize;
  if (adjust != 0) {
    __ addl(ESP, Immediate(adjust));
    cfi().AdjustCFAOffset(-adjust);
  }
  for (size_t i = 0; i < spill_regs.size(); ++i) {
    Register spill = spill_regs[i].AsX86().AsCpuRegister();
    __ popl(spill);
    cfi().AdjustCFAOffset(-static_cast<int>(kFramePointerSize));
    cfi().Restore(DWARFReg(spill));
  }
  __ ret();
  // The CFI should be restored for any code that follows the exit block.
  cfi().RestoreState();
  cfi().DefCFAOffset(frame_size);
}

void X86JNIMacroAssembler::IncreaseFrameSize(size_t adjust) {
  if (adjust != 0u) {
    CHECK_ALIGNED(adjust, kNativeStackAlignment);
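    // Grow the stack with a single ADD of a negative immediate (equivalent to SUB).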
    __ addl(ESP, Immediate(-adjust));
    cfi().AdjustCFAOffset(adjust);
  }
}

static void DecreaseFrameSizeImpl(X86Assembler* assembler, size_t adjust) {
  if (adjust != 0u) {
    CHECK_ALIGNED(adjust, kNativeStackAlignment);
    assembler->addl(ESP, Immediate(adjust));
    assembler->cfi().AdjustCFAOffset(-adjust);
  }
}

void X86JNIMacroAssembler::DecreaseFrameSize(size_t adjust) {
  DecreaseFrameSizeImpl(&asm_, adjust);
}

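// Store |msrc| to a frame slot; the register kind (GPR, register pair, x87, XMM)
// together with |size| selects the store instruction.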
void X86JNIMacroAssembler::Store(FrameOffset offs, ManagedRegister msrc, size_t size) {
  X86ManagedRegister src = msrc.AsX86();
  if (src.IsNoRegister()) {
    CHECK_EQ(0u, size);
  } else if (src.IsCpuRegister()) {
    CHECK_EQ(4u, size);
    __ movl(Address(ESP, offs), src.AsCpuRegister());
  } else if (src.IsRegisterPair()) {
    CHECK_EQ(8u, size);
    __ movl(Address(ESP, offs), src.AsRegisterPairLow());
    __ movl(Address(ESP, FrameOffset(offs.Int32Value() + 4)), src.AsRegisterPairHigh());
  } else if (src.IsX87Register()) {
    if (size == 4) {
      __ fstps(Address(ESP, offs));
    } else {
      __ fstpl(Address(ESP, offs));
    }
  } else {
    CHECK(src.IsXmmRegister());
    if (size == 4) {
      __ movss(Address(ESP, offs), src.AsXmmRegister());
    } else {
      __ movsd(Address(ESP, offs), src.AsXmmRegister());
    }
  }
}

void X86JNIMacroAssembler::StoreRef(FrameOffset dest, ManagedRegister msrc) {
  X86ManagedRegister src = msrc.AsX86();
  CHECK(src.IsCpuRegister());
  __ movl(Address(ESP, dest), src.AsCpuRegister());
}

void X86JNIMacroAssembler::StoreRawPtr(FrameOffset dest, ManagedRegister msrc) {
  X86ManagedRegister src = msrc.AsX86();
  CHECK(src.IsCpuRegister());
  __ movl(Address(ESP, dest), src.AsCpuRegister());
}

void X86JNIMacroAssembler::StoreImmediateToFrame(FrameOffset dest, uint32_t imm) {
  __ movl(Address(ESP, dest), Immediate(imm));
}

void X86JNIMacroAssembler::StoreStackOffsetToThread(ThreadOffset32 thr_offs, FrameOffset fr_offs) {
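  // Materialize the address of the frame slot and store it into the Thread field;
  // on x86, Thread::Current() is addressed through the FS segment.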
  Register scratch = GetScratchRegister();
  __ leal(scratch, Address(ESP, fr_offs));
  __ fs()->movl(Address::Absolute(thr_offs), scratch);
}

void X86JNIMacroAssembler::StoreStackPointerToThread(ThreadOffset32 thr_offs) {
  __ fs()->movl(Address::Absolute(thr_offs), ESP);
}

void X86JNIMacroAssembler::StoreSpanning(FrameOffset /*dst*/,
                                         ManagedRegister /*src*/,
                                         FrameOffset /*in_off*/) {
  UNIMPLEMENTED(FATAL);  // This case currently exists only for ARM.
}

void X86JNIMacroAssembler::Load(ManagedRegister mdest, FrameOffset src, size_t size) {
  X86ManagedRegister dest = mdest.AsX86();
  if (dest.IsNoRegister()) {
    CHECK_EQ(0u, size);
  } else if (dest.IsCpuRegister()) {
    CHECK_EQ(4u, size);
    __ movl(dest.AsCpuRegister(), Address(ESP, src));
  } else if (dest.IsRegisterPair()) {
    CHECK_EQ(8u, size);
    __ movl(dest.AsRegisterPairLow(), Address(ESP, src));
    __ movl(dest.AsRegisterPairHigh(), Address(ESP, FrameOffset(src.Int32Value() + 4)));
  } else if (dest.IsX87Register()) {
    if (size == 4) {
      __ flds(Address(ESP, src));
    } else {
      __ fldl(Address(ESP, src));
    }
  } else {
    CHECK(dest.IsXmmRegister());
    if (size == 4) {
      __ movss(dest.AsXmmRegister(), Address(ESP, src));
    } else {
      __ movsd(dest.AsXmmRegister(), Address(ESP, src));
    }
  }
}

void X86JNIMacroAssembler::LoadFromThread(ManagedRegister mdest, ThreadOffset32 src, size_t size) {
  X86ManagedRegister dest = mdest.AsX86();
  if (dest.IsNoRegister()) {
    CHECK_EQ(0u, size);
  } else if (dest.IsCpuRegister()) {
    if (size == 1u) {
      __ fs()->movzxb(dest.AsCpuRegister(), Address::Absolute(src));
    } else {
      CHECK_EQ(4u, size);
      __ fs()->movl(dest.AsCpuRegister(), Address::Absolute(src));
    }
  } else if (dest.IsRegisterPair()) {
    CHECK_EQ(8u, size);
    __ fs()->movl(dest.AsRegisterPairLow(), Address::Absolute(src));
    __ fs()->movl(dest.AsRegisterPairHigh(), Address::Absolute(ThreadOffset32(src.Int32Value() + 4)));
  } else if (dest.IsX87Register()) {
    if (size == 4) {
      __ fs()->flds(Address::Absolute(src));
    } else {
      __ fs()->fldl(Address::Absolute(src));
    }
  } else {
    CHECK(dest.IsXmmRegister());
    if (size == 4) {
      __ fs()->movss(dest.AsXmmRegister(), Address::Absolute(src));
    } else {
      __ fs()->movsd(dest.AsXmmRegister(), Address::Absolute(src));
    }
  }
}

void X86JNIMacroAssembler::LoadRef(ManagedRegister mdest, FrameOffset src) {
  X86ManagedRegister dest = mdest.AsX86();
  CHECK(dest.IsCpuRegister());
  __ movl(dest.AsCpuRegister(), Address(ESP, src));
}

void X86JNIMacroAssembler::LoadRef(ManagedRegister mdest, ManagedRegister base, MemberOffset offs,
                                   bool unpoison_reference) {
  X86ManagedRegister dest = mdest.AsX86();
  CHECK(dest.IsCpuRegister() && base.AsX86().IsCpuRegister());
  __ movl(dest.AsCpuRegister(), Address(base.AsX86().AsCpuRegister(), offs));
  if (unpoison_reference) {
    __ MaybeUnpoisonHeapReference(dest.AsCpuRegister());
  }
}

void X86JNIMacroAssembler::LoadRawPtr(ManagedRegister mdest,
                                      ManagedRegister base,
                                      Offset offs) {
  X86ManagedRegister dest = mdest.AsX86();
  CHECK(dest.IsCpuRegister() && base.AsX86().IsCpuRegister());
  __ movl(dest.AsCpuRegister(), Address(base.AsX86().AsCpuRegister(), offs));
}

void X86JNIMacroAssembler::LoadRawPtrFromThread(ManagedRegister mdest, ThreadOffset32 offs) {
  X86ManagedRegister dest = mdest.AsX86();
  CHECK(dest.IsCpuRegister());
  __ fs()->movl(dest.AsCpuRegister(), Address::Absolute(offs));
}

void X86JNIMacroAssembler::SignExtend(ManagedRegister mreg, size_t size) {
  X86ManagedRegister reg = mreg.AsX86();
  CHECK(size == 1 || size == 2) << size;
  CHECK(reg.IsCpuRegister()) << reg;
  if (size == 1) {
    __ movsxb(reg.AsCpuRegister(), reg.AsByteRegister());
  } else {
    __ movsxw(reg.AsCpuRegister(), reg.AsCpuRegister());
  }
}

void X86JNIMacroAssembler::ZeroExtend(ManagedRegister mreg, size_t size) {
  X86ManagedRegister reg = mreg.AsX86();
  CHECK(size == 1 || size == 2) << size;
  CHECK(reg.IsCpuRegister()) << reg;
  if (size == 1) {
    __ movzxb(reg.AsCpuRegister(), reg.AsByteRegister());
  } else {
    __ movzxw(reg.AsCpuRegister(), reg.AsCpuRegister());
  }
}

void X86JNIMacroAssembler::MoveArguments(ArrayRef<ArgumentLocation> dests,
                                         ArrayRef<ArgumentLocation> srcs) {
  DCHECK_EQ(dests.size(), srcs.size());
  bool found_hidden_arg = false;
  for (size_t i = 0, arg_count = srcs.size(); i != arg_count; ++i) {
    const ArgumentLocation& src = srcs[i];
    const ArgumentLocation& dest = dests[i];
    DCHECK_EQ(src.GetSize(), dest.GetSize());
    if (src.IsRegister()) {
      if (UNLIKELY(dest.IsRegister())) {
        // Native ABI has only stack arguments but we may pass one "hidden arg" in register.
        CHECK(!found_hidden_arg);
        found_hidden_arg = true;
        DCHECK(
            !dest.GetRegister().Equals(X86ManagedRegister::FromCpuRegister(GetScratchRegister())));
        Move(dest.GetRegister(), src.GetRegister(), dest.GetSize());
      } else {
        Store(dest.GetFrameOffset(), src.GetRegister(), dest.GetSize());
      }
    } else {
      // Delay copying until we have spilled all registers, including the scratch register ECX.
    }
  }
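  // Second pass: copy the stack-to-stack arguments; every register, including the
  // scratch register ECX, has been spilled by now, so Copy() is free to clobber it.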
  for (size_t i = 0, arg_count = srcs.size(); i != arg_count; ++i) {
    const ArgumentLocation& src = srcs[i];
    const ArgumentLocation& dest = dests[i];
    DCHECK_EQ(src.GetSize(), dest.GetSize());
    if (!src.IsRegister()) {
      DCHECK(!dest.IsRegister());
      Copy(dest.GetFrameOffset(), src.GetFrameOffset(), dest.GetSize());
    }
  }
}

void X86JNIMacroAssembler::Move(ManagedRegister mdest, ManagedRegister msrc, size_t size) {
  DCHECK(!mdest.Equals(X86ManagedRegister::FromCpuRegister(GetScratchRegister())));
  X86ManagedRegister dest = mdest.AsX86();
  X86ManagedRegister src = msrc.AsX86();
  if (!dest.Equals(src)) {
    if (dest.IsCpuRegister() && src.IsCpuRegister()) {
      __ movl(dest.AsCpuRegister(), src.AsCpuRegister());
    } else if (src.IsX87Register() && dest.IsXmmRegister()) {
      // Pass via stack and pop X87 register.
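      // A 16-byte temporary keeps ESP aligned to kNativeStackAlignment and is
      // large enough for a double.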
      IncreaseFrameSize(16);
      if (size == 4) {
        CHECK_EQ(src.AsX87Register(), ST0);
        __ fstps(Address(ESP, 0));
        __ movss(dest.AsXmmRegister(), Address(ESP, 0));
      } else {
        CHECK_EQ(src.AsX87Register(), ST0);
        __ fstpl(Address(ESP, 0));
        __ movsd(dest.AsXmmRegister(), Address(ESP, 0));
      }
      DecreaseFrameSize(16);
    } else {
      // TODO: x87, SSE
      UNIMPLEMENTED(FATAL) << ": Move " << dest << ", " << src;
    }
  }
}

void X86JNIMacroAssembler::CopyRef(FrameOffset dest, FrameOffset src) {
  Register scratch = GetScratchRegister();
  __ movl(scratch, Address(ESP, src));
  __ movl(Address(ESP, dest), scratch);
}

void X86JNIMacroAssembler::CopyRef(FrameOffset dest,
                                   ManagedRegister base,
                                   MemberOffset offs,
                                   bool unpoison_reference) {
  Register scratch = GetScratchRegister();
  __ movl(scratch, Address(base.AsX86().AsCpuRegister(), offs));
  if (unpoison_reference) {
    __ MaybeUnpoisonHeapReference(scratch);
  }
  __ movl(Address(ESP, dest), scratch);
}

void X86JNIMacroAssembler::CopyRawPtrFromThread(FrameOffset fr_offs, ThreadOffset32 thr_offs) {
  Register scratch = GetScratchRegister();
  __ fs()->movl(scratch, Address::Absolute(thr_offs));
  __ movl(Address(ESP, fr_offs), scratch);
}

void X86JNIMacroAssembler::CopyRawPtrToThread(ThreadOffset32 thr_offs,
                                              FrameOffset fr_offs,
                                              ManagedRegister mscratch) {
  X86ManagedRegister scratch = mscratch.AsX86();
  CHECK(scratch.IsCpuRegister());
  Load(scratch, fr_offs, 4);
  __ fs()->movl(Address::Absolute(thr_offs), scratch.AsCpuRegister());
}

void X86JNIMacroAssembler::Copy(FrameOffset dest, FrameOffset src, size_t size) {
  DCHECK(size == 4 || size == 8) << size;
  Register scratch = GetScratchRegister();
  __ movl(scratch, Address(ESP, src));
  __ movl(Address(ESP, dest), scratch);
  if (size == 8) {
    __ movl(scratch, Address(ESP, FrameOffset(src.Int32Value() + 4)));
    __ movl(Address(ESP, FrameOffset(dest.Int32Value() + 4)), scratch);
  }
}

void X86JNIMacroAssembler::Copy(FrameOffset /*dst*/,
                                ManagedRegister /*src_base*/,
                                Offset /*src_offset*/,
                                ManagedRegister /*scratch*/,
                                size_t /*size*/) {
  UNIMPLEMENTED(FATAL);
}

void X86JNIMacroAssembler::Copy(ManagedRegister dest_base,
                                Offset dest_offset,
                                FrameOffset src,
                                ManagedRegister scratch,
                                size_t size) {
  CHECK(scratch.IsNoRegister());
  CHECK_EQ(size, 4u);
  __ pushl(Address(ESP, src));
  __ popl(Address(dest_base.AsX86().AsCpuRegister(), dest_offset));
}

void X86JNIMacroAssembler::Copy(FrameOffset dest,
                                FrameOffset src_base,
                                Offset src_offset,
                                ManagedRegister mscratch,
                                size_t size) {
  Register scratch = mscratch.AsX86().AsCpuRegister();
  CHECK_EQ(size, 4u);
  __ movl(scratch, Address(ESP, src_base));
  __ movl(scratch, Address(scratch, src_offset));
  __ movl(Address(ESP, dest), scratch);
}

void X86JNIMacroAssembler::Copy(ManagedRegister dest,
                                Offset dest_offset,
                                ManagedRegister src,
                                Offset src_offset,
                                ManagedRegister scratch,
                                size_t size) {
  CHECK_EQ(size, 4u);
  CHECK(scratch.IsNoRegister());
  __ pushl(Address(src.AsX86().AsCpuRegister(), src_offset));
  __ popl(Address(dest.AsX86().AsCpuRegister(), dest_offset));
}

void X86JNIMacroAssembler::Copy(FrameOffset dest,
                                Offset dest_offset,
                                FrameOffset src,
                                Offset src_offset,
                                ManagedRegister mscratch,
                                size_t size) {
  Register scratch = mscratch.AsX86().AsCpuRegister();
  CHECK_EQ(size, 4u);
  CHECK_EQ(dest.Int32Value(), src.Int32Value());
  __ movl(scratch, Address(ESP, src));
  __ pushl(Address(scratch, src_offset));
  __ popl(Address(scratch, dest_offset));
}

void X86JNIMacroAssembler::MemoryBarrier(ManagedRegister) {
  __ mfence();
}

void X86JNIMacroAssembler::CreateJObject(ManagedRegister mout_reg,
                                         FrameOffset spilled_reference_offset,
                                         ManagedRegister min_reg,
                                         bool null_allowed) {
  X86ManagedRegister out_reg = mout_reg.AsX86();
  X86ManagedRegister in_reg = min_reg.AsX86();
  CHECK(in_reg.IsCpuRegister());
  CHECK(out_reg.IsCpuRegister());
  VerifyObject(in_reg, null_allowed);
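  // The jobject is the address of the spilled reference slot on the stack, or
  // null when the reference itself is null and null_allowed.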
  if (null_allowed) {
    Label null_arg;
    if (!out_reg.Equals(in_reg)) {
      __ xorl(out_reg.AsCpuRegister(), out_reg.AsCpuRegister());
    }
    __ testl(in_reg.AsCpuRegister(), in_reg.AsCpuRegister());
    __ j(kZero, &null_arg);
    __ leal(out_reg.AsCpuRegister(), Address(ESP, spilled_reference_offset));
    __ Bind(&null_arg);
  } else {
    __ leal(out_reg.AsCpuRegister(), Address(ESP, spilled_reference_offset));
  }
}

void X86JNIMacroAssembler::CreateJObject(FrameOffset out_off,
                                         FrameOffset spilled_reference_offset,
                                         bool null_allowed) {
  Register scratch = GetScratchRegister();
  if (null_allowed) {
    Label null_arg;
    __ movl(scratch, Address(ESP, spilled_reference_offset));
    __ testl(scratch, scratch);
    __ j(kZero, &null_arg);
    __ leal(scratch, Address(ESP, spilled_reference_offset));
    __ Bind(&null_arg);
  } else {
    __ leal(scratch, Address(ESP, spilled_reference_offset));
  }
  __ movl(Address(ESP, out_off), scratch);
}

void X86JNIMacroAssembler::VerifyObject(ManagedRegister /*src*/, bool /*could_be_null*/) {
  // TODO: not validating references.
}

void X86JNIMacroAssembler::VerifyObject(FrameOffset /*src*/, bool /*could_be_null*/) {
  // TODO: not validating references.
}

void X86JNIMacroAssembler::Jump(ManagedRegister mbase, Offset offset) {
  X86ManagedRegister base = mbase.AsX86();
  CHECK(base.IsCpuRegister());
  __ jmp(Address(base.AsCpuRegister(), offset.Int32Value()));
}

void X86JNIMacroAssembler::Call(ManagedRegister mbase, Offset offset) {
  X86ManagedRegister base = mbase.AsX86();
  CHECK(base.IsCpuRegister());
  __ call(Address(base.AsCpuRegister(), offset.Int32Value()));
  // TODO: place reference map on call.
}

void X86JNIMacroAssembler::Call(FrameOffset base, Offset offset) {
  Register scratch = GetScratchRegister();
  __ movl(scratch, Address(ESP, base));
  __ call(Address(scratch, offset));
}

void X86JNIMacroAssembler::CallFromThread(ThreadOffset32 offset) {
  __ fs()->call(Address::Absolute(offset));
}

void X86JNIMacroAssembler::GetCurrentThread(ManagedRegister dest) {
  __ fs()->movl(dest.AsX86().AsCpuRegister(),
                Address::Absolute(Thread::SelfOffset<kX86PointerSize>()));
}

void X86JNIMacroAssembler::GetCurrentThread(FrameOffset offset) {
  Register scratch = GetScratchRegister();
  __ fs()->movl(scratch, Address::Absolute(Thread::SelfOffset<kX86PointerSize>()));
  __ movl(Address(ESP, offset), scratch);
}

void X86JNIMacroAssembler::ExceptionPoll(size_t stack_adjust) {
  X86ExceptionSlowPath* slow = new (__ GetAllocator()) X86ExceptionSlowPath(stack_adjust);
  __ GetBuffer()->EnqueueSlowPath(slow);
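  // Branch to the slow path if the thread-local exception field is non-null;
  // the compare reads the Thread field through the FS segment.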
  __ fs()->cmpl(Address::Absolute(Thread::ExceptionOffset<kX86PointerSize>()), Immediate(0));
  __ j(kNotEqual, slow->Entry());
}

std::unique_ptr<JNIMacroLabel> X86JNIMacroAssembler::CreateLabel() {
  return std::unique_ptr<JNIMacroLabel>(new X86JNIMacroLabel());
}

void X86JNIMacroAssembler::Jump(JNIMacroLabel* label) {
  CHECK(label != nullptr);
  __ jmp(X86JNIMacroLabel::Cast(label)->AsX86());
}

void X86JNIMacroAssembler::TestGcMarking(JNIMacroLabel* label, JNIMacroUnaryCondition cond) {
  CHECK(label != nullptr);

  art::x86::Condition x86_cond;
  switch (cond) {
    case JNIMacroUnaryCondition::kZero:
      x86_cond = art::x86::kZero;
      break;
    case JNIMacroUnaryCondition::kNotZero:
      x86_cond = art::x86::kNotZero;
      break;
    default:
      LOG(FATAL) << "Not implemented condition: " << static_cast<int>(cond);
      UNREACHABLE();
  }

  // CMP self->tls32_.is_gc_marking, 0
  // Jcc <Offset>
  DCHECK_EQ(Thread::IsGcMarkingSize(), 4u);
  __ fs()->cmpl(Address::Absolute(Thread::IsGcMarkingOffset<kX86PointerSize>()), Immediate(0));
  __ j(x86_cond, X86JNIMacroLabel::Cast(label)->AsX86());
}

void X86JNIMacroAssembler::Bind(JNIMacroLabel* label) {
  CHECK(label != nullptr);
  __ Bind(X86JNIMacroLabel::Cast(label)->AsX86());
}

#undef __

void X86ExceptionSlowPath::Emit(Assembler* sasm) {
  X86Assembler* sp_asm = down_cast<X86Assembler*>(sasm);
#define __ sp_asm->
  __ Bind(&entry_);
  // Note: the return value is dead.
  if (stack_adjust_ != 0) {  // Fix up the frame.
    DecreaseFrameSizeImpl(sp_asm, stack_adjust_);
  }
  // Pass the exception as an argument in EAX.
  __ fs()->movl(EAX, Address::Absolute(Thread::ExceptionOffset<kX86PointerSize>()));
  __ fs()->call(Address::Absolute(QUICK_ENTRYPOINT_OFFSET(kX86PointerSize, pDeliverException)));
  // This call should never return.
  __ int3();
#undef __
}

}  // namespace x86
}  // namespace art