Lines Matching refs:masm
170 #define __ masm->
186 static void MoveFPToInt(LocationSummary* locations, bool is64bit, MacroAssembler* masm) { in MoveFPToInt() argument
193 static void MoveIntToFP(LocationSummary* locations, bool is64bit, MacroAssembler* masm) { in MoveIntToFP() argument
252 static void GenerateReverseBytes(MacroAssembler* masm, in GenerateReverseBytes() argument
286 MacroAssembler* masm) { in GenReverseBytes() argument
289 GenerateReverseBytes(masm, type, CPURegisterFrom(in, type), CPURegisterFrom(out, type)); in GenReverseBytes()
318 MacroAssembler* masm) { in GenNumberOfLeadingZeros() argument
345 MacroAssembler* masm) { in GenNumberOfTrailingZeros() argument
373 MacroAssembler* masm) { in GenReverse() argument
398 static void GenBitCount(HInvoke* instr, DataType::Type type, MacroAssembler* masm) { in GenBitCount() argument
403 UseScratchRegisterScope temps(masm); in GenBitCount()
431 static void GenHighestOneBit(HInvoke* invoke, DataType::Type type, MacroAssembler* masm) { in GenHighestOneBit() argument
434 UseScratchRegisterScope temps(masm); in GenHighestOneBit()
464 static void GenLowestOneBit(HInvoke* invoke, DataType::Type type, MacroAssembler* masm) { in GenLowestOneBit() argument
467 UseScratchRegisterScope temps(masm); in GenLowestOneBit()
506 MacroAssembler* masm = GetVIXLAssembler(); in VisitMathSqrt() local
516 MacroAssembler* masm = GetVIXLAssembler(); in VisitMathCeil() local
526 MacroAssembler* masm = GetVIXLAssembler(); in VisitMathFloor() local
536 MacroAssembler* masm = GetVIXLAssembler(); in VisitMathRint() local
548 static void GenMathRound(HInvoke* invoke, bool is_double, vixl::aarch64::MacroAssembler* masm) { in GenMathRound() argument
606 MacroAssembler* masm = GetVIXLAssembler(); in VisitMemoryPeekByte() local
616 MacroAssembler* masm = GetVIXLAssembler(); in VisitMemoryPeekIntNative() local
626 MacroAssembler* masm = GetVIXLAssembler(); in VisitMemoryPeekLongNative() local
636 MacroAssembler* masm = GetVIXLAssembler(); in VisitMemoryPeekShortNative() local
653 MacroAssembler* masm = GetVIXLAssembler(); in VisitMemoryPokeByte() local
663 MacroAssembler* masm = GetVIXLAssembler(); in VisitMemoryPokeIntNative() local
673 MacroAssembler* masm = GetVIXLAssembler(); in VisitMemoryPokeLongNative() local
683 MacroAssembler* masm = GetVIXLAssembler(); in VisitMemoryPokeShortNative() local
717 MacroAssembler* masm = codegen->GetVIXLAssembler(); in GenUnsafeGet() local
846 MacroAssembler* masm = codegen->GetVIXLAssembler(); in GenUnsafePut() local
857 UseScratchRegisterScope temps(masm); in GenUnsafePut()
971 MacroAssembler* masm = assembler->GetVIXLAssembler(); in EmitLoadExclusive() local
1025 MacroAssembler* masm = assembler->GetVIXLAssembler(); in EmitStoreExclusive() local
1088 MacroAssembler* masm = assembler->GetVIXLAssembler(); in GenerateCompareAndSet() local
1185 MacroAssembler* masm = assembler->GetVIXLAssembler(); in EmitNativeCode() local
1210 UseScratchRegisterScope temps(masm); in EmitNativeCode()
1274 MacroAssembler* masm = codegen->GetVIXLAssembler(); in GenUnsafeCas() local
1290 UseScratchRegisterScope temps(masm); in GenUnsafeCas()
1401 MacroAssembler* masm = codegen->GetVIXLAssembler(); in GenerateGetAndUpdate() local
1402 UseScratchRegisterScope temps(masm); in GenerateGetAndUpdate()
1445 GenerateReverseBytes(masm, load_store_type, old_value_reg, old_value_reg); in GenerateGetAndUpdate()
1458 GenerateReverseBytes(masm, load_store_type, new_value, new_value); in GenerateGetAndUpdate()
1495 MacroAssembler* masm = GetVIXLAssembler(); in VisitStringCompareTo() local
1572 UseScratchRegisterScope scratch_scope(masm); in VisitStringCompareTo()
1735 MacroAssembler* masm = GetVIXLAssembler(); in VisitStringEquals() local
1742 UseScratchRegisterScope scratch_scope(masm); in VisitStringEquals()
1897 MacroAssembler* masm, in GenerateVisitStringIndexOf() argument
1987 MacroAssembler* masm = GetVIXLAssembler(); in VisitStringNewStringFromBytes() local
2032 MacroAssembler* masm = GetVIXLAssembler(); in VisitStringNewStringFromString() local
2240 MacroAssembler* masm = GetVIXLAssembler(); in VisitStringGetCharsNoCheck() local
2265 UseScratchRegisterScope temps(masm); in VisitStringGetCharsNoCheck()
2416 static void CheckSystemArrayCopyPosition(MacroAssembler* masm, in CheckSystemArrayCopyPosition() argument
2463 static void GenSystemArrayCopyAddresses(MacroAssembler* masm, in GenSystemArrayCopyAddresses() argument
2505 MacroAssembler* masm = GetVIXLAssembler(); in VisitSystemArrayCopyChar() local
2544 CheckSystemArrayCopyPosition(masm, in VisitSystemArrayCopyChar()
2552 CheckSystemArrayCopyPosition(masm, in VisitSystemArrayCopyChar()
2564 GenSystemArrayCopyAddresses(masm, in VisitSystemArrayCopyChar()
2577 UseScratchRegisterScope temps(masm); in VisitSystemArrayCopyChar()
2672 MacroAssembler* masm = GetVIXLAssembler(); in VisitSystemArrayCopy() local
2755 CheckSystemArrayCopyPosition(masm, in VisitSystemArrayCopy()
2764 CheckSystemArrayCopyPosition(masm, in VisitSystemArrayCopy()
2774 UseScratchRegisterScope temps(masm); in VisitSystemArrayCopy()
3056 GenSystemArrayCopyAddresses(masm, in VisitSystemArrayCopy()
3094 GenSystemArrayCopyAddresses(masm, in VisitSystemArrayCopy()
3128 MacroAssembler* masm) { in GenIsInfinite() argument
3143 MoveFPToInt(locations, is64bit, masm); in GenIsInfinite()
3181 MacroAssembler* masm = GetVIXLAssembler(); in VisitIntegerValueOf() local
3184 UseScratchRegisterScope temps(masm); in VisitIntegerValueOf()
3243 MacroAssembler* masm = GetVIXLAssembler(); in VisitReferenceGetReferent() local
3254 UseScratchRegisterScope temps(masm); in VisitReferenceGetReferent()
3263 UseScratchRegisterScope temps(masm); in VisitReferenceGetReferent()
3301 MacroAssembler* masm = codegen_->GetVIXLAssembler(); in VisitReferenceRefersTo() local
3302 UseScratchRegisterScope temps(masm); in VisitReferenceRefersTo()
3358 MacroAssembler* masm = GetVIXLAssembler(); in VisitThreadInterrupted() local
3360 UseScratchRegisterScope temps(masm); in VisitThreadInterrupted()
3398 MacroAssembler* masm = GetVIXLAssembler(); in VisitCRC32Update() local
3410 UseScratchRegisterScope temps(masm); in VisitCRC32Update()
3427 static void GenerateCodeForCalculationCRC32ValueOfBytes(MacroAssembler* masm, in GenerateCodeForCalculationCRC32ValueOfBytes() argument
3451 UseScratchRegisterScope temps(masm); in GenerateCodeForCalculationCRC32ValueOfBytes()
3550 MacroAssembler* masm = GetVIXLAssembler(); in VisitCRC32UpdateBytes() local
3577 GenerateCodeForCalculationCRC32ValueOfBytes(masm, crc, ptr, length, out); in VisitCRC32UpdateBytes()
3612 MacroAssembler* masm = GetVIXLAssembler(); in VisitCRC32UpdateByteBuffer() local
3622 GenerateCodeForCalculationCRC32ValueOfBytes(masm, crc, ptr, length, out); in VisitCRC32UpdateByteBuffer()
3639 MacroAssembler* masm = GetVIXLAssembler(); in VisitFP16ToFloat() local
3640 UseScratchRegisterScope scratch_scope(masm); in VisitFP16ToFloat()
3662 MacroAssembler* masm = GetVIXLAssembler(); in VisitFP16ToHalf() local
3663 UseScratchRegisterScope scratch_scope(masm); in VisitFP16ToHalf()
3675 MacroAssembler* masm, in GenerateFP16Round() argument
3679 UseScratchRegisterScope scratch_scope(masm); in GenerateFP16Round()
3697 MacroAssembler* masm = GetVIXLAssembler(); in VisitFP16Floor() local
3698 auto roundOp = [masm](const VRegister& out, const VRegister& in) { in VisitFP16Floor()
3701 GenerateFP16Round(invoke, codegen_, masm, roundOp); in VisitFP16Floor()
3713 MacroAssembler* masm = GetVIXLAssembler(); in VisitFP16Ceil() local
3714 auto roundOp = [masm](const VRegister& out, const VRegister& in) { in VisitFP16Ceil()
3717 GenerateFP16Round(invoke, codegen_, masm, roundOp); in VisitFP16Ceil()
3729 MacroAssembler* masm = GetVIXLAssembler(); in VisitFP16Rint() local
3730 auto roundOp = [masm](const VRegister& out, const VRegister& in) { in VisitFP16Rint()
3733 GenerateFP16Round(invoke, codegen_, masm, roundOp); in VisitFP16Rint()
3739 MacroAssembler* masm, in GenerateFP16Compare() argument
3753 MacroAssembler* masm, in GenerateFP16Compare() argument
3755 auto compareOp = [masm, cond](const Register out, const VRegister& in0, const VRegister& in1) { in GenerateFP16Compare()
3759 GenerateFP16Compare(invoke, codegen, masm, compareOp); in GenerateFP16Compare()
3773 MacroAssembler* masm = GetVIXLAssembler(); in VisitFP16Greater() local
3774 GenerateFP16Compare(invoke, codegen_, masm, gt); in VisitFP16Greater()
3788 MacroAssembler* masm = GetVIXLAssembler(); in VisitFP16GreaterEquals() local
3789 GenerateFP16Compare(invoke, codegen_, masm, ge); in VisitFP16GreaterEquals()
3803 MacroAssembler* masm = GetVIXLAssembler(); in VisitFP16Less() local
3804 GenerateFP16Compare(invoke, codegen_, masm, mi); in VisitFP16Less()
3818 MacroAssembler* masm = GetVIXLAssembler(); in VisitFP16LessEquals() local
3819 GenerateFP16Compare(invoke, codegen_, masm, ls); in VisitFP16LessEquals()
3824 MacroAssembler* masm = codegen->GetVIXLAssembler(); in GenerateDivideUnsigned() local
3865 MacroAssembler* masm = codegen_->GetVIXLAssembler(); in VisitMathMultiplyHigh() local
3944 MacroAssembler* masm = codegen->GetVIXLAssembler(); in GenerateSubTypeObjectCheckNoReadBarrier() local
3954 UseScratchRegisterScope temps(masm); in GenerateSubTypeObjectCheckNoReadBarrier()
3981 MacroAssembler* masm = codegen->GetVIXLAssembler(); in GenerateVarHandleAccessModeAndVarTypeChecks() local
3988 UseScratchRegisterScope temps(masm); in GenerateVarHandleAccessModeAndVarTypeChecks()
4031 MacroAssembler* masm = codegen->GetVIXLAssembler(); in GenerateVarHandleStaticFieldCheck() local
4036 UseScratchRegisterScope temps(masm); in GenerateVarHandleStaticFieldCheck()
4048 MacroAssembler* masm = codegen->GetVIXLAssembler(); in GenerateVarHandleInstanceFieldChecks() local
4058 UseScratchRegisterScope temps(masm); in GenerateVarHandleInstanceFieldChecks()
4093 MacroAssembler* masm = codegen->GetVIXLAssembler(); in GenerateVarHandleArrayChecks() local
4111 UseScratchRegisterScope temps(masm); in GenerateVarHandleArrayChecks()
4223 MacroAssembler* masm = codegen->GetVIXLAssembler(); in GenerateVarHandleTarget() local
4427 MacroAssembler* masm = codegen->GetVIXLAssembler(); in GenerateVarHandleGet() local
4460 UseScratchRegisterScope temps(masm); in GenerateVarHandleGet()
4486 GenerateReverseBytes(masm, type, load_reg, out); in GenerateVarHandleGet()
4542 MacroAssembler* masm = codegen->GetVIXLAssembler(); in GenerateVarHandleSet() local
4561 UseScratchRegisterScope temps(masm); in GenerateVarHandleSet()
4580 GenerateReverseBytes(masm, value_type, source, temp); in GenerateVarHandleSet()
4702 MacroAssembler* masm, in MoveToTempIfFpRegister() argument
4732 MacroAssembler* masm = codegen->GetVIXLAssembler(); in GenerateVarHandleCompareAndSetOrExchange() local
4756 UseScratchRegisterScope temps(masm); in GenerateVarHandleCompareAndSetOrExchange()
4765 Register expected_reg = MoveToTempIfFpRegister(expected, value_type, masm, &temps); in GenerateVarHandleCompareAndSetOrExchange()
4766 Register new_value_reg = MoveToTempIfFpRegister(new_value, value_type, masm, &temps); in GenerateVarHandleCompareAndSetOrExchange()
4792 GenerateReverseBytes(masm, cas_type, expected_reg, temp); in GenerateVarHandleCompareAndSetOrExchange()
4797 GenerateReverseBytes(masm, cas_type, new_value_reg, temp); in GenerateVarHandleCompareAndSetOrExchange()
4892 GenerateReverseBytes(masm, value_type, old_value, out); in GenerateVarHandleCompareAndSetOrExchange()
5032 MacroAssembler* masm = codegen->GetVIXLAssembler(); in GenerateVarHandleGetAndUpdate() local
5056 UseScratchRegisterScope temps(masm); in GenerateVarHandleGetAndUpdate()
5082 arg = MoveToTempIfFpRegister(arg, value_type, masm, &temps); in GenerateVarHandleGetAndUpdate()
5110 GenerateReverseBytes(masm, load_store_type, arg, temp); in GenerateVarHandleGetAndUpdate()
5127 GenerateReverseBytes(masm, value_type, old_value, out); in GenerateVarHandleGetAndUpdate()
5278 MacroAssembler* masm = codegen->GetVIXLAssembler(); in EmitByteArrayViewCode() local
5299 UseScratchRegisterScope temps(masm); in EmitByteArrayViewCode()
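
The matches above all revolve around one idiom: a code-generation helper receives the vixl::aarch64::MacroAssembler*, borrows temporaries through UseScratchRegisterScope, and emits instructions via the __ macro shown in the first match (#define __ masm->). The following is a minimal sketch of that idiom, assuming VIXL's aarch64 API; the helper name EmitLoadAndByteSwap and its arguments are illustrative and do not appear in the file indexed above.

    #include "aarch64/macro-assembler-aarch64.h"

    using vixl::aarch64::MacroAssembler;
    using vixl::aarch64::MemOperand;
    using vixl::aarch64::Register;
    using vixl::aarch64::UseScratchRegisterScope;

    #define __ masm->

    // Hypothetical helper, written in the style of the static Gen*/Generate*
    // helpers matched above: load a value and reverse its byte order.
    static void EmitLoadAndByteSwap(MacroAssembler* masm,
                                    const Register& out,
                                    const MemOperand& addr) {
      UseScratchRegisterScope temps(masm);           // Borrow scratch registers.
      Register tmp = temps.AcquireSameSizeAs(out);   // Same width as the result.
      __ Ldr(tmp, addr);                             // __ expands to masm->.
      __ Rev(out, tmp);                              // Byte-reverse into out.
    }

    #undef __

The same division of labor accounts for the rest of the matches: GenerateReverseBytes is such a helper, reused by the Unsafe, compare-and-set, and VarHandle paths, while the FP16 visitors capture masm in a small lambda (roundOp/compareOp) and pass it to the shared GenerateFP16Round/GenerateFP16Compare routines.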