Lines matching refs:temp0: uses of the temp0 scratch register in ART's String.compareTo() ARM intrinsics (art/compiler/optimizing/intrinsics_arm_vixl.cc), grouped by function below.
In VisitStringCompareTo():
  613  const vixl32::Register temp0 = RegisterFrom(locations->GetTemp(0));  (local declaration)
  648  __ Lsr(temp0, temp3, 1u);
  652  __ Ldr(temp0, MemOperand(str, count_offset));
  656  __ Subs(out, temp0, temp1);
  665  __ mov(gt, temp0, temp1);
  671  __ CompareAndBranchIfZero(temp0, &end, mirror::kUseStringCompression);
  687  __ add(ne, temp0, temp0, temp0);
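
Lines 648-671 decode ART's compressed string count word and set up the comparison: bit 0 of the count field is the compression flag (0 means compressed 8-bit chars, 1 means uncompressed 16-bit chars), and the character length is count >> 1. A minimal C++ sketch of that prologue logic, with illustrative names (CompareToPrologue, min_length) that are not ART's:

  #include <algorithm>
  #include <cstdint>

  // Sketch only: models in plain C++ what the matched assembly does with
  // the count words loaded from the two String objects.
  int32_t CompareToPrologue(uint32_t count_str, uint32_t count_arg,
                            uint32_t* min_length) {
    uint32_t len_str = count_str >> 1;  // __ Lsr(temp0, temp3, 1u): drop the flag bit.
    uint32_t len_arg = count_arg >> 1;
    // __ Subs(out, temp0, temp1): the length difference is the compareTo()
    // result whenever one string is a prefix of the other.
    int32_t out = static_cast<int32_t>(len_str) - static_cast<int32_t>(len_arg);
    // __ mov(gt, temp0, temp1): keep the smaller length for the compare loop.
    *min_length = std::min(len_str, len_arg);
    // __ CompareAndBranchIfZero(temp0, &end, ...): if the shorter string is
    // empty, out already holds the answer.
    return out;
  }
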
In GenerateStringCompareToLoop():
  710  const vixl32::Register temp0 = RegisterFrom(locations->GetTemp(0));  (local declaration)
  751  __ Subs(temp0, temp0, (mirror::kUseStringCompression ? 8 : 4));
  757  __ Subs(temp0, temp0, 4); // 4 bytes previously compared.
  762  __ Sub(temp0, temp0, 2);
  785  __ Cmp(temp0, Operand(temp1, vixl32::LSR, (mirror::kUseStringCompression ? 3 : 4)));
  825  __ Add(temp0, temp0, temp0); // Unlike LSL, this ADD is always 16-bit.
  838  __ Sbc(temp0, temp0, 0); // Complete the move of the compression flag.
  854  __ Subs(temp0, temp0, 2);
  864  __ Lsrs(temp0, temp0, 1u);
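
In the loop, temp0 tracks what remains to compare: with string compression it is a byte count (hence the 8-byte decrement at line 751, after the char count was doubled for uncompressed data at line 687), otherwise a count of 16-bit chars, consumed 4 per iteration. The LSRS and SBC matches at lines 864 and 838 both lean on the ARM carry flag: LSRS by 1 shifts bit 0, here the compression flag, into C while leaving the length in the register, and SBC with a zero operand subtracts 1 exactly when C is clear, which is how a flag previously shifted into the carry gets folded back into the counter without a branch. A plain C++ model of those two instruction semantics (the ARM behavior is architectural, the packing of flag and length into one word is ART's, and the names below are illustrative):

  #include <cstdint>

  // Lsrs(rd, rn, 1u): rd = rn >> 1, and the C flag receives the bit shifted
  // out, here the string-compression flag stored in bit 0 of the count word.
  struct Lsrs1Result {
    uint32_t value;
    bool carry;
  };

  Lsrs1Result Lsrs1(uint32_t rn) {
    return {rn >> 1, (rn & 1u) != 0};
  }

  // Sbc(rd, rn, 0) computes rn - 0 - (carry ? 0 : 1), folding the flag
  // captured by a preceding LSRS back into the counter without a branch.
  uint32_t Sbc0(uint32_t rn, bool carry) {
    return rn - (carry ? 0u : 1u);
  }

The ADD at line 825 doubles temp0 in place of an LSL for code size: as its comment notes, that register-register ADD always has a 16-bit Thumb encoding.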