/art/compiler/optimizing/

code_generator_vector_x86.cc
      78  __ movd(dst, locations->InAt(0).AsRegister<Register>());  in VisitVecReplicateScalar()
      86  __ movd(dst, locations->InAt(0).AsRegister<Register>());  in VisitVecReplicateScalar()
      92  __ movd(dst, locations->InAt(0).AsRegister<Register>());  in VisitVecReplicateScalar()
      98  __ movd(dst, locations->InAt(0).AsRegisterPairLow<Register>());  in VisitVecReplicateScalar()
      99  __ movd(tmp, locations->InAt(0).AsRegisterPairHigh<Register>());  in VisitVecReplicateScalar()
     161  __ movd(locations->Out().AsRegister<Register>(), src);  in VisitVecExtractScalar()
     166  __ movd(locations->Out().AsRegisterPairLow<Register>(), src);  in VisitVecExtractScalar()
     168  __ movd(locations->Out().AsRegisterPairHigh<Register>(), tmp);  in VisitVecExtractScalar()
    1139  __ movd(dst, locations->InAt(0).AsRegister<Register>());  in VisitVecSetScalars()
    1145  __ movd(dst, locations->InAt(0).AsRegisterPairLow<Register>());  in VisitVecSetScalars()
    [all …]

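In the VisitVecReplicateScalar occurrences above, a 32-bit scalar is first moved from a general-purpose register into the low lane of an XMM register with movd; a 64-bit value on 32-bit x86 arrives as a register pair and is moved in two movd steps before being combined and broadcast. The following is a minimal standalone sketch of the equivalent data movement using SSE2 intrinsics, purely illustrative and not ART's generated code; the function names are made up for this example.

    #include <cstdint>
    #include <emmintrin.h>  // SSE2

    // Replicate a 32-bit scalar across all four lanes of an XMM register:
    // movd xmm, r32 followed by a pshufd broadcast.
    static inline __m128i ReplicateScalar32(int32_t value) {
      __m128i v = _mm_cvtsi32_si128(value);        // movd: low lane = value
      return _mm_shuffle_epi32(v, 0x00);           // pshufd: broadcast lane 0
    }

    // On 32-bit x86 a 64-bit scalar lives in a register pair, so the two
    // halves are moved with separate movd instructions and then interleaved.
    static inline __m128i ReplicateScalar64(int32_t lo, int32_t hi) {
      __m128i vlo = _mm_cvtsi32_si128(lo);         // movd low half
      __m128i vhi = _mm_cvtsi32_si128(hi);         // movd high half
      __m128i v64 = _mm_unpacklo_epi32(vlo, vhi);  // punpckldq: form the 64-bit value
      return _mm_unpacklo_epi64(v64, v64);         // broadcast to both 64-bit lanes
    }
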
code_generator_vector_x86_64.cc
      73  __ movd(dst, locations->InAt(0).AsRegister<CpuRegister>(), /*64-bit*/ false);  in VisitVecReplicateScalar()
      81  __ movd(dst, locations->InAt(0).AsRegister<CpuRegister>(), /*64-bit*/ false);  in VisitVecReplicateScalar()
      87  __ movd(dst, locations->InAt(0).AsRegister<CpuRegister>(), /*64-bit*/ false);  in VisitVecReplicateScalar()
      92  __ movd(dst, locations->InAt(0).AsRegister<CpuRegister>(), /*64-bit*/ true);  in VisitVecReplicateScalar()
     148  __ movd(locations->Out().AsRegister<CpuRegister>(), src, /*64-bit*/ false);  in VisitVecExtractScalar()
     152  __ movd(locations->Out().AsRegister<CpuRegister>(), src, /*64-bit*/ true);  in VisitVecExtractScalar()
    1117  __ movd(dst, locations->InAt(0).AsRegister<CpuRegister>());  in VisitVecSetScalars()
    1121  __ movd(dst, locations->InAt(0).AsRegister<CpuRegister>());  // is 64-bit  in VisitVecSetScalars()

code_generator_x86.cc
    1616  __ movd(destination.AsRegister<Register>(), source.AsFpuRegister<XmmRegister>());  in Move32()
    1626  __ movd(destination.AsFpuRegister<XmmRegister>(), source.AsRegister<Register>());  in Move32()
    1666  __ movd(destination.AsRegisterPairLow<Register>(), src_reg);  in Move64()
    1668  __ movd(destination.AsRegisterPairHigh<Register>(), src_reg);  in Move64()
    1763  __ movd(dst.AsRegisterPairLow<Register>(), temp);  in LoadFromMemoryNoBarrier()
    1765  __ movd(dst.AsRegisterPairHigh<Register>(), temp);  in LoadFromMemoryNoBarrier()
    2726  __ movd(EAX, XMM0);  in VisitReturn()
    2735  __ movd(EAX, XMM0);  in VisitReturn()
    2739  __ movd(EDX, XMM1);  in VisitReturn()
    2906  __ movd(hidden_reg, locations->InAt(invoke->GetNumberOfArguments() - 1).AsRegister<Register>());  in VisitInvokeInterface()
    [all …]

intrinsics_x86.cc
     178  __ movd(output.AsRegisterPairLow<Register>(), temp);  in MoveFPToInt()
     180  __ movd(output.AsRegisterPairHigh<Register>(), temp);  in MoveFPToInt()
     182  __ movd(output.AsRegister<Register>(), input.AsFpuRegister<XmmRegister>());  in MoveFPToInt()
     193  __ movd(temp1, input.AsRegisterPairLow<Register>());  in MoveIntToFP()
     194  __ movd(temp2, input.AsRegisterPairHigh<Register>());  in MoveIntToFP()
     198  __ movd(output.AsFpuRegister<XmmRegister>(), input.AsRegister<Register>());  in MoveIntToFP()
    1717  __ movd(output_lo, temp);  in GenUnsafeGet()
    1719  __ movd(output_hi, temp);  in GenUnsafeGet()
    2003  __ movd(temp1, value_lo);  in GenUnsafePut()
    2004  __ movd(temp2, value_hi);  in GenUnsafePut()
    [all …]

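The MoveFPToInt / MoveIntToFP lines above move raw floating-point bits between XMM and general-purpose registers; on 32-bit x86 a 64-bit double has to be split into, or rebuilt from, a register pair. A self-contained sketch of the same bit shuffling with SSE2 intrinsics (illustrative only, not ART's code; helper names are invented here):

    #include <cstdint>
    #include <emmintrin.h>  // SSE2

    // Split a double's raw bits into the lo/hi 32-bit halves that a 32-bit x86
    // register pair would hold, roughly what MoveFPToInt does with two movd's.
    static inline void DoubleBitsToPair(double d, uint32_t* lo, uint32_t* hi) {
      __m128i bits = _mm_castpd_si128(_mm_set_sd(d));
      *lo = static_cast<uint32_t>(_mm_cvtsi128_si32(bits));                      // movd r32, xmm
      *hi = static_cast<uint32_t>(_mm_cvtsi128_si32(_mm_srli_si128(bits, 4)));   // high 32 bits
    }

    // Reverse direction (MoveIntToFP): combine the two halves in an XMM
    // register and reinterpret them as a double.
    static inline double PairToDoubleBits(uint32_t lo, uint32_t hi) {
      __m128i vlo = _mm_cvtsi32_si128(static_cast<int32_t>(lo));   // movd xmm, r32
      __m128i vhi = _mm_cvtsi32_si128(static_cast<int32_t>(hi));
      __m128i bits = _mm_unpacklo_epi32(vlo, vhi);                 // punpckldq
      return _mm_cvtsd_f64(_mm_castsi128_pd(bits));
    }
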
code_generator_x86_64.cc
    1966  __ movd(dest, source.AsFpuRegister<XmmRegister>());  in Move()
    1985  __ movd(dest, source.AsRegister<CpuRegister>());  in Move()
    2923  __ movd(CpuRegister(RAX), XmmRegister(XMM0), /* is64bit= */ false);  in VisitReturn()
    2933  __ movd(CpuRegister(RAX), XmmRegister(XMM0), /* is64bit= */ true);  in VisitReturn()
    5400  __ movd(*temp, value.AsFpuRegister<XmmRegister>(), /*is64bit=*/ false);  in Bswap()
    5402  __ movd(value.AsFpuRegister<XmmRegister>(), *temp, /*is64bit=*/ false);  in Bswap()
    5407  __ movd(*temp, value.AsFpuRegister<XmmRegister>(), /*is64bit=*/ true);  in Bswap()
    5409  __ movd(value.AsFpuRegister<XmmRegister>(), *temp, /*is64bit=*/ true);  in Bswap()
    6495  __ movd(reg, CpuRegister(TMP));  in Exchange32()
    6501  __ movd(reg, CpuRegister(TMP));  in Exchange64()
    [all …]

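In the Bswap() lines above, a floating-point value is bounced through a general-purpose temp with movd so that the bswap instruction (which only operates on GPRs) can reverse its bytes, then moved back. A tiny portable sketch of the same idea, assuming a GCC/Clang toolchain for __builtin_bswap32, and again only an illustration rather than ART's emitted code:

    #include <cstdint>
    #include <cstring>

    // Byte-swap the storage representation of a float by bouncing it through
    // an integer register, mirroring the movd / bswap / movd sequence.
    static inline float BswapFloat(float f) {
      uint32_t bits;
      std::memcpy(&bits, &f, sizeof(bits));   // movd r32, xmm
      bits = __builtin_bswap32(bits);         // bswap r32
      std::memcpy(&f, &bits, sizeof(f));      // movd xmm, r32
      return f;
    }
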
intrinsics_x86_64.cc
     161  __ movd(output.AsRegister<CpuRegister>(), input.AsFpuRegister<XmmRegister>(), is64bit);  in MoveFPToInt()
     167  __ movd(output.AsFpuRegister<XmmRegister>(), input.AsRegister<CpuRegister>(), is64bit);  in MoveIntToFP()
    2369  __ movd(out.AsFpuRegister<XmmRegister>(), CpuRegister(RAX), is64bit);  in GenCompareAndSetOrExchangeFP()
    4425  __ movd(out.AsFpuRegister<XmmRegister>(), temp.AsRegister<CpuRegister>(), is64bit);  in GenerateVarHandleGetAndSet()
    4727  __ movd(CpuRegister(RAX), fptemp, is64bit);  in GenerateVarHandleGetAndAdd()
    4731  __ movd(fptemp, CpuRegister(RAX), is64bit);  in GenerateVarHandleGetAndAdd()
    4739  __ movd(temp, fptemp, is64bit);  in GenerateVarHandleGetAndAdd()
    4757  __ movd(out.AsFpuRegister<XmmRegister>(), CpuRegister(RAX), is64bit);  in GenerateVarHandleGetAndAdd()

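The GenerateVarHandleGetAndAdd lines shuttle the floating-point operand between XMM registers and RAX with movd because the atomic compare-and-exchange works on integer registers. A rough standalone analogue in C++20 (using std::atomic and std::bit_cast instead of ART's assembler, purely to show the shape of the loop; the function name is hypothetical):

    #include <atomic>
    #include <bit>
    #include <cstdint>

    // Atomically add 'addend' to a float stored as raw 32-bit bits: the same
    // cmpxchg loop around which the movd's above move data in and out of RAX.
    static inline float AtomicGetAndAddFloat(std::atomic<uint32_t>& storage, float addend) {
      uint32_t expected = storage.load();
      uint32_t desired;
      do {
        float old_value = std::bit_cast<float>(expected);        // movd xmm, r32
        desired = std::bit_cast<uint32_t>(old_value + addend);   // addss; movd r32, xmm
      } while (!storage.compare_exchange_weak(expected, desired));  // lock cmpxchg
      return std::bit_cast<float>(expected);
    }
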
/art/compiler/utils/x86_64/

assembler_x86_64.cc  (definitions, in class art::x86_64::X86_64Assembler)
     736  void X86_64Assembler::movd(XmmRegister dst, CpuRegister src) {
     740  void X86_64Assembler::movd(CpuRegister dst, XmmRegister src) {
     744  void X86_64Assembler::movd(XmmRegister dst, CpuRegister src, bool is64bit) {
     753  void X86_64Assembler::movd(CpuRegister dst, XmmRegister src, bool is64bit) {

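The four overloads above correspond to the register forms of MOVD/MOVQ. For reference, a simplified, self-contained encoder sketch of those forms follows; it is not ART's implementation (which goes through its own emit helpers) and, as an assumption to keep REX handling minimal, it only accepts register numbers 0-7:

    #include <cstdint>
    #include <vector>

    // Register-to-register MOVD/MOVQ encodings:
    //   66 [REX.W] 0F 6E /r   movd/movq xmm, r32/r64
    //   66 [REX.W] 0F 7E /r   movd/movq r32/r64, xmm
    static void EmitMovdXmmFromGpr(std::vector<uint8_t>& code, int xmm, int gpr, bool is64bit) {
      code.push_back(0x66);
      if (is64bit) code.push_back(0x48);   // REX.W selects the 64-bit (movq) form
      code.push_back(0x0F);
      code.push_back(0x6E);
      code.push_back(static_cast<uint8_t>(0xC0 | (xmm << 3) | gpr));  // ModRM: reg=xmm, rm=gpr
    }

    static void EmitMovdGprFromXmm(std::vector<uint8_t>& code, int gpr, int xmm, bool is64bit) {
      code.push_back(0x66);
      if (is64bit) code.push_back(0x48);
      code.push_back(0x0F);
      code.push_back(0x7E);
      code.push_back(static_cast<uint8_t>(0xC0 | (xmm << 3) | gpr));  // ModRM: reg=xmm, rm=gpr
    }
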
/art/compiler/utils/x86/

assembler_x86.cc  (definitions, in class art::x86::X86Assembler)
     609  void X86Assembler::movd(XmmRegister dst, Register src) {
     618  void X86Assembler::movd(Register dst, XmmRegister src) {