
Searched refs:RunTimePoolInfo (Results 1 – 21 of 21) sorted by relevance

/packages/modules/NeuralNetworks/common/include/
CpuExecutor.h:106  class RunTimePoolInfo {
108 static std::optional<RunTimePoolInfo> createFromMemory(const SharedMemory& memory);
109 static RunTimePoolInfo createFromExistingBuffer(uint8_t* buffer, uint32_t size = 0);
118 RunTimePoolInfo(const std::shared_ptr<const RunTimePoolInfoImpl>& impl);
123 bool setRunTimePoolInfosFromCanonicalMemories(std::vector<RunTimePoolInfo>* poolInfos,
126 bool setRunTimePoolInfosFromMemoryPools(std::vector<RunTimePoolInfo>* poolInfos,
150 const std::vector<RunTimePoolInfo>& modelPoolInfos,
151 const std::vector<RunTimePoolInfo>& requestPoolInfos);
168 const std::vector<RunTimePoolInfo>& requestPoolInfos,
183 const std::vector<RunTimePoolInfo>* mModelPoolInfos = nullptr;
HalBufferTracker.h:48  RunTimePoolInfo createRunTimePoolInfo() const { in createRunTimePoolInfo()
49 return RunTimePoolInfo::createFromExistingBuffer(kBuffer.get(), kSize); in createRunTimePoolInfo()
BufferTracker.h:48  RunTimePoolInfo createRunTimePoolInfo() const { in createRunTimePoolInfo()
49 return RunTimePoolInfo::createFromExistingBuffer(kBuffer.get(), kSize); in createRunTimePoolInfo()
LegacyHalUtils.h:189  class RunTimePoolInfo;
191 bool setRunTimePoolInfosFromHidlMemories(std::vector<RunTimePoolInfo>* poolInfos,
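Note: taken together, the CpuExecutor.h declarations above (lines 106–126) describe the two ways a RunTimePoolInfo is obtained: mapping a SharedMemory, or wrapping a buffer the caller already owns. The sketch below is illustrative only, not code from the tree; it assumes RunTimePoolInfo forwards getBuffer()/getSize()/flush() from the RunTimePoolInfoImpl shown under CpuExecutor.cpp further down, and that "CpuExecutor.h" is on the include path.

    // Illustrative sketch (not from the tree).
    #include <cstdint>
    #include <cstring>
    #include <optional>

    #include "CpuExecutor.h"

    using ::android::nn::RunTimePoolInfo;
    using ::android::nn::SharedMemory;

    bool zeroFillPool(const SharedMemory& memory) {
        // createFromMemory() maps the memory; mapping can fail, hence std::optional.
        std::optional<RunTimePoolInfo> pool = RunTimePoolInfo::createFromMemory(memory);
        if (!pool.has_value()) {
            return false;
        }
        // getBuffer()/getSize()/flush() are assumed to mirror RunTimePoolInfoImpl.
        std::memset(pool->getBuffer(), 0, pool->getSize());
        return pool->flush();  // write back, needed for some memory types
    }

    RunTimePoolInfo wrapCallerBuffer(uint8_t* data, uint32_t size) {
        // createFromExistingBuffer() maps nothing and takes no ownership; it
        // only records the caller-owned pointer (size defaults to 0 when unknown).
        return RunTimePoolInfo::createFromExistingBuffer(data, size);
    }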
/packages/modules/NeuralNetworks/driver/sample/
CanonicalBuffer.cpp:31  void copyRunTimePoolInfos(const RunTimePoolInfo& srcPool, const RunTimePoolInfo& dstPool) { in copyRunTimePoolInfos()
42 const auto srcPool = RunTimePoolInfo::createFromMemory(src); in copyFromInternal()
71 const auto dstPool = RunTimePoolInfo::createFromMemory(dst); in copyTo()
CanonicalPreparedModel.h:40  std::vector<RunTimePoolInfo> poolInfos);
69 const std::vector<RunTimePoolInfo> kPoolInfos;
CanonicalPreparedModel.cpp:38  GeneralResult<std::pair<std::vector<RunTimePoolInfo>, std::vector<std::shared_ptr<ManagedBuffer>>>>
41 std::vector<RunTimePoolInfo> requestPoolInfos; in createRunTimePoolInfos()
48 auto buffer = RunTimePoolInfo::createFromMemory(*maybeMemory); in createRunTimePoolInfos()
119 std::vector<RunTimePoolInfo> poolInfos) in PreparedModel()
CanonicalDevice.cpp:186  std::vector<RunTimePoolInfo> poolInfos; in prepareModel()
/packages/modules/NeuralNetworks/runtime/
Memory.h:179  virtual std::optional<RunTimePoolInfo> getRunTimePoolInfo() const;
216 mutable std::optional<RunTimePoolInfo> mCachedRunTimePoolInfo;
280 std::optional<RunTimePoolInfo> getRunTimePoolInfo() const override { in getRunTimePoolInfo()
281 return RunTimePoolInfo::createFromExistingBuffer(getPointer(), nn::getSize(kMemory)); in getRunTimePoolInfo()
334 std::optional<RunTimePoolInfo> getRunTimePoolInfo() const override { in getRunTimePoolInfo()
335 return RunTimePoolInfo::createFromExistingBuffer(getPointer(), nn::getSize(kMemory)); in getRunTimePoolInfo()
Manager.cpp:962  CpuPreparedModel(Model model, std::vector<RunTimePoolInfo> poolInfos) in CpuPreparedModel()
966 const std::vector<RunTimePoolInfo>& getModelPoolInfos() const { return mModelPoolInfos; } in getModelPoolInfos()
974 const std::vector<RunTimePoolInfo> mModelPoolInfos;
980 std::vector<RunTimePoolInfo> requestPoolInfos, in CpuExecution()
997 std::vector<RunTimePoolInfo> kRequestPoolInfos;
1066 std::vector<RunTimePoolInfo> poolInfos; in create()
1078 const std::vector<RunTimePoolInfo>& modelPoolInfos, in computeOnCpu()
1079 const std::vector<RunTimePoolInfo>& requestPoolInfos, const OptionalTimePoint& deadline, in computeOnCpu()
1126 static std::tuple<int, Request, std::vector<RunTimePoolInfo>> createCpuRequest( in createCpuRequest()
1129 std::vector<RunTimePoolInfo> requestPoolInfos; in createCpuRequest()
[all …]
Memory.cpp:206  std::optional<RunTimePoolInfo> RuntimeMemory::getRunTimePoolInfo() const { in getRunTimePoolInfo()
209 mCachedRunTimePoolInfo = RunTimePoolInfo::createFromMemory(kMemory); in getRunTimePoolInfo()
222 static int copyHidlMemories(const std::optional<RunTimePoolInfo>& src, in copyHidlMemories()
223 const std::optional<RunTimePoolInfo>& dst) { in copyHidlMemories()
ExecutionPlan.h:796  Buffer(RunTimePoolInfo info, uint32_t offset);
802 RunTimePoolInfo mInfo;
ExecutionBuilder.h:153  std::optional<RunTimePoolInfo> getRunTimePoolInfo(uint32_t poolIndex) const { in getRunTimePoolInfo()
ExecutionPlan.cpp:1468  : mInfo(RunTimePoolInfo::createFromExistingBuffer(static_cast<uint8_t*>(pointer), size)), in Buffer()
1471 ExecutionPlan::Buffer::Buffer(RunTimePoolInfo info, uint32_t offset) in Buffer()
1493 if (std::optional<RunTimePoolInfo> poolInfo = in getBufferFromModelArgumentInfo()
1535 const std::optional<RunTimePoolInfo> info = location.memory->getRunTimePoolInfo(); in getBuffer()
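Note: the Memory.h/Memory.cpp matches above (Memory.h:216, Memory.cpp:206–209) show RuntimeMemory caching the mapped pool in a mutable std::optional member. Below is a minimal illustrative sketch of that lazy-caching pattern, using a hypothetical CachedPool class in place of the real RuntimeMemory:

    // Illustrative sketch (not from the tree); assumes "CpuExecutor.h" is on the include path.
    #include <optional>
    #include <utility>

    #include "CpuExecutor.h"

    using ::android::nn::RunTimePoolInfo;
    using ::android::nn::SharedMemory;

    class CachedPool {
       public:
        explicit CachedPool(SharedMemory memory) : kMemory(std::move(memory)) {}

        // Map lazily on first use and reuse the mapping afterwards, like
        // RuntimeMemory::getRunTimePoolInfo() in Memory.cpp above.
        std::optional<RunTimePoolInfo> getRunTimePoolInfo() const {
            if (!mCachedRunTimePoolInfo.has_value()) {
                mCachedRunTimePoolInfo = RunTimePoolInfo::createFromMemory(kMemory);
            }
            return mCachedRunTimePoolInfo;
        }

       private:
        const SharedMemory kMemory;
        mutable std::optional<RunTimePoolInfo> mCachedRunTimePoolInfo;
    };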
/packages/modules/NeuralNetworks/common/
CpuExecutor.cpp:287  class RunTimePoolInfo::RunTimePoolInfoImpl {
303 RunTimePoolInfo::RunTimePoolInfoImpl::RunTimePoolInfoImpl(SharedMemory memory, Mapping mapping) in RunTimePoolInfoImpl()
306 uint8_t* RunTimePoolInfo::RunTimePoolInfoImpl::getBuffer() const { in getBuffer()
316 uint32_t RunTimePoolInfo::RunTimePoolInfoImpl::getSize() const { in getSize()
322 bool RunTimePoolInfo::RunTimePoolInfoImpl::flush() const { in flush()
328 std::optional<RunTimePoolInfo> RunTimePoolInfo::createFromMemory(const SharedMemory& memory) { in createFromMemory()
336 return RunTimePoolInfo(impl); in createFromMemory()
339 RunTimePoolInfo RunTimePoolInfo::createFromExistingBuffer(uint8_t* buffer, uint32_t size) { in createFromExistingBuffer()
343 return RunTimePoolInfo(impl); in createFromExistingBuffer()
346 RunTimePoolInfo::RunTimePoolInfo(const std::shared_ptr<const RunTimePoolInfoImpl>& impl) in RunTimePoolInfo() function in android::nn::RunTimePoolInfo
[all …]
LegacyHalUtils.cpp:315  bool setRunTimePoolInfosFromHidlMemories(std::vector<RunTimePoolInfo>* poolInfos, in setRunTimePoolInfosFromHidlMemories()
/packages/modules/NeuralNetworks/driver/sample_hidl/
SampleDriver.cpp:310  static void copyRunTimePoolInfos(const RunTimePoolInfo& srcPool, const RunTimePoolInfo& dstPool) { in copyRunTimePoolInfos()
319 const auto dstPool = RunTimePoolInfo::createFromMemory(uncheckedConvert(dst)); in copyTo()
338 const auto srcPool = RunTimePoolInfo::createFromMemory(uncheckedConvert(src)); in copyFromInternal()
369 static std::tuple<V1_3::ErrorStatus, std::vector<RunTimePoolInfo>,
373 std::vector<RunTimePoolInfo> requestPoolInfos; in createRunTimePoolInfos()
382 RunTimePoolInfo::createFromMemory(uncheckedConvert(pool.hidlMemory())); in createRunTimePoolInfos()
452 const std::vector<RunTimePoolInfo>& poolInfos, const OptionalTimePoint& deadline, in asyncExecute()
508 const std::vector<RunTimePoolInfo>& poolInfos, in executeBase()
572 const std::vector<RunTimePoolInfo>& poolInfos, in executeSynchronouslyBase()
770 const std::vector<RunTimePoolInfo>& poolInfos) in BurstExecutorWithCache()
[all …]
SampleDriver.h:175  std::vector<RunTimePoolInfo> mPoolInfos;
SampleDriverFloatXNNPACK.cpp:74  const std::vector<RunTimePoolInfo>& requestPoolInfos, in updateForArguments()
109 const V1_3::Subgraph& subgraph, const std::vector<RunTimePoolInfo>& modelPoolInfos, in initializeRunTimeInfo()
1649 std::vector<RunTimePoolInfo> requestPoolInfos; in asyncExecuteXNNPACK()
1744 std::vector<RunTimePoolInfo> requestPoolInfos; in executeSynchronouslyXNNPACKBase()
1813 std::vector<RunTimePoolInfo> requestPoolInfos; in executeFenced()
1956 std::vector<RunTimePoolInfo> poolInfos; in getSupportedOperationsImpl()
/packages/modules/NeuralNetworks/driver/sample_aidl/
SampleDriverAidl.cpp:234  static void copyRunTimePoolInfos(const RunTimePoolInfo& srcPool, const RunTimePoolInfo& dstPool) { in copyRunTimePoolInfos()
247 const auto dstPool = RunTimePoolInfo::createFromMemory(canonicalMemory.value()); in copyTo()
259 RunTimePoolInfo::createFromExistingBuffer(kBuffer->getPointer(), kBuffer->getSize()); in copyTo()
272 const auto srcPool = RunTimePoolInfo::createFromMemory(canonicalMemory.value()); in copyFromInternal()
283 const auto dstPool = RunTimePoolInfo::createFromExistingBuffer(bufferWrapper->getPointer(), in copyFromInternal()
314 static std::tuple<aidl_hal::ErrorStatus, std::vector<RunTimePoolInfo>,
318 std::vector<RunTimePoolInfo> requestPoolInfos; in createRunTimePoolInfos()
325 auto buffer = RunTimePoolInfo::createFromMemory(*memory); in createRunTimePoolInfos()
344 requestPoolInfos.push_back(RunTimePoolInfo::createFromExistingBuffer( in createRunTimePoolInfos()
SampleDriverAidl.h:142  std::vector<RunTimePoolInfo> mPoolInfos;