
Searched refs:Timing (Results 1 – 25 of 42) sorted by relevance

/packages/modules/NeuralNetworks/runtime/
ExecutionCallback.h
86 const Timing& timing);
154 Timing getTiming() const;
209 Timing timing);
219 Timing mTiming = {};
ExecutionBuilder.h
112 void reportTimingWithoutFencedExecutionCallback(Timing timing) { in reportTimingWithoutFencedExecutionCallback()
170 virtual std::tuple<int, std::vector<OutputShape>, Timing> computeInternal(
218 Timing mTimingWithoutFencedExecutionCallback = {};
290 std::tuple<int, std::vector<OutputShape>, Timing> computeInternal(
306 std::tuple<int, std::vector<OutputShape>, Timing> computeInternal(
403 std::tuple<int, std::vector<OutputShape>, Timing> compute(
408 std::tuple<int, std::vector<OutputShape>, Timing> computeOnCpuFallback();
Manager.cpp
193 std::tuple<int, std::vector<OutputShape>, Timing> execute(
201 std::tuple<int, int, ExecuteFencedInfoCallback, Timing> executeFenced(
252 std::tuple<int, std::vector<OutputShape>, Timing> compute(
255 std::tuple<int, int, ExecuteFencedInfoCallback, Timing> computeFenced(
579 std::tuple<int, std::vector<OutputShape>, Timing> DriverPreparedModel::execute( in execute()
591 ExecutionResult<std::pair<std::vector<OutputShape>, Timing>> result; in execute()
614 Timing timing; in execute()
635 std::tuple<int, int, ExecuteFencedInfoCallback, Timing> DriverPreparedModel::executeFenced( in executeFenced()
662 Timing timing = {}; in executeFenced()
733 std::tuple<int, std::vector<OutputShape>, Timing> DriverExecution::compute( in compute()
[all …]
ExecutionCallback.cpp
30 const Timing& timing) { in notify()
64 Timing ExecutionCallback::getTiming() const { in getTiming()
115 std::vector<OutputShape> outputShapes, Timing timing) { in notifyInternal()
Manager.h
57 virtual std::tuple<int, std::vector<OutputShape>, Timing> compute(
62 virtual std::tuple<int, int, ExecuteFencedInfoCallback, Timing> computeFenced(
79 virtual std::tuple<int, std::vector<OutputShape>, Timing> execute(
90 virtual std::tuple<int, int, ExecuteFencedInfoCallback, Timing> executeFenced(
ExecutionBuilder.cpp
376 Timing timingLaunched = mTimingWithoutFencedExecutionCallback; in getDuration()
377 Timing timingFenced = timingLaunched; in getDuration()
642 static std::tuple<int, std::vector<OutputShape>, Timing> cpuFallbackFull( in cpuFallbackFull()
663 static std::tuple<int, std::vector<OutputShape>, Timing, std::shared_ptr<StepExecutor>>
682 std::tuple<int, std::vector<OutputShape>, Timing> SimpleExecutionBuilder::computeInternal( in computeInternal()
718 std::tuple<int, std::vector<OutputShape>, Timing> CompoundExecutionBuilder::computeInternal( in computeInternal()
1508 std::tuple<int, std::vector<OutputShape>, Timing> StepExecutor::compute( in compute()
1517 Timing timing; in compute()
1553 Timing timing; in computeFenced()
1578 std::tuple<int, std::vector<OutputShape>, Timing> StepExecutor::computeOnCpuFallback() { in computeOnCpuFallback()
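The runtime-side hits above share one shape: the synchronous execution path returns a std::tuple<int, std::vector<OutputShape>, Timing> (result code, output shapes, driver-reported timing), and ExecutionCallback caches the Timing for later retrieval via getTiming(). Below is a minimal, self-contained sketch of that tuple pattern; Timing, OutputShape, and runModel are simplified stand-ins, not the real NNAPI types.

#include <cstdint>
#include <tuple>
#include <vector>

struct Timing {       // stand-in, not the runtime's Timing; UINT64_MAX == "not measured"
    uint64_t timeOnDevice = UINT64_MAX;
    uint64_t timeInDriver = UINT64_MAX;
};
struct OutputShape {  // stand-in for the runtime's OutputShape
    std::vector<uint32_t> dimensions;
    bool isSufficient = true;
};

// Hypothetical executor with the same return shape as StepExecutor::compute() above.
std::tuple<int, std::vector<OutputShape>, Timing> runModel() {
    Timing timing;
    timing.timeOnDevice = 1200;  // made-up numbers for the sketch
    timing.timeInDriver = 1500;
    std::vector<OutputShape> shapes = {OutputShape{{1, 224, 224, 3}}};
    return {0 /*ANEURALNETWORKS_NO_ERROR*/, shapes, timing};
}

int main() {
    // Callers unpack the tuple, e.g. with structured bindings.
    auto [status, outputShapes, timing] = runModel();
    (void)outputShapes;
    (void)timing;
    return status;
}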
/packages/modules/NeuralNetworks/driver/sample_aidl/
SampleDriverAidl.h
150 SampleFencedExecutionCallback(aidl_hal::Timing timingSinceLaunch, in SampleFencedExecutionCallback()
151 aidl_hal::Timing timingAfterFence, aidl_hal::ErrorStatus error) in SampleFencedExecutionCallback()
155 ndk::ScopedAStatus getExecutionInfo(aidl_hal::Timing* timingLaunched, in getExecutionInfo()
156 aidl_hal::Timing* timingFenced, in getExecutionInfo()
165 const aidl_hal::Timing kTimingSinceLaunch;
166 const aidl_hal::Timing kTimingAfterFence;
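The AIDL sample driver hits show the fenced-execution callback idiom: the callback is constructed with the two Timing values and an error status, then hands them back through getExecutionInfo() out-parameters. A rough, self-contained sketch of that idiom follows; the Timing and ErrorStatus types, the kErrorStatus member, and the void return are simplifications of aidl_hal::Timing, aidl_hal::ErrorStatus, and ndk::ScopedAStatus.

#include <cstdint>

// Stand-ins for the AIDL types, not the real definitions.
struct Timing {
    int64_t timeOnDeviceNs = -1;  // assumption: -1 meaning "not measured"
    int64_t timeInDriverNs = -1;
};
enum class ErrorStatus { NONE, GENERAL_FAILURE };

// Mirrors the SampleFencedExecutionCallback pattern in SampleDriverAidl.h above:
// cache the timing at construction, report it later through out-parameters.
class FencedExecutionCallback {
  public:
    FencedExecutionCallback(Timing timingSinceLaunch, Timing timingAfterFence, ErrorStatus error)
        : kTimingSinceLaunch(timingSinceLaunch),
          kTimingAfterFence(timingAfterFence),
          kErrorStatus(error) {}

    // The real method returns ndk::ScopedAStatus; this sketch just writes the outputs.
    void getExecutionInfo(Timing* timingLaunched, Timing* timingFenced,
                          ErrorStatus* errorStatus) const {
        *timingLaunched = kTimingSinceLaunch;
        *timingFenced = kTimingAfterFence;
        *errorStatus = kErrorStatus;
    }

  private:
    const Timing kTimingSinceLaunch;
    const Timing kTimingAfterFence;
    const ErrorStatus kErrorStatus;  // hypothetical member for this sketch
};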
/packages/modules/NeuralNetworks/driver/sample_hidl/
SampleDriverUtils.cpp
73 const hardware::hidl_vec<V1_2::OutputShape>&, V1_2::Timing) { in notify()
81 const hardware::hidl_vec<V1_2::OutputShape>& outputShapes, V1_2::Timing timing) { in notify()
89 const hardware::hidl_vec<V1_2::OutputShape>& outputShapes, V1_2::Timing timing) { in notify()
SampleDriverUtils.h
48 const hardware::hidl_vec<V1_2::OutputShape>&, V1_2::Timing);
51 const hardware::hidl_vec<V1_2::OutputShape>& outputShapes, V1_2::Timing timing);
54 const hardware::hidl_vec<V1_2::OutputShape>& outputShapes, V1_2::Timing timing);
SampleDriver.h
183 SampleFencedExecutionCallback(V1_2::Timing timingSinceLaunch, V1_2::Timing timingAfterFence, in SampleFencedExecutionCallback()
194 const V1_2::Timing kTimingSinceLaunch;
195 const V1_2::Timing kTimingAfterFence;
SampleDriver.cpp
59 static const V1_2::Timing kNoTiming = {.timeOnDevice = UINT64_MAX, .timeInDriver = UINT64_MAX};
494 V1_2::Timing timing = { in asyncExecute()
568 static std::tuple<V1_3::ErrorStatus, hardware::hidl_vec<V1_2::OutputShape>, V1_2::Timing>
625 V1_2::Timing timing = { in executeSynchronouslyBase()
743 V1_2::Timing timingSinceLaunch = {.timeOnDevice = UINT64_MAX, .timeInDriver = UINT64_MAX}; in executeFenced()
744 V1_2::Timing timingAfterFence = {.timeOnDevice = UINT64_MAX, .timeInDriver = UINT64_MAX}; in executeFenced()
784 std::tuple<V1_0::ErrorStatus, hardware::hidl_vec<V1_2::OutputShape>, V1_2::Timing> execute( in execute()
835 V1_2::Timing timing = { in execute()
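In the HIDL sample driver, V1_2::Timing carries two uint64_t fields and kNoTiming (SampleDriver.cpp:59 above) uses UINT64_MAX as the "no measurement" sentinel. The sketch below is not the driver's actual implementation; it shows one plausible way such a struct gets populated around an execution when measurement was requested. The clock choice and the reuse of a single measurement for both fields are assumptions.

#include <chrono>
#include <cstdint>

struct Timing {  // stand-in for V1_2::Timing; the HAL documents both fields in microseconds
    uint64_t timeOnDevice;
    uint64_t timeInDriver;
};

// Matches the kNoTiming constant at SampleDriver.cpp:59 above.
constexpr Timing kNoTiming = {UINT64_MAX, UINT64_MAX};

// Hypothetical helper: time an execution and fill in Timing, or return the
// sentinel when the app did not ask for measurement.
Timing timedExecute(bool measure, void (*runModel)()) {
    if (!measure) return kNoTiming;
    const auto start = std::chrono::steady_clock::now();
    runModel();
    const auto end = std::chrono::steady_clock::now();
    const auto us = static_cast<uint64_t>(
            std::chrono::duration_cast<std::chrono::microseconds>(end - start).count());
    return Timing{us, us};  // assumption: one CPU-side measurement reused for both fields
}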
/packages/modules/NeuralNetworks/common/
ExecutionBurstController.cpp
48 constexpr V1_2::Timing kNoTiming12 = {std::numeric_limits<uint64_t>::max(),
152 std::optional<std::tuple<V1_0::ErrorStatus, std::vector<V1_2::OutputShape>, V1_2::Timing>>
225 const V1_2::Timing timing = data.at(index).executionTiming(); in deserialize()
261 std::optional<std::tuple<V1_0::ErrorStatus, std::vector<V1_2::OutputShape>, V1_2::Timing>>
586 static std::tuple<int, std::vector<V1_2::OutputShape>, V1_2::Timing, bool> getExecutionResult( in getExecutionResult()
587 V1_0::ErrorStatus status, std::vector<V1_2::OutputShape> outputShapes, V1_2::Timing timing, in getExecutionResult()
594 std::tuple<int, std::vector<V1_2::OutputShape>, V1_2::Timing, bool>
ExecutionBurstServer.cpp
46 constexpr V1_2::Timing kNoTiming = {std::numeric_limits<uint64_t>::max(),
70 std::tuple<V1_0::ErrorStatus, hardware::hidl_vec<V1_2::OutputShape>, V1_2::Timing> execute( in execute()
85 V1_2::Timing returnedTiming; in execute()
89 const V1_2::Timing& timing) { in execute()
116 V1_2::Timing timing) { in serialize()
478 V1_2::Timing timing) { in send()
/packages/modules/NeuralNetworks/common/include/
ExecutionBurstServer.h
57 hardware::neuralnetworks::V1_2::Timing timing);
165 hardware::neuralnetworks::V1_2::Timing timing);
248 hardware::neuralnetworks::V1_2::Timing>
ExecutionBurstController.h
73 hardware::neuralnetworks::V1_2::Timing>>
128 hardware::neuralnetworks::V1_2::Timing>>
322 hardware::neuralnetworks::V1_2::Timing, bool>
LegacyHalUtils.h
184 std::tuple<int, std::vector<OutputShape>, Timing> getExecutionResult(
186 const V1_2::Timing& timing);
370 Timing uncheckedConvert(const V1_2::Timing& timing);
387 V1_2::Timing convertToV1_2(const Timing& timing);
LegacyUtils.h
259 std::tuple<int, std::vector<OutputShape>, Timing> getExecutionResult(
260 ErrorStatus status, std::vector<OutputShape> outputShapes, Timing timing);
DefaultExecution.h
43 ExecutionResult<std::pair<std::vector<OutputShape>, Timing>> compute( in compute()
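LegacyHalUtils.h declares conversions in both directions between the HAL V1_2::Timing and the canonical Timing (uncheckedConvert at line 370, convertToV1_2 at line 387 above). A hedged sketch of what such a conversion has to handle follows; the local types, units, and sentinel handling are assumptions for illustration, not the real implementation.

#include <chrono>
#include <cstdint>
#include <optional>

namespace v1_2 {
struct Timing {  // stand-in for V1_2::Timing: microsecond counts, UINT64_MAX == no data
    uint64_t timeOnDevice;
    uint64_t timeInDriver;
};
}  // namespace v1_2

struct Timing {  // stand-in for the canonical Timing (Types.h:997): optional durations
    std::optional<std::chrono::nanoseconds> timeOnDevice;
    std::optional<std::chrono::nanoseconds> timeInDriver;
};

static std::optional<std::chrono::nanoseconds> toDuration(uint64_t micros) {
    if (micros == UINT64_MAX) return std::nullopt;  // sentinel -> "not measured"
    return std::chrono::microseconds(micros);
}

// Rough analogue of uncheckedConvert(const V1_2::Timing&): sentinel-aware field-wise copy.
Timing uncheckedConvert(const v1_2::Timing& timing) {
    return {toDuration(timing.timeOnDevice), toDuration(timing.timeInDriver)};
}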
/packages/modules/NeuralNetworks/common/types/include/nnapi/
TypeUtils.h
101 std::ostream& operator<<(std::ostream& os, const Timing& timing);
138 bool operator==(const Timing& a, const Timing& b);
139 bool operator!=(const Timing& a, const Timing& b);
Types.h
997 struct Timing {
1005 using ExecuteFencedInfoCallback = std::function<GeneralResult<std::pair<Timing, Timing>>()>;
IExecution.h
97 virtual ExecutionResult<std::pair<std::vector<OutputShape>, Timing>> compute(
IBurst.h
117 virtual ExecutionResult<std::pair<std::vector<OutputShape>, Timing>> execute(
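The canonical type headers define the Timing struct itself (Types.h:997) and declare stream and comparison operators for it (TypeUtils.h:101, 138-139), plus ExecuteFencedInfoCallback as a callable yielding a pair of Timings (Types.h:1005). The sketch below is one plausible shape for those pieces; the real struct is built on the library's own optional-duration aliases and GeneralResult error handling, both of which are simplified away here.

#include <chrono>
#include <functional>
#include <optional>
#include <ostream>
#include <utility>

// Stand-in for the canonical Timing (Types.h:997), not the real definition.
struct Timing {
    std::optional<std::chrono::nanoseconds> timeOnDevice;
    std::optional<std::chrono::nanoseconds> timeInDriver;
};

// Field-wise comparison, as a plausible reading of the operator==/operator!=
// declarations at TypeUtils.h:138-139 (not the verified implementation).
bool operator==(const Timing& a, const Timing& b) {
    return a.timeOnDevice == b.timeOnDevice && a.timeInDriver == b.timeInDriver;
}
bool operator!=(const Timing& a, const Timing& b) { return !(a == b); }

// Human-readable form in the spirit of the operator<< declared at TypeUtils.h:101.
std::ostream& operator<<(std::ostream& os, const Timing& timing) {
    auto print = [&](const std::optional<std::chrono::nanoseconds>& d) {
        if (d.has_value()) os << d->count() << "ns"; else os << "n/a";
    };
    os << "Timing{timeOnDevice=";
    print(timing.timeOnDevice);
    os << ", timeInDriver=";
    print(timing.timeInDriver);
    return os << "}";
}

// Shape of ExecuteFencedInfoCallback (Types.h:1005): a callable that later yields
// the (timingLaunched, timingFenced) pair; GeneralResult error handling omitted.
using ExecuteFencedInfoCallback = std::function<std::pair<Timing, Timing>()>;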
/packages/modules/NeuralNetworks/driver/sample/
CanonicalPreparedModel.cpp
130 ExecutionResult<std::pair<std::vector<OutputShape>, Timing>> PreparedModel::execute( in execute()
179 Timing timing = {}; in execute()
268 Timing timingSinceLaunch = {}; in executeFenced()
269 Timing timingAfterFence = {}; in executeFenced()
CanonicalBurst.h
41 ExecutionResult<std::pair<std::vector<OutputShape>, Timing>> execute(
CanonicalBurst.cpp
42 ExecutionResult<std::pair<std::vector<OutputShape>, Timing>> Burst::execute( in execute()
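Unlike the runtime's tuple-returning path, the canonical sample driver and the IExecution/IBurst interfaces return ExecutionResult<std::pair<std::vector<OutputShape>, Timing>>, a value-or-error type. The sketch below approximates that result type with std::variant just to show how a caller splits the success pair from the error; the real ExecutionResult is an expected-style type, and all names here are stand-ins.

#include <cstdint>
#include <string>
#include <utility>
#include <variant>
#include <vector>

// Stand-ins for the canonical types, not the real NNAPI definitions.
struct Timing {};
struct OutputShape { std::vector<uint32_t> dimensions; bool isSufficient = true; };
struct ExecutionError { std::string message; };

// Approximation of ExecutionResult<T>: either a value or an error.
template <typename T>
using ExecutionResult = std::variant<T, ExecutionError>;

// Hypothetical caller of PreparedModel::execute() / Burst::execute() from the hits above.
void handle(ExecutionResult<std::pair<std::vector<OutputShape>, Timing>> result) {
    if (auto* value = std::get_if<std::pair<std::vector<OutputShape>, Timing>>(&result)) {
        const auto& [outputShapes, timing] = *value;  // success: shapes plus timing
        (void)outputShapes;
        (void)timing;
    } else {
        const auto& error = std::get<ExecutionError>(result);  // failure path
        (void)error;
    }
}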
