/system/chre/platform/include/chre/platform/ |
D | atomic.h |
     55  return load();
     63  bool load() const;
    112  return load();  in uint32_t()
    120  uint32_t load() const;
|
/system/security/keystore2/selinux/src/ |
D | concurrency_test.rs |
     94  match turnpike_clone.load(Ordering::Relaxed) {  in test_concurrent_check_access()
    117  while complete_count_clone.load(Ordering::Relaxed) as usize != cpus {  in test_concurrent_check_access()
    132  while turnpike.load(Ordering::Relaxed) as usize != cpus {  in test_concurrent_check_access()
    139  turnpike.load(Ordering::Relaxed)  in test_concurrent_check_access()
    149  assert_eq!(turnpike.load(Ordering::Relaxed) as usize, cpus, "i = {}", i);  in test_concurrent_check_access()
    162  while complete_count.load(Ordering::Relaxed) as usize != cpus {  in test_concurrent_check_access()
    180  complete_count.load(Ordering::Relaxed)  in test_concurrent_check_access()
|
/system/chre/platform/linux/include/chre/target_platform/ |
D | atomic_base_impl.h |
     31  inline bool AtomicBool::load() const {  in load() function
     32  return mAtomic.load();  in load()
     50  inline uint32_t AtomicUint32::load() const {  in load() function
     51  return mAtomic.load();  in load()
|
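The two entries above are the declaration and the Linux implementation of CHRE's atomic wrapper: load() simply forwards to std::atomic, and the conversion operators return load(). A minimal sketch of that shape; only the signatures visible in the snippets come from the source, the rest is assumed:

    #include <atomic>
    #include <cstdint>

    // Sketch of a CHRE-style atomic wrapper. load() forwards to std::atomic,
    // which defaults to std::memory_order_seq_cst, and the conversion
    // operator just returns load(), as in atomic.h line 112 above.
    class AtomicUint32 {
     public:
      explicit AtomicUint32(uint32_t startingValue) : mAtomic(startingValue) {}

      uint32_t load() const {
        return mAtomic.load();
      }

      void store(uint32_t desired) {
        mAtomic.store(desired);
      }

      operator uint32_t() const {
        return load();
      }

     private:
      std::atomic<uint32_t> mAtomic;
    };
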
/system/libhwbinder/ |
D | Binder.cpp |
     93  Extras* e = mExtras.load(std::memory_order_acquire);  in isRequestingSid()
     99  Extras* e = mExtras.load(std::memory_order_acquire);  in setRequestingSid()
    167  Extras* e = mExtras.load(std::memory_order_acquire);  in findObject()
    176  Extras* e = mExtras.load(std::memory_order_acquire);  in detachObject()
    190  Extras* e = mExtras.load(std::memory_order_relaxed);  in ~BHwBinder()
    204  Extras* e = mExtras.load(std::memory_order_acquire);  in getOrCreateExtras()
    241  if (!(mState.load(std::memory_order_relaxed)&kRemoteAcquired)) {  in ~BpHwRefBase()
|
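Binder.cpp reads the lazily allocated Extras pointer with an acquire load, so a reader that sees a non-null pointer also sees the fields written before it was published; getOrCreateExtras() publishes via a compare-and-swap. A condensed sketch of that pattern; the Extras payload and the class around it are invented for illustration, not the real libhwbinder types:

    #include <atomic>
    #include <string>

    struct Extras {
      std::string sid;  // hypothetical payload, for illustration only
    };

    class Node {
     public:
      // Reader side: acquire pairs with the release in the CAS below, so a
      // non-null result guarantees the Extras contents are visible.
      bool hasExtras() const {
        return mExtras.load(std::memory_order_acquire) != nullptr;
      }

      // Writer side: allocate once, publish with a CAS; if another thread
      // won the race, discard our copy and use the winner's.
      Extras* getOrCreateExtras() {
        Extras* e = mExtras.load(std::memory_order_acquire);
        if (e == nullptr) {
          e = new Extras();
          Extras* expected = nullptr;
          if (!mExtras.compare_exchange_strong(expected, e,
                                               std::memory_order_acq_rel,
                                               std::memory_order_acquire)) {
            delete e;  // lost the race; expected now holds the winner
            e = expected;
          }
        }
        return e;
      }

      ~Node() {
        // Single-threaded teardown, so a relaxed load suffices, mirroring
        // the relaxed load in ~BHwBinder() above.
        delete mExtras.load(std::memory_order_relaxed);
      }

     private:
      std::atomic<Extras*> mExtras{nullptr};
    };
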
/system/chre/util/include/chre/util/system/ |
D | atomic_spsc_queue.h |
     97  return (mTail.load() > UINT32_MAX - kCapacity);  in nearingOverflow()
    105  uint32_t head = mHead.load();  in size()
    106  uint32_t tail = mTail.load();  in size()
    175  uint32_t tail = mQueue.mTail.load();  in nextStorage()
    176  if (tail != 0 && tail == mQueue.mHead.load()) {  in nextStorage()
    227  return mQueue.data()[mQueue.mHead.load() % kCapacity];  in front()
    230  return mQueue.data()[mQueue.mHead.load() % kCapacity];  in front()
|
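atomic_spsc_queue.h derives size and storage slots from free-running 32-bit head/tail counters, wrapping only when indexing into the array (nearingOverflow() above guards the counters themselves). A minimal single-producer/single-consumer sketch along those lines; the member names follow the snippets, the orderings and everything else are assumptions:

    #include <array>
    #include <atomic>
    #include <cstdint>

    // SPSC queue sketch: mHead/mTail are free-running counters, so size()
    // is just their difference and the modulo only happens when touching
    // the storage. Assumes the 32-bit counters never wrap; the real queue
    // checks this via nearingOverflow().
    template <typename T, uint32_t kCapacity>
    class AtomicSpscQueue {
     public:
      uint32_t size() const {
        uint32_t head = mHead.load(std::memory_order_acquire);
        uint32_t tail = mTail.load(std::memory_order_acquire);
        return tail - head;
      }

      bool push(const T &value) {  // producer thread only
        uint32_t tail = mTail.load(std::memory_order_relaxed);
        if (tail - mHead.load(std::memory_order_acquire) == kCapacity) {
          return false;  // full
        }
        mData[tail % kCapacity] = value;
        mTail.store(tail + 1, std::memory_order_release);  // publish element
        return true;
      }

      bool pop(T *out) {  // consumer thread only
        uint32_t head = mHead.load(std::memory_order_relaxed);
        if (mTail.load(std::memory_order_acquire) - head == 0) {
          return false;  // empty
        }
        *out = mData[head % kCapacity];
        mHead.store(head + 1, std::memory_order_release);  // free the slot
        return true;
      }

     private:
      std::array<T, kCapacity> mData{};
      std::atomic<uint32_t> mHead{0};
      std::atomic<uint32_t> mTail{0};
    };
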
/system/chre/host/hal_generic/aidl/ |
D | event_logger.cc |
    109  for (const NanoappLoad &load : mNanoappLoads) {  in dump() local
    115  formatLocalTime(load.timestampMs).c_str(), load.id,  in dump()
    116  load.version, load.sizeBytes,  in dump()
    117  load.success ? "ok" : "fail") > 0) {  in dump()
|
/system/core/libutils/binder/ |
D | RefBase.cpp |
    266  addRef(&mStrongRefs, id, mStrong.load(std::memory_order_relaxed));  in addStrongRef()
    275  addRef(&mStrongRefs, id, -mStrong.load(std::memory_order_relaxed));  in removeStrongRef()
    287  addRef(&mWeakRefs, id, mWeak.load(std::memory_order_relaxed));  in addWeakRef()
    294  addRef(&mWeakRefs, id, -mWeak.load(std::memory_order_relaxed));  in removeWeakRef()
    500  int32_t flags = refs->mFlags.load(std::memory_order_relaxed);  in decStrong()
    548  return mRefs->mStrong.load(std::memory_order_relaxed);  in getStrongCount()
    584  int32_t flags = impl->mFlags.load(std::memory_order_relaxed);  in decWeak()
    590  if (impl->mStrong.load(std::memory_order_relaxed)  in decWeak()
    618  int32_t curCount = impl->mStrong.load(std::memory_order_relaxed);  in attemptIncStrong()
    638  int32_t flags = impl->mFlags.load(std::memory_order_relaxed);  in attemptIncStrong()
    [all …]
|
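The attemptIncStrong() hits above show RefBase's pattern of starting from a relaxed load of the strong count and letting a compare-and-swap loop do the real work, since a failed CAS refreshes the expected value. A stripped-down sketch of that fast path; the class is invented and the INITIAL_STRONG_VALUE handling is simplified, as the real code also consults the object lifetime flags:

    #include <atomic>
    #include <cstdint>

    // RefBase-style "increment the strong count only while the object is
    // still live". The initial relaxed load is only a starting guess;
    // correctness comes from compare_exchange_weak, which re-reads the
    // current value whenever it fails.
    constexpr int32_t INITIAL_STRONG_VALUE = 1 << 28;

    class StrongCount {
     public:
      bool attemptIncStrong() {
        int32_t curCount = mStrong.load(std::memory_order_relaxed);
        while (curCount > 0 && curCount != INITIAL_STRONG_VALUE) {
          if (mStrong.compare_exchange_weak(curCount, curCount + 1,
                                            std::memory_order_relaxed)) {
            return true;  // another strong ref still existed; ours is added
          }
          // curCount was refreshed by the failed CAS; re-check and retry.
        }
        // Count reached zero or never left its initial value; the real
        // RefBase then falls back to logic driven by the lifetime flags.
        return false;
      }

     private:
      std::atomic<int32_t> mStrong{INITIAL_STRONG_VALUE};
    };
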
/system/unwinding/libunwindstack/benchmarks/ |
D | thread_unwind_benchmarks.cpp |
     33  while (!done->load()) {  in ThreadCall6()
     65  while (tid.load() == 0) {  in BM_thread_unwind()
     74  unwinder.UnwindWithSignal(SIGRTMIN, tid.load());  in BM_thread_unwind()
|
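The benchmark uses plain (sequentially consistent) atomic loads for a simple handshake: the worker publishes its tid and then busy-waits on a done flag, while the measuring thread spins until the tid becomes nonzero. A self-contained sketch of that handshake; gettid() is assumed available (bionic, or glibc 2.30+), and the actual unwinding step is elided:

    #include <atomic>
    #include <cstdio>
    #include <thread>
    #include <unistd.h>

    int main() {
      std::atomic<pid_t> tid{0};
      std::atomic<bool> done{false};

      std::thread worker([&] {
        tid.store(gettid());    // publish the worker's tid (seq_cst store)
        while (!done.load()) {  // busy-wait, as in ThreadCall6() above
          std::this_thread::yield();
        }
      });

      while (tid.load() == 0) {  // wait until the worker has published its tid
        std::this_thread::yield();
      }
      printf("worker tid = %d\n", static_cast<int>(tid.load()));

      // ... the real benchmark unwinds the worker's stack here ...

      done.store(true);
      worker.join();
      return 0;
    }
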
/system/unwinding/libunwindstack/tests/ |
D | UnwindTest.cpp |
    396  if (g_waiters.load() == 1) {  in TEST_F()
    401  ASSERT_NE(0, tid.load());  in TEST_F()
    402  ASSERT_EQ(0, tgkill(getpid(), tid.load(), SIGUSR1)) << "Error: " << strerror(errno);  in TEST_F()
    407  ucontext = reinterpret_cast<void*>(g_ucontext.load());  in TEST_F()
    483  while (g_waiters.load() != 1) {  in TEST_F()
    502  while (g_waiters.load() != 1) {  in TEST_F()
    528  while (g_waiters.load() != 1) {  in TEST_F()
    580  while (!start_unwinding.load()) {  in CreateUnwindThread()
    606  while (g_waiters.load() != 1) {  in TEST_F()
    620  while (unwinders.load() != kNumThreads) {  in TEST_F()
    [all …]
|
/system/media/audio_utils/include/audio_utils/ |
D | mutex.h |
    402  operator T() const { return std::atomic<T>::load(std::memory_order_relaxed); }  in T()
    413  T load(std::memory_order order = std::memory_order_relaxed) const {
    414  return std::atomic<T>::load(order);
    461  T load(std::memory_order order = std::memory_order_relaxed) const { (void)order; return t_; }
    733  pairs_[i].first = pairs_[i + 1].first.load();  in remove()
    734  pairs_[i].second = pairs_[i + 1].second.load();  in remove()
    749  true_top_ = top_.load();  in remove()
    789  .append(std::to_string(reinterpret_cast<uintptr_t>(pairs_[i].first.load())))  in to_string()
    791  .append(std::to_string(static_cast<size_t>(pairs_[i].second.load())))  in to_string()
    874  const pid_t tid = tid_.load();  in to_string()
    [all …]
|
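mutex.h wraps std::atomic so that load() and the implicit conversion default to relaxed ordering, and it pairs that with a non-atomic twin exposing the same interface (the order argument is accepted and ignored) for known single-threaded paths. A sketch of those two shapes; only the signatures in the snippets come from the source, the class names here are invented:

    #include <atomic>
    #include <cstdio>

    // std::atomic with every load defaulting to memory_order_relaxed.
    template <typename T>
    class relaxed_atomic_sketch : private std::atomic<T> {
     public:
      constexpr relaxed_atomic_sketch(T desired = {}) : std::atomic<T>(desired) {}
      operator T() const { return std::atomic<T>::load(std::memory_order_relaxed); }
      T load(std::memory_order order = std::memory_order_relaxed) const {
        return std::atomic<T>::load(order);
      }
      // (stores, fetch_add, etc. would be forwarded the same way)
    };

    // Non-atomic stand-in with the same surface, for code that is known to
    // be single-threaded; the ordering argument is ignored.
    template <typename T>
    class unordered_atomic_sketch {
     public:
      constexpr unordered_atomic_sketch(T desired = {}) : t_(desired) {}
      T load(std::memory_order order = std::memory_order_relaxed) const {
        (void)order;
        return t_;
      }
     private:
      T t_;
    };

    int main() {
      relaxed_atomic_sketch<int> counter{41};
      unordered_atomic_sketch<int> local{1};
      printf("%d\n", counter.load() + local.load());  // prints 42
      return 0;
    }
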
/system/chre/platform/arm/include/chre/target_platform/ |
D | atomic_base_impl.h |
     33  inline bool AtomicBool::load() const {  in load() function
     54  inline uint32_t AtomicUint32::load() const {  in load() function
|
/system/chre/platform/tinysys/include/chre/target_platform/ |
D | atomic_base_impl.h |
     33  inline bool AtomicBool::load() const {  in load() function
     54  inline uint32_t AtomicUint32::load() const {  in load() function
|
/system/chre/platform/zephyr/include/chre/target_platform/ |
D | atomic_base_impl.h |
     34  inline bool AtomicBool::load() const {  in load() function
     54  inline uint32_t AtomicUint32::load() const {  in load() function
|
/system/chre/platform/slpi/include/chre/target_platform/ |
D | atomic_base_impl.h |
     39  inline bool AtomicBool::load() const {  in load() function
     62  inline uint32_t AtomicUint32::load() const {  in load() function
|
/system/media/audio_utils/tests/ |
D | audio_mutex_tests.cpp |
    293  EXPECT_EQ(1, bot.first.load());  in TEST()
    294  EXPECT_EQ(10, bot.second.load());  in TEST()
    296  EXPECT_EQ(3, top.first.load());  in TEST()
    297  EXPECT_EQ(30, top.second.load());  in TEST()
    339  EXPECT_EQ(30, recursive.first.load());  in TEST()
    340  EXPECT_EQ(3, recursive.second.load());  in TEST()
    344  EXPECT_EQ(0, nil.first.load());  in TEST()
    345  EXPECT_EQ(0, nil.second.load());  in TEST()
    366  EXPECT_EQ(30, inversion.first.load());  in TEST()
    367  EXPECT_EQ(3, inversion.second.load());  in TEST()
    [all …]
|
/system/media/audio_utils/ |
D | MelProcessor.cpp |
    277  mDeviceId.load());  in addMelValue_l()
    339  mCurrentSamples.load());  in process()
    367  mDeviceId.load());  in onLastStrongRef()
    393  mRbReadPtr.load(),  in run()
    394  mRbWritePtr.load());  in run()
    470  size_t curIdx = mRbWritePtr.load();  in ringBufferIsFull()
    480  expected = idx.load();  in incRingBufferIndex()
|
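incRingBufferIndex() above advances a ring-buffer index with a compare-and-swap whose expected value comes from load() and is refreshed on every failed attempt. A small sketch of that wrap-around increment; the capacity constant and the free function are placeholders rather than MelProcessor's actual interface:

    #include <atomic>
    #include <cstddef>

    constexpr size_t kRingBufferSize = 32;  // placeholder capacity

    // Atomically advance idx to (idx + 1) % kRingBufferSize. A failed
    // compare_exchange_weak reloads the current value into `expected`,
    // so the loop always retries against fresh data.
    inline size_t incRingBufferIndex(std::atomic<size_t> &idx) {
      size_t expected = idx.load();
      while (!idx.compare_exchange_weak(expected,
                                        (expected + 1) % kRingBufferSize)) {
        // retry with the refreshed expected value
      }
      return expected;  // the index that was claimed
    }
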
/system/extras/simpleperf/scripts/test/ |
D | gecko_profile_generator_test.py |
     43  want = json.load(f)
     57  want = json.load(f)
     71  want = json.load(f)
|
/system/extras/simpleperf/ |
D | RecordReadThread.cpp |
     39  size_t write_head = write_head_.load(std::memory_order_relaxed);  in GetFreeSize()
     40  size_t read_head = read_head_.load(std::memory_order_relaxed);  in GetFreeSize()
     49  size_t write_head = write_head_.load(std::memory_order_relaxed);  in AllocWriteSpace()
     50  size_t read_head = read_head_.load(std::memory_order_acquire);  in AllocWriteSpace()
     74  size_t write_head = write_head_.load(std::memory_order_relaxed);  in FinishWrite()
     80  size_t write_head = write_head_.load(std::memory_order_acquire);  in GetCurrentRecord()
     81  size_t read_head = read_head_.load(std::memory_order_relaxed);  in GetCurrentRecord()
    102  size_t read_head = read_head_.load(std::memory_order_relaxed);  in MoveToNextRecord()
    674  if (!has_data_notification_.load(std::memory_order_relaxed)) {  in SendDataNotificationToMainThread()
|
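RecordReadThread.cpp follows the usual single-producer/single-consumer recipe: each thread loads the index it owns with relaxed ordering and the index the other thread advances with acquire (pairing with that thread's release store), while GetFreeSize() tolerates a stale relaxed snapshot. A condensed writer-side sketch under those assumptions; the byte-counter layout here is simplified relative to the real buffer:

    #include <atomic>
    #include <cstddef>

    // Writer-side view of an SPSC ring buffer, using free-running byte
    // counters: write_head_ - read_head_ is the number of bytes in use.
    class RecordBufferSketch {
     public:
      explicit RecordBufferSketch(size_t size) : buffer_size_(size) {}

      // An approximate answer is acceptable here, so both loads are relaxed.
      size_t GetFreeSize() const {
        size_t write_head = write_head_.load(std::memory_order_relaxed);
        size_t read_head = read_head_.load(std::memory_order_relaxed);
        return buffer_size_ - (write_head - read_head);
      }

      // Before reusing space, acquire read_head_ so the reader's release
      // store (and its reads of the data) happen-before our overwrite.
      bool AllocWriteSpace(size_t bytes) {
        size_t write_head = write_head_.load(std::memory_order_relaxed);
        size_t read_head = read_head_.load(std::memory_order_acquire);
        return buffer_size_ - (write_head - read_head) >= bytes;
      }

      // Release publishes the newly written bytes to the reader thread.
      void FinishWrite(size_t bytes) {
        size_t write_head = write_head_.load(std::memory_order_relaxed);
        write_head_.store(write_head + bytes, std::memory_order_release);
      }

     private:
      const size_t buffer_size_;
      std::atomic<size_t> write_head_{0};
      std::atomic<size_t> read_head_{0};
    };
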
/system/unwinding/libunwindstack/ |
D | MapInfo.cpp |
    346  uint64_t cur_load_bias = load_bias().load();  in GetLoadBias()
    381  ElfFields* elf_fields = elf_fields_.load();  in ~MapInfo()
    383  delete elf_fields->build_id_.load();  in ~MapInfo()
    406  SharedString* id = build_id().load();  in GetBuildID()
    445  ElfFields* elf_fields = elf_fields_.load(std::memory_order_acquire);  in GetElfFields()
|
/system/chre/host/common/ |
D | st_hal_lpma_handler.cc |
     93  if (load()) {  in loadAndStart()
    191  bool StHalLpmaHandler::load() {  in load() function in android::chre::StHalLpmaHandler
    243  bool StHalLpmaHandler::load() {  in load() function in android::chre::StHalLpmaHandler
|
/system/extras/perf2cfg/ |
D | pylintrc | 4 load-plugins=pylint.extensions.docparams
|
/system/sepolicy/private/ |
D | vendor_misc_writer.te | 6 # load DT fstab.
|
/system/apex/apexer/ |
D | conv_apex_manifest.py |
     42  obj = json.load(f, object_pairs_hook=collections.OrderedDict)
     57  obj = json.load(f, object_pairs_hook=collections.OrderedDict)
|
/system/security/keystore2/watchdog/src/ |
D | lib.rs |
    348  assert_eq!(0, hit_count.load(atomic::Ordering::Relaxed));  in test_watchdog()
    350  assert_eq!(1, hit_count.load(atomic::Ordering::Relaxed));  in test_watchdog()
    352  assert_eq!(2, hit_count.load(atomic::Ordering::Relaxed));  in test_watchdog()
    355  assert_eq!(2, hit_count.load(atomic::Ordering::Relaxed));  in test_watchdog()
|
/system/core/shell_and_utilities/ |
D | README.md |
     60  inotifyd insmod install ionice iorenice iotop kill killall ln load\_policy
     98  inotifyd insmod install ionice iorenice iotop kill killall ln load\_policy
    132  inotifyd insmod install ionice iorenice iotop kill killall ln load\_policy
    166  inotifyd insmod install ionice iorenice iotop kill killall ln load\_policy
    200  insmod install ionice iorenice iotop kill killall ln load\_policy log
    232  **install** ionice iorenice **iotop** kill killall ln load\_policy log logname
    262  insmod ionice iorenice kill killall ln load\_policy log logname losetup ls
    287  inotifyd insmod ionice iorenice kill killall ln load\_policy **log** logname
    311  insmod **ionice** **iorenice** kill **killall** load\_policy ln logname losetup **ls**
    332  head hostname hwclock id ifconfig inotifyd insmod kill load\_policy ln
    [all …]
|