/aosp12/art/libartbase/base/

atomic.h
     37  class PACKED(sizeof(T)) Atomic : public std::atomic<T> {  in PACKED()
     39  Atomic<T>() : std::atomic<T>(T()) { }  in PACKED()
     41  explicit Atomic<T>(T value) : std::atomic<T>(value) { }  in PACKED()
    128  typedef Atomic<int32_t> AtomicInteger;
    133  static_assert(sizeof(Atomic<int64_t>) == sizeof(int64_t), "Weird Atomic<int64> size");
    139  static_assert(alignof(Atomic<int64_t>) == alignof(int64_t),
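
The hits above outline the shape of ART's Atomic wrapper: a PACKED() subclass of std::atomic<T> whose static_asserts pin down that the wrapper adds no size or alignment overhead. A minimal sketch reconstructing only what the snippets show (PACKED is an ART macro, omitted here; the hit at line 139 is truncated, so the second assert message is an assumption):

    #include <atomic>
    #include <cstdint>

    // Minimal sketch of the wrapper outlined by the atomic.h hits. ART's real
    // class carries a PACKED(sizeof(T)) attribute plus Java-data helpers; only
    // the constructors visible above are reproduced.
    template <typename T>
    class Atomic : public std::atomic<T> {
     public:
      Atomic() : std::atomic<T>(T()) { }                    // value-initialize (line 39)
      explicit Atomic(T value) : std::atomic<T>(value) { }  // line 41
    };

    typedef Atomic<int32_t> AtomicInteger;  // line 128

    // Lines 133/139: the wrapper must not change size or alignment, so it can
    // be overlaid on plain integers in object memory.
    static_assert(sizeof(Atomic<int64_t>) == sizeof(int64_t), "Weird Atomic<int64> size");
    static_assert(alignof(Atomic<int64_t>) == alignof(int64_t),
                  "Weird Atomic<int64> alignment");  // message assumed; hit is truncated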

allocator.cc
     78  Atomic<size_t> g_bytes_used[kAllocatorTagCount];
     79  Atomic<size_t> g_max_bytes_used[kAllocatorTagCount];
     80  Atomic<uint64_t> g_total_bytes_used[kAllocatorTagCount];

allocator.h
     78  extern Atomic<size_t> g_bytes_used[kAllocatorTagCount];
     81  extern Atomic<size_t> g_max_bytes_used[kAllocatorTagCount];
     84  extern Atomic<uint64_t> g_total_bytes_used[kAllocatorTagCount];

/aosp12/art/runtime/gc/

task_processor_test.cc
     32  RecursiveTask(TaskProcessor* task_processor, Atomic<size_t>* counter, size_t max_recursion)  in RecursiveTask()
     46  Atomic<size_t>* const counter_;
     52  WorkUntilDoneTask(TaskProcessor* task_processor, Atomic<bool>* done_running)  in WorkUntilDoneTask()
     62  Atomic<bool>* done_running_;
     70  Atomic<bool> done_running(false);  in TEST_F()
     71  Atomic<size_t> counter(0);  in TEST_F()
    137  Atomic<bool> done_running(false);  in TEST_F()

heap.h
   1406  Atomic<size_t> target_footprint_;
   1428  Atomic<size_t> num_bytes_allocated_;
   1433  Atomic<size_t> native_bytes_registered_;
   1440  Atomic<uint32_t> native_objects_notified_;
   1447  Atomic<size_t> num_bytes_freed_revoke_;
   1573  Atomic<size_t> count_delayed_oom_;
   1588  Atomic<uint32_t> gcs_completed_;
   1593  Atomic<uint32_t> max_gc_requested_;
   1632  Atomic<bool> alloc_tracking_enabled_;
   1642  Atomic<uint64_t> seen_backtrace_count_;
   [all …]

/aosp12/art/runtime/mirror/

object.cc
     44  Atomic<uint32_t> Object::hash_code_seed(987654321U + std::time(nullptr));
     90  reinterpret_cast<Atomic<uintptr_t>*>(dst_bytes)->store(  in CopyObject()
     91  reinterpret_cast<Atomic<uintptr_t>*>(src_bytes)->load(std::memory_order_relaxed),  in CopyObject()
     99  reinterpret_cast<Atomic<uint32_t>*>(dst_bytes)->store(  in CopyObject()
    100  reinterpret_cast<Atomic<uint32_t>*>(src_bytes)->load(std::memory_order_relaxed),  in CopyObject()
    109  reinterpret_cast<Atomic<uint8_t>*>(dst_bytes)->store(  in CopyObject()
    110  reinterpret_cast<Atomic<uint8_t>*>(src_bytes)->load(std::memory_order_relaxed),  in CopyObject()
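
The CopyObject hits show object memory being copied with relaxed atomic loads and stores at progressively narrower widths (word, 32-bit, byte), so concurrent readers never observe a torn word. A sketch of that pattern, assuming std::atomic in place of ART's Atomic and with an illustrative function name:

    #include <atomic>
    #include <cstddef>
    #include <cstdint>

    // Sketch of the CopyObject pattern: copy memory with relaxed atomic
    // accesses, widest chunks first, so each unit is written indivisibly.
    // Casting raw bytes to std::atomic leans on the same assumptions ART
    // makes (aligned memory, lock-free atomics at these widths).
    void CopyAtomically(uint8_t* dst_bytes, uint8_t* src_bytes, size_t num_bytes) {
      size_t offset = 0;
      for (; offset + sizeof(uintptr_t) <= num_bytes; offset += sizeof(uintptr_t)) {
        reinterpret_cast<std::atomic<uintptr_t>*>(dst_bytes + offset)->store(
            reinterpret_cast<std::atomic<uintptr_t>*>(src_bytes + offset)
                ->load(std::memory_order_relaxed),
            std::memory_order_relaxed);  // lines 90-91
      }
      for (; offset + sizeof(uint32_t) <= num_bytes; offset += sizeof(uint32_t)) {
        reinterpret_cast<std::atomic<uint32_t>*>(dst_bytes + offset)->store(
            reinterpret_cast<std::atomic<uint32_t>*>(src_bytes + offset)
                ->load(std::memory_order_relaxed),
            std::memory_order_relaxed);  // lines 99-100
      }
      for (; offset < num_bytes; ++offset) {
        reinterpret_cast<std::atomic<uint8_t>*>(dst_bytes + offset)->store(
            reinterpret_cast<std::atomic<uint8_t>*>(src_bytes + offset)
                ->load(std::memory_order_relaxed),
            std::memory_order_relaxed);  // lines 109-110
      }
    }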

object-inl.h
    565  return reinterpret_cast<const Atomic<kSize>*>(addr)->load(std::memory_order_acquire);  in GetFieldAcquire()
    578  Atomic<int64_t>* atomic_addr = reinterpret_cast<Atomic<int64_t>*>(raw_addr);  in CasFieldWeakSequentiallyConsistent64()
    592  Atomic<int64_t>* atomic_addr = reinterpret_cast<Atomic<int64_t>*>(raw_addr);  in CasFieldStrongSequentiallyConsistent64()
    694  Atomic<uint32_t>* atomic_addr = reinterpret_cast<Atomic<uint32_t>*>(raw_addr);  in CasFieldObjectWithoutWriteBarrier()
    725  Atomic<uint32_t>* atomic_addr = reinterpret_cast<Atomic<uint32_t>*>(raw_addr);  in CompareAndExchangeFieldObject()
    750  Atomic<uint32_t>* atomic_addr = reinterpret_cast<Atomic<uint32_t>*>(raw_addr);  in ExchangeFieldObject()
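
These hits all follow one pattern: reinterpret a raw field address as an atomic and run a CAS or exchange on it. A sketch of the weak/strong 64-bit CAS pair, assuming std::atomic's compare_exchange operations stand in for ART's helpers (function names shortened for illustration):

    #include <atomic>
    #include <cstdint>

    // Sketch of the CAS helpers named in the hits: the weak form may fail
    // spuriously (callers retry in a loop), the strong form only fails on a
    // genuine value mismatch.
    inline bool CasFieldWeakSeqCst64(uint8_t* raw_addr, int64_t old_value, int64_t new_value) {
      auto* atomic_addr = reinterpret_cast<std::atomic<int64_t>*>(raw_addr);  // line 578
      return atomic_addr->compare_exchange_weak(old_value, new_value,
                                                std::memory_order_seq_cst);
    }

    inline bool CasFieldStrongSeqCst64(uint8_t* raw_addr, int64_t old_value, int64_t new_value) {
      auto* atomic_addr = reinterpret_cast<std::atomic<int64_t>*>(raw_addr);  // line 592
      return atomic_addr->compare_exchange_strong(old_value, new_value,
                                                  std::memory_order_seq_cst);
    }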

dex_cache-inl.h
    213  Atomic<GcRoot<mirror::CallSite>>& ref =  in GetResolvedCallSite()
    214  reinterpret_cast<Atomic<GcRoot<mirror::CallSite>>&>(target);  in GetResolvedCallSite()
    228  Atomic<GcRoot<mirror::CallSite>>& ref =  in SetResolvedCallSite()
    229  reinterpret_cast<Atomic<GcRoot<mirror::CallSite>>&>(target);  in SetResolvedCallSite()

object.h
    359  reinterpret_cast<Atomic<kType>*>(addr)->store(new_value, std::memory_order_seq_cst);  in SetFieldPrimitive()
    361  reinterpret_cast<Atomic<kType>*>(addr)->StoreJavaData(new_value);  in SetFieldPrimitive()
    371  return reinterpret_cast<const Atomic<kType>*>(addr)->load(std::memory_order_seq_cst);  in GetFieldPrimitive()
    373  return reinterpret_cast<const Atomic<kType>*>(addr)->LoadJavaData();  in GetFieldPrimitive()
    771  static Atomic<uint32_t> hash_code_seed;
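
The SetFieldPrimitive/GetFieldPrimitive hits show the two access paths: a volatile Java field gets a sequentially consistent atomic access, while a plain field goes through StoreJavaData/LoadJavaData. A sketch of that branch; modeling the Java-data path as a relaxed access is an assumption, and the function shapes are illustrative:

    #include <atomic>

    // Sketch of the branch visible at lines 359-373: volatile fields use
    // seq_cst atomics; plain fields use StoreJavaData / LoadJavaData,
    // modeled here as relaxed accesses (an assumption).
    template <bool kIsVolatile, typename T>
    void SetFieldPrimitive(std::atomic<T>* addr, T new_value) {
      if (kIsVolatile) {
        addr->store(new_value, std::memory_order_seq_cst);  // line 359
      } else {
        addr->store(new_value, std::memory_order_relaxed);  // StoreJavaData stand-in (line 361)
      }
    }

    template <bool kIsVolatile, typename T>
    T GetFieldPrimitive(const std::atomic<T>* addr) {
      return kIsVolatile ? addr->load(std::memory_order_seq_cst)   // line 371
                         : addr->load(std::memory_order_relaxed);  // LoadJavaData stand-in (line 373)
    }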

object_reference.h
    213  Atomic<uint32_t> reference_;

/aosp12/art/runtime/gc/collector/

concurrent_copying.h
    356  Atomic<bool> done_scanning_;
    395  Atomic<int> is_mark_stack_push_disallowed_;
    405  Atomic<MarkStackMode> mark_stack_mode_;
    411  Atomic<size_t> bytes_moved_;  // Used by mutators
    412  Atomic<size_t> objects_moved_;  // Used by mutators
    453  Atomic<size_t> to_space_bytes_skipped_;
    454  Atomic<size_t> to_space_objects_skipped_;
    462  Atomic<uint64_t> rb_slow_path_ns_;
    463  Atomic<uint64_t> rb_slow_path_count_;
    464  Atomic<uint64_t> rb_slow_path_count_gc_;
    [all …]

/aosp12/art/runtime/gc/accounting/

card_table-inl.h
     35  Atomic<uint8_t>* byte_atomic = reinterpret_cast<Atomic<uint8_t>*>(address);  in byte_cas()
     43  Atomic<uintptr_t>* word_atomic = reinterpret_cast<Atomic<uintptr_t>*>(address);  in byte_cas()
    196  Atomic<uintptr_t>* atomic_word = reinterpret_cast<Atomic<uintptr_t>*>(word_cur);  in ModifyCardsAtomic()
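
byte_cas has two paths in these hits: a direct byte-wide CAS (line 35) and a fallback that CASes the whole containing word (line 43), for targets without byte-granularity compare-and-swap. A sketch of the word-based fallback under stated assumptions: the name is illustrative, the lane arithmetic assumes little-endian byte numbering, and ART's actual code differs in detail:

    #include <atomic>
    #include <cstddef>
    #include <cstdint>

    // Sketch of byte_cas's word fallback (line 43): CAS the word that contains
    // the byte, splicing new_value into the right lane. Little-endian byte
    // order assumed for brevity.
    inline bool ByteCasViaWord(uint8_t* address, uint8_t old_value, uint8_t new_value) {
      uintptr_t word_addr = reinterpret_cast<uintptr_t>(address) & ~(sizeof(uintptr_t) - 1);
      auto* word_atomic = reinterpret_cast<std::atomic<uintptr_t>*>(word_addr);
      size_t shift = (reinterpret_cast<uintptr_t>(address) - word_addr) * 8;
      uintptr_t mask = static_cast<uintptr_t>(0xff) << shift;

      uintptr_t cur_word = word_atomic->load(std::memory_order_relaxed);
      if (((cur_word & mask) >> shift) != old_value) {
        return false;  // the byte already changed under us
      }
      uintptr_t new_word = (cur_word & ~mask) | (static_cast<uintptr_t>(new_value) << shift);
      // Weak CAS, matching the byte-wide fast path at line 35: callers
      // tolerate spurious failure and retry.
      return word_atomic->compare_exchange_weak(cur_word, new_word, std::memory_order_relaxed);
    }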

space_bitmap.cc
     67  bitmap_begin_(reinterpret_cast<Atomic<uintptr_t>*>(bitmap_begin)),  in SpaceBitmap()
    147  Atomic<uintptr_t>* const src = source_bitmap->Begin();  in CopyFrom()
    148  Atomic<uintptr_t>* const dest = Begin();  in CopyFrom()
    172  Atomic<uintptr_t>* live = live_bitmap.bitmap_begin_;  in SweepWalk()
    173  Atomic<uintptr_t>* mark = mark_bitmap.bitmap_begin_;  in SweepWalk()

space_bitmap-inl.h
     40  Atomic<uintptr_t>* atomic_entry = &bitmap_begin_[index];  in AtomicTestAndSet()
    170  Atomic<uintptr_t>* bitmap_begin = bitmap_begin_;  in Walk()
    195  Atomic<uintptr_t>* atomic_entry = &bitmap_begin_[index];  in Modify()
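
AtomicTestAndSet (line 40) sets one bit in a bitmap word and reports whether it was already set. A sketch of that operation; fetch_or is used here for brevity, where ART's version is written as a CAS loop:

    #include <atomic>
    #include <cstddef>
    #include <cstdint>

    // Sketch of AtomicTestAndSet: atomically set the bit for bit_index in the
    // bitmap and return its previous value. One word holds kBitsPerWord bits.
    inline bool AtomicTestAndSet(std::atomic<uintptr_t>* bitmap_begin, size_t bit_index) {
      constexpr size_t kBitsPerWord = sizeof(uintptr_t) * 8;
      std::atomic<uintptr_t>* atomic_entry = &bitmap_begin[bit_index / kBitsPerWord];
      uintptr_t mask = static_cast<uintptr_t>(1) << (bit_index % kBitsPerWord);
      uintptr_t old_word = atomic_entry->fetch_or(mask, std::memory_order_relaxed);
      return (old_word & mask) != 0;  // true if the bit was already set
    }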

space_bitmap.h
    162  Atomic<uintptr_t>* Begin() {  in Begin()
    248  Atomic<uintptr_t>* bitmap_begin_ = nullptr;

bitmap-inl.h
     37  auto* atomic_entry = reinterpret_cast<Atomic<uintptr_t>*>(&bitmap_begin_[word_index]);  in AtomicTestAndSetBit()

/aosp12/system/core/healthd/

charger_test.cpp
     46  class Atomic {
     48  Atomic(T&& init) : mValue(std::move(init)) {}  in Atomic()
     67  Atomic<bool>& getUpdateNotifier() {  in getUpdateNotifier()
     68  static Atomic<bool> val(false);  in getUpdateNotifier()
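
This Atomic is a test-local wrapper in healthd's charger test, unrelated to ART's class: getUpdateNotifier hands out a function-local static instance, which C++11 guarantees is initialized exactly once even under concurrency. A sketch reconstructing only what the hits show; the wrapper's accessors are not visible and are deliberately omitted as unknown:

    #include <utility>

    // Sketch of the test-local wrapper from charger_test.cpp. Only the pieces
    // visible in the hits are reproduced.
    template <class T>
    class Atomic {
      public:
        Atomic(T&& init) : mValue(std::move(init)) {}  // line 48
        // Accessors omitted: the hits do not show how the test reads or
        // updates mValue (likely mutex/condition-variable based).
      private:
        T mValue;
    };

    // Lines 67-68: a Meyers singleton; the static local is initialized
    // thread-safely on first use.
    Atomic<bool>& getUpdateNotifier() {
        static Atomic<bool> val(false);
        return val;
    }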

/aosp12/art/runtime/base/

mutex.h
    135  Atomic<uint64_t> wait_time;
    281  Atomic<pid_t> exclusive_owner_;
    284  Atomic<pid_t> exclusive_owner_;  // Guarded by mutex_. Asynchronous reads are OK.
    408  Atomic<pid_t> exclusive_owner_;
    416  Atomic<pid_t> exclusive_owner_;  // Writes guarded by rwlock_. Asynchronous reads are OK.

locks.cc
     35  static Atomic<Locks::ClientCallback*> safe_to_call_abort_callback(nullptr);
     81  Atomic<const BaseMutex*> Locks::expected_mutexes_on_weak_ref_access_guard_;

locks.h
    375  static Atomic<const BaseMutex*> expected_mutexes_on_weak_ref_access_guard_;

/aosp12/art/runtime/

read_barrier-inl.h
    138  Atomic<MirrorType*>* atomic_root = reinterpret_cast<Atomic<MirrorType*>*>(root);  in BarrierForRoot()
    181  reinterpret_cast<Atomic<mirror::CompressedReference<MirrorType>>*>(root);  in BarrierForRoot()

/aosp12/art/test/2029-contended-monitors/

expected-stdout.txt
      2  Atomic increments

/aosp12/art/compiler/utils/

atomic_dex_ref_map.h
     72  using ElementArray = dchecked_vector<Atomic<Value>>;

/aosp12/art/runtime/jit/

profiling_info.cc
     94  auto atomic_root = reinterpret_cast<Atomic<GcRoot<mirror::Class>>*>(&cache->classes_[i]);  in AddInvokeInfo()

/aosp12/art/runtime/gc/space/

image_space.h
    295  static Atomic<uint32_t> bitmap_index_;