Lines matching refs: ConcurrentCopying

70 ConcurrentCopying::ConcurrentCopying(Heap* heap,  in ConcurrentCopying()  function in art::gc::collector::ConcurrentCopying
177 void ConcurrentCopying::MarkHeapReference(mirror::HeapReference<mirror::Object>* field, in MarkHeapReference()
203 ConcurrentCopying::~ConcurrentCopying() { in ~ConcurrentCopying()
207 void ConcurrentCopying::RunPhases() { in RunPhases()
261 class ConcurrentCopying::ActivateReadBarrierEntrypointsCheckpoint : public Closure {
263 explicit ActivateReadBarrierEntrypointsCheckpoint(ConcurrentCopying* concurrent_copying) in ActivateReadBarrierEntrypointsCheckpoint()
279 ConcurrentCopying* const concurrent_copying_;
282 class ConcurrentCopying::ActivateReadBarrierEntrypointsCallback : public Closure {
284 explicit ActivateReadBarrierEntrypointsCallback(ConcurrentCopying* concurrent_copying) in ActivateReadBarrierEntrypointsCallback()
295 ConcurrentCopying* const concurrent_copying_;
298 void ConcurrentCopying::ActivateReadBarrierEntrypoints() { in ActivateReadBarrierEntrypoints()
314 void ConcurrentCopying::CreateInterRegionRefBitmaps() { in CreateInterRegionRefBitmaps()
337 void ConcurrentCopying::BindBitmaps() { in BindBitmaps()
391 void ConcurrentCopying::InitializePhase() { in InitializePhase()
450 mark_stack_mode_.store(ConcurrentCopying::kMarkStackModeThreadLocal, std::memory_order_relaxed); in InitializePhase()
456 class ConcurrentCopying::ThreadFlipVisitor : public Closure, public RootVisitor {
458 ThreadFlipVisitor(ConcurrentCopying* concurrent_copying, bool use_tlab) in ThreadFlipVisitor()
471 if (ConcurrentCopying::kEnableFromSpaceAccountingCheck) { in Run()
527 ConcurrentCopying* const concurrent_copying_;
532 class ConcurrentCopying::FlipCallback : public Closure {
534 explicit FlipCallback(ConcurrentCopying* concurrent_copying) in FlipCallback()
539 ConcurrentCopying* cc = concurrent_copying_; in Run()
564 if (ConcurrentCopying::kEnableFromSpaceAccountingCheck) { in Run()
597 ConcurrentCopying* const concurrent_copying_;
600 class ConcurrentCopying::VerifyGrayImmuneObjectsVisitor {
602 explicit VerifyGrayImmuneObjectsVisitor(ConcurrentCopying* collector) in VerifyGrayImmuneObjectsVisitor()
635 ConcurrentCopying* const collector_;
660 void ConcurrentCopying::VerifyGrayImmuneObjects() { in VerifyGrayImmuneObjects()
680 class ConcurrentCopying::VerifyNoMissingCardMarkVisitor {
682 VerifyNoMissingCardMarkVisitor(ConcurrentCopying* cc, ObjPtr<mirror::Object> holder) in VerifyNoMissingCardMarkVisitor()
729 ConcurrentCopying* const cc_;
733 void ConcurrentCopying::VerifyNoMissingCardMarks() { in VerifyNoMissingCardMarks()
754 void ConcurrentCopying::FlipThreadRoots() { in FlipThreadRoots()
783 class ConcurrentCopying::GrayImmuneObjectVisitor {
809 void ConcurrentCopying::GrayAllDirtyImmuneObjects() { in GrayAllDirtyImmuneObjects()
849 void ConcurrentCopying::GrayAllNewlyDirtyImmuneObjects() { in GrayAllNewlyDirtyImmuneObjects()
880 void ConcurrentCopying::SwapStacks() { in SwapStacks()
884 void ConcurrentCopying::RecordLiveStackFreezeSize(Thread* self) { in RecordLiveStackFreezeSize()
890 inline void ConcurrentCopying::ScanImmuneObject(mirror::Object* obj) { in ScanImmuneObject()
903 class ConcurrentCopying::ImmuneSpaceScanObjVisitor {
905 explicit ImmuneSpaceScanObjVisitor(ConcurrentCopying* cc) in ImmuneSpaceScanObjVisitor()
929 ConcurrentCopying* const collector_;
933 class ConcurrentCopying::CaptureRootsForMarkingVisitor : public RootVisitor {
935 explicit CaptureRootsForMarkingVisitor(ConcurrentCopying* cc, Thread* self) in CaptureRootsForMarkingVisitor()
967 ConcurrentCopying* const collector_;
971 class ConcurrentCopying::RevokeThreadLocalMarkStackCheckpoint : public Closure {
973 RevokeThreadLocalMarkStackCheckpoint(ConcurrentCopying* concurrent_copying, in RevokeThreadLocalMarkStackCheckpoint()
1003 ConcurrentCopying* const concurrent_copying_;
1009 class ConcurrentCopying::CaptureThreadRootsForMarkingAndCheckpoint :
1012 explicit CaptureThreadRootsForMarkingAndCheckpoint(ConcurrentCopying* cc) : in CaptureThreadRootsForMarkingAndCheckpoint()
1031 void ConcurrentCopying::CaptureThreadRootsForMarking() { in CaptureThreadRootsForMarking()
1062 class ConcurrentCopying::ComputeLiveBytesAndMarkRefFieldsVisitor {
1064 explicit ComputeLiveBytesAndMarkRefFieldsVisitor(ConcurrentCopying* collector, in ComputeLiveBytesAndMarkRefFieldsVisitor()
1161 ConcurrentCopying* const collector_;
1166 void ConcurrentCopying::AddLiveBytesAndScanRef(mirror::Object* ref) { in AddLiveBytesAndScanRef()
1204 bool ConcurrentCopying::TestAndSetMarkBitForRef(mirror::Object* ref) { in TestAndSetMarkBitForRef()
1235 bool ConcurrentCopying::TestMarkBitmapForRef(mirror::Object* ref) { in TestMarkBitmapForRef()
1259 void ConcurrentCopying::PushOntoLocalMarkStack(mirror::Object* ref) { in PushOntoLocalMarkStack()
1272 void ConcurrentCopying::ProcessMarkStackForMarkingAndComputeLiveBytes() { in ProcessMarkStackForMarkingAndComputeLiveBytes()
1292 class ConcurrentCopying::ImmuneSpaceCaptureRefsVisitor {
1294 explicit ImmuneSpaceCaptureRefsVisitor(ConcurrentCopying* cc) : collector_(cc) {} in ImmuneSpaceCaptureRefsVisitor()
1308 ConcurrentCopying* const collector_;
1372 void ConcurrentCopying::MarkingPhase() { in MarkingPhase()
1430 void ConcurrentCopying::ScanDirtyObject(mirror::Object* obj) { in ScanDirtyObject()
1445 void ConcurrentCopying::CopyingPhase() { in CopyingPhase()
1693 void ConcurrentCopying::ReenableWeakRefAccess(Thread* self) { in ReenableWeakRefAccess()
1711 class ConcurrentCopying::DisableMarkingCheckpoint : public Closure {
1713 explicit DisableMarkingCheckpoint(ConcurrentCopying* concurrent_copying) in DisableMarkingCheckpoint()
1732 ConcurrentCopying* const concurrent_copying_;
1735 class ConcurrentCopying::DisableMarkingCallback : public Closure {
1737 explicit DisableMarkingCallback(ConcurrentCopying* concurrent_copying) in DisableMarkingCallback()
1755 ConcurrentCopying* const concurrent_copying_;
1758 void ConcurrentCopying::IssueDisableMarkingCheckpoint() { in IssueDisableMarkingCheckpoint()
1779 void ConcurrentCopying::DisableMarking() { in DisableMarking()
1792 void ConcurrentCopying::IssueEmptyCheckpoint() { in IssueEmptyCheckpoint()
1801 void ConcurrentCopying::ExpandGcMarkStack() { in ExpandGcMarkStack()
1813 void ConcurrentCopying::PushOntoMarkStack(Thread* const self, mirror::Object* to_ref) { in PushOntoMarkStack()
1879 accounting::ObjectStack* ConcurrentCopying::GetAllocationStack() { in GetAllocationStack()
1883 accounting::ObjectStack* ConcurrentCopying::GetLiveStack() { in GetLiveStack()
1889 class ConcurrentCopying::VerifyNoFromSpaceRefsVisitor : public SingleRootVisitor {
1891 explicit VerifyNoFromSpaceRefsVisitor(ConcurrentCopying* collector) in VerifyNoFromSpaceRefsVisitor()
1916 ConcurrentCopying* const collector_;
1919 class ConcurrentCopying::VerifyNoFromSpaceRefsFieldVisitor {
1921 explicit VerifyNoFromSpaceRefsFieldVisitor(ConcurrentCopying* collector) in VerifyNoFromSpaceRefsFieldVisitor()
1954 ConcurrentCopying* const collector_;
1958 void ConcurrentCopying::VerifyNoFromSpaceReferences() { in VerifyNoFromSpaceReferences()
2014 class ConcurrentCopying::AssertToSpaceInvariantFieldVisitor {
2016 explicit AssertToSpaceInvariantFieldVisitor(ConcurrentCopying* collector) in AssertToSpaceInvariantFieldVisitor()
2046 ConcurrentCopying* const collector_;
2049 void ConcurrentCopying::RevokeThreadLocalMarkStacks(bool disable_weak_ref_access, in RevokeThreadLocalMarkStacks()
2084 void ConcurrentCopying::RevokeThreadLocalMarkStack(Thread* thread) { in RevokeThreadLocalMarkStack()
2096 void ConcurrentCopying::ProcessMarkStack() { in ProcessMarkStack()
2111 bool ConcurrentCopying::ProcessMarkStackOnce() { in ProcessMarkStackOnce()
2184 size_t ConcurrentCopying::ProcessThreadLocalMarkStacks(bool disable_weak_ref_access, in ProcessThreadLocalMarkStacks()
2227 inline void ConcurrentCopying::ProcessMarkStackRef(mirror::Object* to_ref) { in ProcessMarkStackRef()
2381 class ConcurrentCopying::DisableWeakRefAccessCallback : public Closure {
2383 explicit DisableWeakRefAccessCallback(ConcurrentCopying* concurrent_copying) in DisableWeakRefAccessCallback()
2395 ConcurrentCopying* const concurrent_copying_;
2398 void ConcurrentCopying::SwitchToSharedMarkStackMode() { in SwitchToSharedMarkStackMode()
2421 void ConcurrentCopying::SwitchToGcExclusiveMarkStackMode() { in SwitchToGcExclusiveMarkStackMode()
2436 void ConcurrentCopying::CheckEmptyMarkStack() { in CheckEmptyMarkStack()
2471 void ConcurrentCopying::SweepSystemWeaks(Thread* self) { in SweepSystemWeaks()
2477 void ConcurrentCopying::Sweep(bool swap_bitmaps) { in Sweep()
2508 void ConcurrentCopying::SweepArray(accounting::ObjectStack* allocations, bool swap_bitmaps) { in SweepArray()
2598 void ConcurrentCopying::MarkZygoteLargeObjects() { in MarkZygoteLargeObjects()
2621 void ConcurrentCopying::SweepLargeObjects(bool swap_bitmaps) { in SweepLargeObjects()
2628 void ConcurrentCopying::CaptureRssAtPeak() { in CaptureRssAtPeak()
2694 void ConcurrentCopying::ReclaimPhase() { in ReclaimPhase()
2815 std::string ConcurrentCopying::DumpReferenceInfo(mirror::Object* ref, in DumpReferenceInfo()
2837 std::string ConcurrentCopying::DumpHeapReference(mirror::Object* obj, in DumpHeapReference()
2851 void ConcurrentCopying::AssertToSpaceInvariant(mirror::Object* obj, in AssertToSpaceInvariant()
2949 std::string ConcurrentCopying::DumpGcRoot(mirror::Object* ref) { in DumpGcRoot()
2958 void ConcurrentCopying::AssertToSpaceInvariant(GcRootSource* gc_root_source, in AssertToSpaceInvariant()
3019 void ConcurrentCopying::LogFromSpaceRefHolder(mirror::Object* obj, MemberOffset offset) { in LogFromSpaceRefHolder()
3070 bool ConcurrentCopying::IsMarkedInNonMovingSpace(mirror::Object* from_ref) { in IsMarkedInNonMovingSpace()
3093 void ConcurrentCopying::AssertToSpaceInvariantInNonMovingSpace(mirror::Object* obj, in AssertToSpaceInvariantInNonMovingSpace()
3132 class ConcurrentCopying::RefFieldsVisitor {
3134 explicit RefFieldsVisitor(ConcurrentCopying* collector, Thread* const thread) in RefFieldsVisitor()
3167 ConcurrentCopying* const collector_;
3172 inline void ConcurrentCopying::Scan(mirror::Object* to_ref, size_t obj_size) { in Scan()
3198 inline void ConcurrentCopying::Process(mirror::Object* obj, MemberOffset offset) { in Process()
3231 inline void ConcurrentCopying::VisitRoots( in VisitRoots()
3254 inline void ConcurrentCopying::MarkRoot(Thread* const self, in MarkRoot()
3273 inline void ConcurrentCopying::VisitRoots( in VisitRoots()
3287 class ConcurrentCopying::ScopedGcGraysImmuneObjects {
3289 explicit ScopedGcGraysImmuneObjects(ConcurrentCopying* collector) in ScopedGcGraysImmuneObjects()
3309 ConcurrentCopying* const collector_;
3315 void ConcurrentCopying::FillWithFakeObject(Thread* const self, in FillWithFakeObject()
3361 mirror::Object* ConcurrentCopying::AllocateInSkippedBlock(Thread* const self, size_t alloc_size) { in AllocateInSkippedBlock()
3419 mirror::Object* ConcurrentCopying::Copy(Thread* const self, in Copy()
3597 mirror::Object* ConcurrentCopying::IsMarked(mirror::Object* from_ref) { in IsMarked()
3637 bool ConcurrentCopying::IsOnAllocStack(mirror::Object* ref) { in IsOnAllocStack()
3644 mirror::Object* ConcurrentCopying::MarkNonMoving(Thread* const self, in MarkNonMoving()
3731 void ConcurrentCopying::FinishPhase() { in FinishPhase()
3794 bool ConcurrentCopying::IsNullOrMarkedHeapReference(mirror::HeapReference<mirror::Object>* field, in IsNullOrMarkedHeapReference()
3819 mirror::Object* ConcurrentCopying::MarkObject(mirror::Object* from_ref) { in MarkObject()
3823 void ConcurrentCopying::DelayReferenceReferent(ObjPtr<mirror::Class> klass, in DelayReferenceReferent()
3828 void ConcurrentCopying::ProcessReferences(Thread* self) { in ProcessReferences()
3836 void ConcurrentCopying::RevokeAllThreadLocalBuffers() { in RevokeAllThreadLocalBuffers()
3841 mirror::Object* ConcurrentCopying::MarkFromReadBarrierWithMeasurements(Thread* const self, in MarkFromReadBarrierWithMeasurements()
3859 void ConcurrentCopying::DumpPerformanceInfo(std::ostream& os) { in DumpPerformanceInfo()