Skip to content

Commit

Permalink
[heap] Introduce separate young and old list for ArrayBufferExtension
Browse files Browse the repository at this point in the history
Split the linked list of array buffer extensions into two lists for
young and old JSArrayBuffers. Process young extensions during the
minor GC pause. When promoting JSArrayBuffers into the old gen, move
the extension into the old linked list as well.

Bug: v8:10064
Change-Id: I07275ffe7ba918c9b2d6d0648a6d1b59e4fa4891
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/1997438
Commit-Queue: Dominik Inführ <dinfuehr@chromium.org>
Reviewed-by: Ulan Degenbaev <ulan@chromium.org>
Cr-Commit-Position: refs/heads/master@{#65842}
  • Loading branch information
Dominik Inführ authored and Commit Bot committed Jan 17, 2020
1 parent 873f66c commit 6770210
Show file tree
Hide file tree
Showing 12 changed files with 369 additions and 25 deletions.
6 changes: 6 additions & 0 deletions src/flags/flag-definitions.h
Original file line number Diff line number Diff line change
Expand Up @@ -887,6 +887,9 @@ DEFINE_BOOL(concurrent_marking, V8_CONCURRENT_MARKING_BOOL,
#else
#define V8_ARRAY_BUFFER_EXTENSION_BOOL false
#endif
DEFINE_BOOL_READONLY(array_buffer_extension, V8_ARRAY_BUFFER_EXTENSION_BOOL,
"enable array buffer tracking using extension objects")
DEFINE_IMPLICATION(array_buffer_extension, always_promote_young_mc)
DEFINE_BOOL(parallel_marking, true, "use parallel marking in atomic pause")
DEFINE_INT(ephemeron_fixpoint_iterations, 10,
"number of fixpoint iterations it takes to switch to linear "
Expand Down Expand Up @@ -1346,6 +1349,9 @@ DEFINE_BOOL(minor_mc_parallel_marking, true,
DEFINE_BOOL(trace_minor_mc_parallel_marking, false,
"trace parallel marking for the young generation")
DEFINE_BOOL(minor_mc, false, "perform young generation mark compact GCs")
#else
DEFINE_BOOL_READONLY(minor_mc, false,
"perform young generation mark compact GCs")
#endif // ENABLE_MINOR_MC

//
Expand Down
20 changes: 18 additions & 2 deletions src/heap/heap.cc
Original file line number Diff line number Diff line change
Expand Up @@ -3790,16 +3790,32 @@ void Heap::RemoveNearHeapLimitCallback(v8::NearHeapLimitCallback callback,
UNREACHABLE();
}

// Prepends |extension| to the extension list matching |object|'s generation:
// the young list for new-space buffers, the old list otherwise.
void Heap::AppendArrayBufferExtension(JSArrayBuffer object,
                                      ArrayBufferExtension* extension) {
  ArrayBufferExtension** head = Heap::InYoungGeneration(object)
                                    ? &young_array_buffer_extensions_
                                    : &old_array_buffer_extensions_;
  extension->set_next(*head);
  *head = extension;
}

// Frees every tracked ArrayBufferExtension, from both the old and the young
// list. Afterwards both list heads are null. (Removed a stale local
// |current| initialized from the deleted |array_buffer_extensions_| member —
// leftover of the old single-list implementation.)
void Heap::ReleaseAllArrayBufferExtensions() {
  ReleaseAllArrayBufferExtensions(&old_array_buffer_extensions_);
  ReleaseAllArrayBufferExtensions(&young_array_buffer_extensions_);
}

// Deletes every extension on the singly-linked list starting at |*head| and
// resets the head to null. |head| points at one of the heap's two extension
// list heads. (Removed a stale assignment to the deleted
// |array_buffer_extensions_| member that duplicated the |*head| reset.)
void Heap::ReleaseAllArrayBufferExtensions(ArrayBufferExtension** head) {
  ArrayBufferExtension* current = *head;

  while (current) {
    // Read the successor before deleting: |delete| invalidates the link.
    ArrayBufferExtension* next = current->next();
    delete current;
    current = next;
  }

  *head = nullptr;
}

void Heap::AutomaticallyRestoreInitialHeapLimit(double threshold_percent) {
Expand Down
25 changes: 17 additions & 8 deletions src/heap/heap.h
Original file line number Diff line number Diff line change
Expand Up @@ -587,19 +587,25 @@ class Heap {
V8_EXPORT_PRIVATE void AutomaticallyRestoreInitialHeapLimit(
double threshold_percent);

// Head of the linked list of extensions owned by old-generation
// JSArrayBuffers. (Removed the fused remnant of the old single-list accessor
// |array_buffer_extensions()|, which left a duplicate unterminated
// definition.)
ArrayBufferExtension* old_array_buffer_extensions() {
  return old_array_buffer_extensions_;
}

void set_array_buffer_extensions(ArrayBufferExtension* head) {
array_buffer_extensions_ = head;
ArrayBufferExtension* young_array_buffer_extensions() {
return young_array_buffer_extensions_;
}

void AppendArrayBufferExtension(ArrayBufferExtension* extension) {
extension->set_next(array_buffer_extensions_);
array_buffer_extensions_ = extension;
void set_old_array_buffer_extensions(ArrayBufferExtension* head) {
old_array_buffer_extensions_ = head;
}

// Replaces the head of the young-generation extension list, e.g. after a GC
// has swept and rebuilt it.
void set_young_array_buffer_extensions(ArrayBufferExtension* head) {
  young_array_buffer_extensions_ = head;
}

void AppendArrayBufferExtension(JSArrayBuffer object,
ArrayBufferExtension* extension);

void ReleaseAllArrayBufferExtensions();

V8_EXPORT_PRIVATE double MonotonicallyIncreasingTimeInMs();
Expand Down Expand Up @@ -1413,6 +1419,8 @@ class Heap {
static Isolate* GetIsolateFromWritableObject(HeapObject object);

private:
void ReleaseAllArrayBufferExtensions(ArrayBufferExtension** head);

using ExternalStringTableUpdaterCallback = String (*)(Heap* heap,
FullObjectSlot pointer);

Expand Down Expand Up @@ -1925,7 +1933,8 @@ class Heap {
Space* space_[LAST_SPACE + 1];

// List for tracking ArrayBufferExtensions
ArrayBufferExtension* array_buffer_extensions_ = nullptr;
ArrayBufferExtension* old_array_buffer_extensions_ = nullptr;
ArrayBufferExtension* young_array_buffer_extensions_ = nullptr;

// Determines whether code space is write-protected. This is essentially a
// race-free copy of the {FLAG_write_protect_code_memory} flag.
Expand Down
8 changes: 4 additions & 4 deletions src/heap/incremental-marking.cc
Original file line number Diff line number Diff line change
Expand Up @@ -511,8 +511,6 @@ void IncrementalMarking::UpdateMarkingWorklistAfterScavenge() {
#ifdef ENABLE_MINOR_MC
MinorMarkCompactCollector::MarkingState* minor_marking_state =
heap()->minor_mark_compact_collector()->marking_state();
#else
void* minor_marking_state = nullptr;
#endif // ENABLE_MINOR_MC

collector_->marking_worklists_holder()->Update(
Expand All @@ -521,8 +519,10 @@ void IncrementalMarking::UpdateMarkingWorklistAfterScavenge() {
// this is referred inside DCHECK.
this,
#endif
filler_map,
minor_marking_state](HeapObject obj, HeapObject* out) -> bool {
#ifdef ENABLE_MINOR_MC
minor_marking_state,
#endif
filler_map](HeapObject obj, HeapObject* out) -> bool {
DCHECK(obj.IsHeapObject());
// Only pointers to from space have to be updated.
if (Heap::InFromPage(obj)) {
Expand Down
95 changes: 87 additions & 8 deletions src/heap/mark-compact.cc
Original file line number Diff line number Diff line change
Expand Up @@ -932,9 +932,14 @@ void MarkCompactCollector::Finish() {
}

// Sweeps both extension lists after a full GC: young survivors are promoted
// and then processed together with the old list. (Removed stale locals
// |current|/|last| initialized from the deleted |array_buffer_extensions()|
// accessor — residue of the old single-list sweep.)
void MarkCompactCollector::SweepArrayBufferExtensions() {
  if (!V8_ARRAY_BUFFER_EXTENSION_BOOL) return;
  ArrayBufferExtension* promoted_list = SweepYoungArrayBufferExtensions();
  SweepOldArrayBufferExtensions(promoted_list);
}

void MarkCompactCollector::SweepOldArrayBufferExtensions(
ArrayBufferExtension* promoted_list) {
ArrayBufferExtension* current = heap_->old_array_buffer_extensions();
ArrayBufferExtension* last = promoted_list;

while (current) {
ArrayBufferExtension* next = current->next();
Expand All @@ -950,7 +955,29 @@ void MarkCompactCollector::SweepArrayBufferExtensions() {
current = next;
}

heap_->set_array_buffer_extensions(last);
heap_->set_old_array_buffer_extensions(last);
}

// Sweeps the young extension list: unmarked (dead) extensions are freed,
// marked survivors are unmarked and collected onto a separate list, which is
// returned so the caller can splice it into the old list. The young list
// head is cleared.
ArrayBufferExtension* MarkCompactCollector::SweepYoungArrayBufferExtensions() {
  ArrayBufferExtension* promoted_list = nullptr;

  for (ArrayBufferExtension* extension = heap_->young_array_buffer_extensions();
       extension != nullptr;) {
    // Save the successor before the node is freed or relinked.
    ArrayBufferExtension* const next = extension->next();

    if (extension->IsMarked()) {
      extension->Unmark();
      extension->set_next(promoted_list);
      promoted_list = extension;
    } else {
      delete extension;
    }

    extension = next;
  }

  heap_->set_young_array_buffer_extensions(nullptr);
  return promoted_list;
}

class MarkCompactCollector::RootMarkingVisitor final : public RootVisitor {
Expand Down Expand Up @@ -1219,6 +1246,8 @@ class RecordMigratedSlotVisitor : public ObjectVisitor {
inline void VisitRuntimeEntry(Code host, RelocInfo* rinfo) final {}
inline void VisitInternalReference(Code host, RelocInfo* rinfo) final {}

  // Hook invoked after an object has been migrated to the old generation so
  // JSArrayBuffer extensions can be flagged as promoted; no-op by default and
  // overridden by the young-generation visitor.
  virtual void MarkArrayBufferExtensionPromoted(HeapObject object) {}

protected:
inline virtual void RecordMigratedSlot(HeapObject host, MaybeObject value,
Address slot) {
Expand Down Expand Up @@ -1305,6 +1334,9 @@ class EvacuateVisitorBase : public HeapObjectVisitor {
if (mode != MigrationMode::kFast)
base->ExecuteMigrationObservers(dest, src, dst, size);
dst.IterateBodyFast(dst.map(), size, base->record_visitor_);
if (V8_UNLIKELY(FLAG_minor_mc)) {
base->record_visitor_->MarkArrayBufferExtensionPromoted(dst);
}
} else if (dest == CODE_SPACE) {
DCHECK_CODEOBJECT_SIZE(size, base->heap_->code_space());
base->heap_->CopyBlock(dst_addr, src_addr, size);
Expand Down Expand Up @@ -1525,6 +1557,9 @@ class EvacuateNewSpacePageVisitor final : public HeapObjectVisitor {
local_pretenuring_feedback_);
} else if (mode == NEW_TO_OLD) {
object.IterateBodyFast(record_visitor_);
if (V8_UNLIKELY(FLAG_minor_mc)) {
record_visitor_->MarkArrayBufferExtensionPromoted(object);
}
}
return true;
}
Expand Down Expand Up @@ -3134,12 +3169,14 @@ void MarkCompactCollectorBase::CreateAndExecuteEvacuationTasks(
}
}

bool MarkCompactCollectorBase::ShouldMovePage(Page* p, intptr_t live_bytes) {
bool MarkCompactCollectorBase::ShouldMovePage(Page* p, intptr_t live_bytes,
bool always_promote_young) {
const bool reduce_memory = heap()->ShouldReduceMemory();
const Address age_mark = heap()->new_space()->age_mark();
return !reduce_memory && !p->NeverEvacuate() &&
(live_bytes > Evacuator::NewSpacePageEvacuationThreshold()) &&
!p->Contains(age_mark) && heap()->CanExpandOldGeneration(live_bytes);
(always_promote_young || !p->Contains(age_mark)) &&
heap()->CanExpandOldGeneration(live_bytes);
}

void MarkCompactCollector::EvacuatePagesInParallel() {
Expand All @@ -3156,7 +3193,8 @@ void MarkCompactCollector::EvacuatePagesInParallel() {
intptr_t live_bytes_on_page = non_atomic_marking_state()->live_bytes(page);
if (live_bytes_on_page == 0 && !page->contains_array_buffers()) continue;
live_bytes += live_bytes_on_page;
if (ShouldMovePage(page, live_bytes_on_page)) {
if (ShouldMovePage(page, live_bytes_on_page,
FLAG_always_promote_young_mc)) {
if (page->IsFlagSet(MemoryChunk::NEW_SPACE_BELOW_AGE_MARK) ||
FLAG_always_promote_young_mc) {
EvacuateNewSpacePageVisitor<NEW_TO_OLD>::Move(page);
Expand Down Expand Up @@ -4251,6 +4289,13 @@ class YoungGenerationMarkingVisitor final
UNREACHABLE();
}

V8_INLINE int VisitJSArrayBuffer(Map map, JSArrayBuffer object) {
object.YoungMarkExtension();
int size = JSArrayBuffer::BodyDescriptor::SizeOf(map, object);
JSArrayBuffer::BodyDescriptor::IterateBody(map, object, size, this);
return size;
}

private:
template <typename TSlot>
V8_INLINE void VisitPointersImpl(HeapObject host, TSlot start, TSlot end) {
Expand Down Expand Up @@ -4323,6 +4368,33 @@ void MinorMarkCompactCollector::CleanupSweepToIteratePages() {
sweep_to_iterate_pages_.clear();
}

void MinorMarkCompactCollector::SweepArrayBufferExtensions() {
ArrayBufferExtension* current = heap_->young_array_buffer_extensions();
ArrayBufferExtension* last_young = nullptr;
ArrayBufferExtension* last_old = heap_->old_array_buffer_extensions();

while (current) {
ArrayBufferExtension* next = current->next();

if (!current->IsYoungMarked()) {
delete current;
} else if (current->IsYoungPromoted()) {
current->YoungUnmark();
current->set_next(last_old);
last_old = current;
} else {
current->YoungUnmark();
current->set_next(last_young);
last_young = current;
}

current = next;
}

heap_->set_old_array_buffer_extensions(last_old);
heap_->set_young_array_buffer_extensions(last_young);
}

class YoungGenerationMigrationObserver final : public MigrationObserver {
public:
YoungGenerationMigrationObserver(Heap* heap,
Expand Down Expand Up @@ -4358,6 +4430,11 @@ class YoungGenerationRecordMigratedSlotVisitor final
UNREACHABLE();
}

void MarkArrayBufferExtensionPromoted(HeapObject object) final {
if (!object.IsJSArrayBuffer()) return;
JSArrayBuffer::cast(object).YoungMarkExtensionPromoted();
}

private:
// Only record slots for host objects that are considered as live by the full
// collector.
Expand Down Expand Up @@ -4521,6 +4598,8 @@ void MinorMarkCompactCollector::CollectGarbage() {
}

heap()->account_external_memory_concurrently_freed();

SweepArrayBufferExtensions();
}

void MinorMarkCompactCollector::MakeIterable(
Expand Down Expand Up @@ -5088,7 +5167,7 @@ void MinorMarkCompactCollector::EvacuatePagesInParallel() {
intptr_t live_bytes_on_page = non_atomic_marking_state()->live_bytes(page);
if (live_bytes_on_page == 0 && !page->contains_array_buffers()) continue;
live_bytes += live_bytes_on_page;
if (ShouldMovePage(page, live_bytes_on_page)) {
if (ShouldMovePage(page, live_bytes_on_page, false)) {
if (page->IsFlagSet(MemoryChunk::NEW_SPACE_BELOW_AGE_MARK)) {
EvacuateNewSpacePageVisitor<NEW_TO_OLD>::Move(page);
} else {
Expand Down
6 changes: 5 additions & 1 deletion src/heap/mark-compact.h
Original file line number Diff line number Diff line change
Expand Up @@ -225,7 +225,7 @@ class MarkCompactCollectorBase {
const intptr_t live_bytes);

// Returns whether this page should be moved according to heuristics.
bool ShouldMovePage(Page* p, intptr_t live_bytes);
bool ShouldMovePage(Page* p, intptr_t live_bytes, bool promote_young);

int CollectToSpaceUpdatingItems(ItemParallelJob* job);
template <typename IterateableSpace>
Expand Down Expand Up @@ -610,6 +610,8 @@ class MarkCompactCollector final : public MarkCompactCollectorBase {

// Free unmarked ArrayBufferExtensions.
void SweepArrayBufferExtensions();
void SweepOldArrayBufferExtensions(ArrayBufferExtension* promoted_list);
ArrayBufferExtension* SweepYoungArrayBufferExtensions();

void MarkLiveObjects() override;

Expand Down Expand Up @@ -872,6 +874,8 @@ class MinorMarkCompactCollector final : public MarkCompactCollectorBase {

int NumberOfParallelMarkingTasks(int pages);

void SweepArrayBufferExtensions();

MarkingWorklist* worklist_;

YoungGenerationMarkingVisitor* main_marking_visitor_;
Expand Down
7 changes: 7 additions & 0 deletions src/heap/scavenger-inl.h
Original file line number Diff line number Diff line change
Expand Up @@ -476,6 +476,13 @@ void ScavengeVisitor::VisitPointersImpl(HeapObject host, TSlot start,
}
}

// Marks the buffer's extension as live in the young generation during
// scavenging, then visits the object body and returns its size.
int ScavengeVisitor::VisitJSArrayBuffer(Map map, JSArrayBuffer object) {
  object.YoungMarkExtension();
  const int object_size = JSArrayBuffer::BodyDescriptor::SizeOf(map, object);
  JSArrayBuffer::BodyDescriptor::IterateBody(map, object, object_size, this);
  return object_size;
}

int ScavengeVisitor::VisitEphemeronHashTable(Map map,
EphemeronHashTable table) {
// Register table with the scavenger, so it can take care of the weak keys
Expand Down
Loading

0 comments on commit 6770210

Please sign in to comment.