Thanks to visit codestin.com
Credit goes to github.com

Skip to content

Commit ec952aa

Browse files
ofrobots authored and Commit Bot committed
Reland "[profiler] proper observation of old space inline allocations"
This is a reland of 672a41c

Original change's description:
> [profiler] proper observation of old space inline allocations
>
> Bug: chromium:633920
> Change-Id: I9a2f4a89f6b9c0f63cb3b166b06a88a12f0a203c
> Reviewed-on: https://chromium-review.googlesource.com/631696
> Commit-Queue: Ali Ijaz Sheikh <[email protected]>
> Reviewed-by: Ulan Degenbaev <[email protected]>
> Cr-Commit-Position: refs/heads/master@{#48043}

Bug: chromium:633920
Change-Id: I6fe743d31b8ff26f3858488d4c014c62d3c85add
Reviewed-on: https://chromium-review.googlesource.com/671127
Reviewed-by: Ulan Degenbaev <[email protected]>
Commit-Queue: Ali Ijaz Sheikh <[email protected]>
Cr-Commit-Position: refs/heads/master@{#48085}
1 parent f2cd10d commit ec952aa

File tree

7 files changed

+196
-69
lines changed

7 files changed

+196
-69
lines changed

src/heap/spaces-inl.h

Lines changed: 9 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -369,6 +369,11 @@ AllocationResult PagedSpace::AllocateRawAligned(int size_in_bytes,
369369

370370
AllocationResult PagedSpace::AllocateRaw(int size_in_bytes,
371371
AllocationAlignment alignment) {
372+
DCHECK(top() >= top_on_previous_step_);
373+
size_t bytes_since_last =
374+
top_on_previous_step_ ? top() - top_on_previous_step_ : 0;
375+
376+
DCHECK_IMPLIES(!SupportsInlineAllocation(), bytes_since_last == 0);
372377
#ifdef V8_HOST_ARCH_32_BIT
373378
AllocationResult result =
374379
alignment == kDoubleAligned
@@ -378,11 +383,13 @@ AllocationResult PagedSpace::AllocateRaw(int size_in_bytes,
378383
AllocationResult result = AllocateRawUnaligned(size_in_bytes);
379384
#endif
380385
HeapObject* heap_obj = nullptr;
381-
if (!result.IsRetry() && result.To(&heap_obj)) {
382-
AllocationStep(heap_obj->address(), size_in_bytes);
386+
if (!result.IsRetry() && result.To(&heap_obj) && !is_local()) {
387+
AllocationStep(static_cast<int>(size_in_bytes + bytes_since_last),
388+
heap_obj->address(), size_in_bytes);
383389
DCHECK_IMPLIES(
384390
heap()->incremental_marking()->black_allocation(),
385391
heap()->incremental_marking()->marking_state()->IsBlack(heap_obj));
392+
StartNextInlineAllocationStep();
386393
}
387394
return result;
388395
}

src/heap/spaces.cc

Lines changed: 81 additions & 41 deletions
Original file line numberDiff line numberDiff line change
@@ -1328,13 +1328,15 @@ STATIC_ASSERT(static_cast<ObjectSpace>(1 << AllocationSpace::MAP_SPACE) ==
13281328

13291329
void Space::AddAllocationObserver(AllocationObserver* observer) {
13301330
allocation_observers_.push_back(observer);
1331+
StartNextInlineAllocationStep();
13311332
}
13321333

13331334
void Space::RemoveAllocationObserver(AllocationObserver* observer) {
13341335
auto it = std::find(allocation_observers_.begin(),
13351336
allocation_observers_.end(), observer);
13361337
DCHECK(allocation_observers_.end() != it);
13371338
allocation_observers_.erase(it);
1339+
StartNextInlineAllocationStep();
13381340
}
13391341

13401342
void Space::PauseAllocationObservers() { allocation_observers_paused_ = true; }
@@ -1343,11 +1345,12 @@ void Space::ResumeAllocationObservers() {
13431345
allocation_observers_paused_ = false;
13441346
}
13451347

1346-
void Space::AllocationStep(Address soon_object, int size) {
1348+
void Space::AllocationStep(int bytes_since_last, Address soon_object,
1349+
int size) {
13471350
if (!allocation_observers_paused_) {
13481351
heap()->CreateFillerObjectAt(soon_object, size, ClearRecordedSlots::kNo);
13491352
for (AllocationObserver* observer : allocation_observers_) {
1350-
observer->AllocationStep(size, soon_object, size);
1353+
observer->AllocationStep(bytes_since_last, soon_object, size);
13511354
}
13521355
}
13531356
}
@@ -1367,7 +1370,8 @@ PagedSpace::PagedSpace(Heap* heap, AllocationSpace space,
13671370
: Space(heap, space, executable),
13681371
anchor_(this),
13691372
free_list_(this),
1370-
locked_page_(nullptr) {
1373+
locked_page_(nullptr),
1374+
top_on_previous_step_(0) {
13711375
area_size_ = MemoryAllocator::PageAreaSize(space);
13721376
accounting_stats_.Clear();
13731377

@@ -1596,6 +1600,48 @@ void PagedSpace::SetAllocationInfo(Address top, Address limit) {
15961600
}
15971601
}
15981602

1603+
void PagedSpace::DecreaseLimit(Address new_limit) {
1604+
Address old_limit = limit();
1605+
DCHECK_LE(top(), new_limit);
1606+
DCHECK_GE(old_limit, new_limit);
1607+
if (new_limit != old_limit) {
1608+
SetTopAndLimit(top(), new_limit);
1609+
Free(new_limit, old_limit - new_limit);
1610+
if (heap()->incremental_marking()->black_allocation()) {
1611+
Page::FromAllocationAreaAddress(new_limit)->DestroyBlackArea(new_limit,
1612+
old_limit);
1613+
}
1614+
}
1615+
}
1616+
1617+
Address PagedSpace::ComputeLimit(Address start, Address end,
1618+
size_t size_in_bytes) {
1619+
DCHECK_GE(end - start, size_in_bytes);
1620+
1621+
if (heap()->inline_allocation_disabled()) {
1622+
// Keep the linear allocation area to fit exactly the requested size.
1623+
return start + size_in_bytes;
1624+
} else if (!allocation_observers_paused_ && !allocation_observers_.empty() &&
1625+
identity() == OLD_SPACE && !is_local()) {
1626+
// Generated code may allocate inline from the linear allocation area for
1627+
// Old Space. To make sure we can observe these allocations, we use a lower
1628+
// limit.
1629+
size_t step = RoundSizeDownToObjectAlignment(
1630+
static_cast<int>(GetNextInlineAllocationStepSize()));
1631+
return Max(start + size_in_bytes, Min(start + step, end));
1632+
} else {
1633+
// The entire node can be used as the linear allocation area.
1634+
return end;
1635+
}
1636+
}
1637+
1638+
void PagedSpace::StartNextInlineAllocationStep() {
1639+
if (!allocation_observers_paused_ && SupportsInlineAllocation()) {
1640+
top_on_previous_step_ = allocation_observers_.empty() ? 0 : top();
1641+
DecreaseLimit(ComputeLimit(top(), limit(), 0));
1642+
}
1643+
}
1644+
15991645
void PagedSpace::MarkAllocationInfoBlack() {
16001646
DCHECK(heap()->incremental_marking()->black_allocation());
16011647
Address current_top = top();
@@ -1641,6 +1687,12 @@ void PagedSpace::EmptyAllocationInfo() {
16411687
}
16421688
}
16431689

1690+
if (top_on_previous_step_) {
1691+
DCHECK(current_top >= top_on_previous_step_);
1692+
AllocationStep(static_cast<int>(current_top - top_on_previous_step_),
1693+
nullptr, 0);
1694+
top_on_previous_step_ = 0;
1695+
}
16441696
SetTopAndLimit(NULL, NULL);
16451697
DCHECK_GE(current_limit, current_top);
16461698
Free(current_top, current_limit - current_top);
@@ -2083,16 +2135,6 @@ void NewSpace::StartNextInlineAllocationStep() {
20832135
}
20842136
}
20852137

2086-
void NewSpace::AddAllocationObserver(AllocationObserver* observer) {
2087-
Space::AddAllocationObserver(observer);
2088-
StartNextInlineAllocationStep();
2089-
}
2090-
2091-
void NewSpace::RemoveAllocationObserver(AllocationObserver* observer) {
2092-
Space::RemoveAllocationObserver(observer);
2093-
StartNextInlineAllocationStep();
2094-
}
2095-
20962138
void NewSpace::PauseAllocationObservers() {
20972139
// Do a step to account for memory allocated so far.
20982140
InlineAllocationStep(top(), top(), nullptr, 0);
@@ -2101,12 +2143,28 @@ void NewSpace::PauseAllocationObservers() {
21012143
UpdateInlineAllocationLimit(0);
21022144
}
21032145

2146+
void PagedSpace::PauseAllocationObservers() {
2147+
// Do a step to account for memory allocated so far.
2148+
if (top_on_previous_step_) {
2149+
int bytes_allocated = static_cast<int>(top() - top_on_previous_step_);
2150+
AllocationStep(bytes_allocated, nullptr, 0);
2151+
}
2152+
Space::PauseAllocationObservers();
2153+
top_on_previous_step_ = 0;
2154+
}
2155+
21042156
void NewSpace::ResumeAllocationObservers() {
21052157
DCHECK(top_on_previous_step_ == 0);
21062158
Space::ResumeAllocationObservers();
21072159
StartNextInlineAllocationStep();
21082160
}
21092161

2162+
// TODO(ofrobots): refactor into SpaceWithLinearArea
2163+
void PagedSpace::ResumeAllocationObservers() {
2164+
DCHECK(top_on_previous_step_ == 0);
2165+
Space::ResumeAllocationObservers();
2166+
StartNextInlineAllocationStep();
2167+
}
21102168

21112169
void NewSpace::InlineAllocationStep(Address top, Address new_top,
21122170
Address soon_object, size_t size) {
@@ -2881,7 +2939,6 @@ bool FreeList::Allocate(size_t size_in_bytes) {
28812939
if (new_node == nullptr) return false;
28822940

28832941
DCHECK_GE(new_node_size, size_in_bytes);
2884-
size_t bytes_left = new_node_size - size_in_bytes;
28852942

28862943
#ifdef DEBUG
28872944
for (size_t i = 0; i < size_in_bytes / kPointerSize; i++) {
@@ -2895,38 +2952,21 @@ bool FreeList::Allocate(size_t size_in_bytes) {
28952952
// candidate.
28962953
DCHECK(!MarkCompactCollector::IsOnEvacuationCandidate(new_node));
28972954

2898-
const size_t kThreshold = IncrementalMarking::kAllocatedThreshold;
2899-
29002955
// Memory in the linear allocation area is counted as allocated. We may free
29012956
// a little of this again immediately - see below.
29022957
owner_->IncreaseAllocatedBytes(new_node_size,
29032958
Page::FromAddress(new_node->address()));
29042959

2905-
if (owner_->heap()->inline_allocation_disabled()) {
2906-
// Keep the linear allocation area to fit exactly the requested size.
2907-
// Return the rest to the free list.
2908-
owner_->Free(new_node->address() + size_in_bytes, bytes_left);
2909-
owner_->SetAllocationInfo(new_node->address(),
2910-
new_node->address() + size_in_bytes);
2911-
} else if (bytes_left > kThreshold &&
2912-
owner_->heap()->incremental_marking()->IsMarkingIncomplete() &&
2913-
FLAG_incremental_marking &&
2914-
!owner_->is_local()) { // Not needed on CompactionSpaces.
2915-
size_t linear_size = owner_->RoundSizeDownToObjectAlignment(kThreshold);
2916-
// We don't want to give too large linear areas to the allocator while
2917-
// incremental marking is going on, because we won't check again whether
2918-
// we want to do another increment until the linear area is used up.
2919-
DCHECK_GE(new_node_size, size_in_bytes + linear_size);
2920-
owner_->Free(new_node->address() + size_in_bytes + linear_size,
2921-
new_node_size - size_in_bytes - linear_size);
2922-
owner_->SetAllocationInfo(
2923-
new_node->address(), new_node->address() + size_in_bytes + linear_size);
2924-
} else {
2925-
// Normally we give the rest of the node to the allocator as its new
2926-
// linear allocation area.
2927-
owner_->SetAllocationInfo(new_node->address(),
2928-
new_node->address() + new_node_size);
2960+
Address start = new_node->address();
2961+
Address end = new_node->address() + new_node_size;
2962+
Address limit = owner_->ComputeLimit(start, end, size_in_bytes);
2963+
DCHECK_LE(limit, end);
2964+
DCHECK_LE(size_in_bytes, limit - start);
2965+
if (limit != end) {
2966+
owner_->Free(limit, end - limit);
29292967
}
2968+
owner_->SetAllocationInfo(start, limit);
2969+
29302970
return true;
29312971
}
29322972

@@ -3314,7 +3354,7 @@ AllocationResult LargeObjectSpace::AllocateRaw(int object_size,
33143354
if (heap()->incremental_marking()->black_allocation()) {
33153355
heap()->incremental_marking()->marking_state()->WhiteToBlack(object);
33163356
}
3317-
AllocationStep(object->address(), object_size);
3357+
AllocationStep(object_size, object->address(), object_size);
33183358
DCHECK_IMPLIES(
33193359
heap()->incremental_marking()->black_allocation(),
33203360
heap()->incremental_marking()->marking_state()->IsBlack(object));

src/heap/spaces.h

Lines changed: 24 additions & 23 deletions
Original file line numberDiff line numberDiff line change
@@ -903,17 +903,17 @@ class Space : public Malloced {
903903
// Identity used in error reporting.
904904
AllocationSpace identity() { return id_; }
905905

906-
V8_EXPORT_PRIVATE virtual void AddAllocationObserver(
907-
AllocationObserver* observer);
906+
void AddAllocationObserver(AllocationObserver* observer);
908907

909-
V8_EXPORT_PRIVATE virtual void RemoveAllocationObserver(
910-
AllocationObserver* observer);
908+
void RemoveAllocationObserver(AllocationObserver* observer);
911909

912910
V8_EXPORT_PRIVATE virtual void PauseAllocationObservers();
913911

914912
V8_EXPORT_PRIVATE virtual void ResumeAllocationObservers();
915913

916-
void AllocationStep(Address soon_object, int size);
914+
V8_EXPORT_PRIVATE virtual void StartNextInlineAllocationStep() {}
915+
916+
void AllocationStep(int bytes_since_last, Address soon_object, int size);
917917

918918
// Return the total amount committed memory for this space, i.e., allocatable
919919
// memory and page headers.
@@ -2071,15 +2071,8 @@ class V8_EXPORT_PRIVATE PagedSpace : NON_EXPORTED_BASE(public Space) {
20712071

20722072
void ResetFreeList() { free_list_.Reset(); }
20732073

2074-
// Set space allocation info.
2075-
void SetTopAndLimit(Address top, Address limit) {
2076-
DCHECK(top == limit ||
2077-
Page::FromAddress(top) == Page::FromAddress(limit - 1));
2078-
MemoryChunk::UpdateHighWaterMark(allocation_info_.top());
2079-
allocation_info_.Reset(top, limit);
2080-
}
2081-
2082-
void SetAllocationInfo(Address top, Address limit);
2074+
void PauseAllocationObservers() override;
2075+
void ResumeAllocationObservers() override;
20832076

20842077
// Empty space allocation info, returning unused area to free list.
20852078
void EmptyAllocationInfo();
@@ -2184,6 +2177,21 @@ class V8_EXPORT_PRIVATE PagedSpace : NON_EXPORTED_BASE(public Space) {
21842177
// multiple tasks hold locks on pages while trying to sweep each others pages.
21852178
void AnnounceLockedPage(Page* page) { locked_page_ = page; }
21862179

2180+
Address ComputeLimit(Address start, Address end, size_t size_in_bytes);
2181+
void SetAllocationInfo(Address top, Address limit);
2182+
2183+
private:
2184+
// Set space allocation info.
2185+
void SetTopAndLimit(Address top, Address limit) {
2186+
DCHECK(top == limit ||
2187+
Page::FromAddress(top) == Page::FromAddress(limit - 1));
2188+
MemoryChunk::UpdateHighWaterMark(allocation_info_.top());
2189+
allocation_info_.Reset(top, limit);
2190+
}
2191+
void DecreaseLimit(Address new_limit);
2192+
void StartNextInlineAllocationStep() override;
2193+
bool SupportsInlineAllocation() { return identity() == OLD_SPACE; }
2194+
21872195
protected:
21882196
// PagedSpaces that should be included in snapshots have different, i.e.,
21892197
// smaller, initial pages.
@@ -2246,6 +2254,7 @@ class V8_EXPORT_PRIVATE PagedSpace : NON_EXPORTED_BASE(public Space) {
22462254
base::Mutex space_mutex_;
22472255

22482256
Page* locked_page_;
2257+
Address top_on_previous_step_;
22492258

22502259
friend class IncrementalMarking;
22512260
friend class MarkCompactCollector;
@@ -2647,14 +2656,6 @@ class NewSpace : public Space {
26472656
UpdateInlineAllocationLimit(0);
26482657
}
26492658

2650-
// Allows observation of inline allocation. The observer->Step() method gets
2651-
// called after every step_size bytes have been allocated (approximately).
2652-
// This works by adjusting the allocation limit to a lower value and adjusting
2653-
// it after each step.
2654-
void AddAllocationObserver(AllocationObserver* observer) override;
2655-
2656-
void RemoveAllocationObserver(AllocationObserver* observer) override;
2657-
26582659
// Get the extent of the inactive semispace (for use as a marking stack,
26592660
// or to zap it). Notice: space-addresses are not necessarily on the
26602661
// same page, so FromSpaceStart() might be above FromSpaceEnd().
@@ -2761,7 +2762,7 @@ class NewSpace : public Space {
27612762
// different when we cross a page boundary or reset the space.
27622763
void InlineAllocationStep(Address top, Address new_top, Address soon_object,
27632764
size_t size);
2764-
void StartNextInlineAllocationStep();
2765+
void StartNextInlineAllocationStep() override;
27652766

27662767
friend class SemiSpaceIterator;
27672768
};

src/profiler/sampling-heap-profiler.h

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -172,8 +172,11 @@ class SamplingAllocationObserver : public AllocationObserver {
172172
void Step(int bytes_allocated, Address soon_object, size_t size) override {
173173
USE(heap_);
174174
DCHECK(heap_->gc_state() == Heap::NOT_IN_GC);
175-
DCHECK(soon_object);
176-
profiler_->SampleObject(soon_object, size);
175+
if (soon_object) {
176+
// TODO(ofrobots): it would be better to sample the next object rather
177+
// than skipping this sample epoch if soon_object happens to be null.
178+
profiler_->SampleObject(soon_object, size);
179+
}
177180
}
178181

179182
intptr_t GetNextStepSize() override { return GetNextSampleInterval(rate_); }

test/cctest/heap/heap-utils.cc

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -32,6 +32,7 @@ int FixedArrayLenFromSize(int size) {
3232

3333
std::vector<Handle<FixedArray>> FillOldSpacePageWithFixedArrays(Heap* heap,
3434
int remainder) {
35+
PauseAllocationObserversScope pause_observers(heap);
3536
std::vector<Handle<FixedArray>> handles;
3637
Isolate* isolate = heap->isolate();
3738
const int kArraySize = 128;
@@ -203,7 +204,7 @@ void ForceEvacuationCandidate(Page* page) {
203204
int remaining = static_cast<int>(limit - top);
204205
space->heap()->CreateFillerObjectAt(top, remaining,
205206
ClearRecordedSlots::kNo);
206-
space->SetTopAndLimit(nullptr, nullptr);
207+
space->EmptyAllocationInfo();
207208
}
208209
}
209210

test/cctest/heap/test-invalidated-slots.cc

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -20,6 +20,7 @@ namespace heap {
2020

2121
Page* HeapTester::AllocateByteArraysOnPage(
2222
Heap* heap, std::vector<ByteArray*>* byte_arrays) {
23+
PauseAllocationObserversScope pause_observers(heap);
2324
const int kLength = 256 - ByteArray::kHeaderSize;
2425
const int kSize = ByteArray::SizeFor(kLength);
2526
CHECK_EQ(kSize, 256);

0 commit comments

Comments
 (0)