page 679 src/arm/simulator-arm.cc v8::internal::HashMap::Entry* entry = i_cache->Lookup(page,
page 680 src/arm/simulator-arm.cc ICacheHash(page),
page 698 src/arm/simulator-arm.cc void* page = reinterpret_cast<void*>(start & (~CachePage::kPageMask));
page 700 src/arm/simulator-arm.cc CachePage* cache_page = GetCachePage(i_cache, page);
page 709 src/arm/simulator-arm.cc void* page = reinterpret_cast<void*>(address & (~CachePage::kPageMask));
page 712 src/arm/simulator-arm.cc CachePage* cache_page = GetCachePage(i_cache, page);
page 321 src/arm/simulator-arm.h static CachePage* GetCachePage(v8::internal::HashMap* i_cache, void* page);
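The simulator-arm.cc/simulator-arm.h lines above (and the matching simulator-mips.cc lines further down in this listing) all belong to the simulated instruction cache: an address is masked down to its cache-page base, the base is hashed, and a CachePage is fetched from a hash map, being allocated on first use. Below is a minimal, self-contained sketch of that pattern; it uses std::unordered_map instead of v8::internal::HashMap, and every name and size in it is illustrative rather than V8's.

#include <cstdint>
#include <unordered_map>

struct CachePageSketch {
  static constexpr uintptr_t kPageSize = 1u << 12;       // assumed cache-page size
  static constexpr uintptr_t kPageMask = kPageSize - 1;  // low bits = offset in page
  char validity[kPageSize];                              // simplified validity bytes
};

// Illustrative stand-in for GetCachePage(i_cache, page): hash lookup that
// allocates a fresh page entry on the first miss.
CachePageSketch* GetCachePage(
    std::unordered_map<uintptr_t, CachePageSketch*>* i_cache, uintptr_t page) {
  CachePageSketch*& slot = (*i_cache)[page];  // inserts a null slot on miss
  if (slot == nullptr) slot = new CachePageSketch();
  return slot;
}

// Mirrors the "start & ~kPageMask" masking in the .cc lines above.
CachePageSketch* CachePageFor(
    std::unordered_map<uintptr_t, CachePageSketch*>* i_cache, uintptr_t address) {
  uintptr_t page = address & ~CachePageSketch::kPageMask;
  return GetCachePage(i_cache, page);
}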
page 1190 src/assembler.cc return ExternalReference(reinterpret_cast<Address>(page) +
page 672 src/assembler.h static ExternalReference page_flags(Page* page);
page 1334 src/frames.cc Page* page = Page::FromAddress(inner_pointer);
page 1336 src/frames.cc Address addr = page->skip_list()->StartFor(inner_pointer);
page 326 src/heap-inl.h NewSpacePage* page = NewSpacePage::FromAddress(old_address);
page 328 src/heap-inl.h bool below_mark = page->IsFlagSet(MemoryChunk::NEW_SPACE_BELOW_AGE_MARK) &&
page 329 src/heap-inl.h (!page->ContainsLimit(age_mark) || old_address < age_mark);
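The heap-inl.h lines above recover a NewSpacePage from an object address and then test a page flag (NEW_SPACE_BELOW_AGE_MARK) to decide whether the object should be promoted. The underlying trick is that pages are power-of-two aligned, so the page header is found by masking the address. A small illustrative sketch, with an assumed alignment and made-up names:

#include <cstdint>

struct NewSpacePageSketch {
  static constexpr uintptr_t kAlignment = 1u << 20;  // assumed page alignment
  uintptr_t flags;                                   // flag bits live in the page header

  // Mask the low bits of any interior address to reach the page header.
  static NewSpacePageSketch* FromAddress(uintptr_t addr) {
    return reinterpret_cast<NewSpacePageSketch*>(addr & ~(kAlignment - 1));
  }
  bool IsFlagSet(int bit) const { return (flags >> bit) & 1; }
};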
page 1090 src/heap.cc heap->store_buffer_rebuilder_.Callback(page, event);
page 1119 src/heap.cc current_page_ = page;
page 1132 src/heap.cc ASSERT(current_page_ == page);
page 1133 src/heap.cc ASSERT(page != NULL);
page 5443 src/heap.cc NewSpacePage* page = it.next();
page 5444 src/heap.cc for (Address cursor = page->area_start(), limit = page->area_end();
page 5582 src/heap.cc Page* page = pages.next();
page 5583 src/heap.cc Object** current = reinterpret_cast<Object**>(page->area_start());
page 5585 src/heap.cc Address end = page->area_end();
page 5610 src/heap.cc Page* page = pages.next();
page 5611 src/heap.cc Object** current = reinterpret_cast<Object**>(page->area_start());
page 5613 src/heap.cc Address end = page->area_end();
page 7171 src/heap.cc uintptr_t p = reinterpret_cast<uintptr_t>(page);
page 265 src/heap.h void Callback(MemoryChunk* page, StoreBufferEvent event);
page 1587 src/heap.h void RememberUnmappedPage(Address page, bool compacted);
page 1876 src/heap.h MemoryChunk* page,
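Several of the heap.cc lines above walk a page's payload as a run of slots, from page->area_start() to page->area_end(). A toy version of that scan follows; the types and the tag-bit predicate are assumptions for illustration only, not V8's object layout.

#include <cstddef>
#include <cstdint>

struct PageAreaSketch {
  uintptr_t* area_start;
  uintptr_t* area_end;  // one past the last slot
};

// Count slots whose value "looks like" a tagged heap pointer
// (assumed low tag bit; illustrative predicate only).
size_t CountPointerLikeSlots(const PageAreaSketch& page) {
  size_t n = 0;
  for (uintptr_t* current = page.area_start; current < page.area_end; ++current) {
    if ((*current & 1) != 0) ++n;
  }
  return n;
}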
page 2696 src/ia32/macro-assembler-ia32.cc Page* page = Page::FromAddress(map->address());
page 2697 src/ia32/macro-assembler-ia32.cc ExternalReference reference(ExternalReference::page_flags(page));
page 116 src/mark-compact.cc NewSpacePage* page = it.next();
page 117 src/mark-compact.cc Address limit = it.has_next() ? page->area_end() : end;
page 118 src/mark-compact.cc ASSERT(limit == end || !page->Contains(end));
page 119 src/mark-compact.cc VerifyMarking(page->area_start(), limit);
page 191 src/mark-compact.cc NewSpacePage* page = it.next();
page 192 src/mark-compact.cc Address current = page->area_start();
page 193 src/mark-compact.cc Address limit = it.has_next() ? page->area_end() : space->top();
page 194 src/mark-compact.cc ASSERT(limit == space->top() || !page->Contains(space->top()));
page 641 src/mark-compact.cc AddEvacuationCandidate(candidates[i].page());
page 3033 src/mark-compact.cc Page* page = evacuation_candidates_[j];
page 3034 src/mark-compact.cc slots_buffer_allocator_.DeallocateChain(page->slots_buffer_address());
page 3035 src/mark-compact.cc page->ClearEvacuationCandidate();
page 3036 src/mark-compact.cc page->SetFlag(Page::RESCAN_ON_EVACUATION);
page 534 src/mark-compact.h void EvictEvacuationCandidate(Page* page) {
page 537 src/mark-compact.h reinterpret_cast<void*>(page));
page 542 src/mark-compact.h page->ClearEvacuationCandidate();
page 548 src/mark-compact.h if (page->owner()->identity() == OLD_DATA_SPACE) {
page 549 src/mark-compact.h evacuation_candidates_.RemoveElement(page);
page 551 src/mark-compact.h page->SetFlag(Page::RESCAN_ON_EVACUATION);
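The mark-compact.h lines above show the eviction path for an evacuation candidate: the candidate flag is cleared, the page is dropped from the candidate list, and it is marked for re-scanning instead. A hedged sketch of that bookkeeping is below; PageStub, the flag values, and the plain std::vector are stand-ins, not V8's real interfaces.

#include <algorithm>
#include <vector>

struct PageStub {
  enum Flag { EVACUATION_CANDIDATE = 1 << 0, RESCAN_ON_EVACUATION = 1 << 1 };
  unsigned flags = 0;
  void SetFlag(Flag f)   { flags |= f; }
  void ClearFlag(Flag f) { flags &= ~f; }
};

void EvictEvacuationCandidate(std::vector<PageStub*>* candidates, PageStub* page) {
  page->ClearFlag(PageStub::EVACUATION_CANDIDATE);   // no longer compacted away
  candidates->erase(std::remove(candidates->begin(), candidates->end(), page),
                    candidates->end());              // drop from the candidate list
  page->SetFlag(PageStub::RESCAN_ON_EVACUATION);     // its slots must be re-visited
}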
page 826 src/mips/simulator-mips.cc v8::internal::HashMap::Entry* entry = i_cache->Lookup(page,
page 827 src/mips/simulator-mips.cc ICacheHash(page),
page 845 src/mips/simulator-mips.cc void* page = reinterpret_cast<void*>(start & (~CachePage::kPageMask));
page 847 src/mips/simulator-mips.cc CachePage* cache_page = GetCachePage(i_cache, page);
page 856 src/mips/simulator-mips.cc void* page = reinterpret_cast<void*>(address & (~CachePage::kPageMask));
page 859 src/mips/simulator-mips.cc CachePage* cache_page = GetCachePage(i_cache, page);
page 332 src/mips/simulator-mips.h static CachePage* GetCachePage(v8::internal::HashMap* i_cache, void* page);
page 145 src/spaces-inl.h void MemoryAllocator::ProtectChunkFromPage(Page* page) {
page 146 src/spaces-inl.h int id = GetChunkId(page);
page 151 src/spaces-inl.h void MemoryAllocator::UnprotectChunkFromPage(Page* page) {
page 152 src/spaces-inl.h int id = GetChunkId(page);
page 166 src/spaces-inl.h Page* page = reinterpret_cast<Page*>(chunk);
page 169 src/spaces-inl.h owner->IncreaseCapacity(page->area_size());
page 170 src/spaces-inl.h owner->Free(page->area_start(), page->area_size());
page 174 src/spaces-inl.h return page;
page 236 src/spaces-inl.h void Page::set_next_page(Page* page) {
page 237 src/spaces-inl.h ASSERT(page->owner() == owner());
page 238 src/spaces-inl.h set_next_chunk(page);
page 242 src/spaces-inl.h void Page::set_prev_page(Page* page) {
page 243 src/spaces-inl.h ASSERT(page->owner() == owner());
page 244 src/spaces-inl.h set_prev_chunk(page);
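The set_next_page / set_prev_page lines above thread pages into an intrusive doubly-linked list through their chunk headers, typically with a dummy anchor node so iteration stops when it wraps back to the anchor (compare the "while (page != &anchor_)" loops in the spaces.cc lines below). A minimal sketch of that list structure, with assumed names:

struct PageNode {
  PageNode* next_chunk;
  PageNode* prev_chunk;

  void set_next_page(PageNode* page) { next_chunk = page; }
  void set_prev_page(PageNode* page) { prev_chunk = page; }

  // Link this page into the list directly after `other`.
  void InsertAfter(PageNode* other) {
    set_next_page(other->next_chunk);
    set_prev_page(other);
    other->next_chunk->set_prev_page(this);
    other->set_next_page(this);
  }
};

// An empty list is just an anchor that points at itself.
struct PageListSketch {
  PageNode anchor;
  PageListSketch() { anchor.next_chunk = &anchor; anchor.prev_chunk = &anchor; }
};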
page 71 src/spaces.cc Space* owner = page->owner();
page 78 src/spaces.cc page->area_start(),
page 79 src/spaces.cc page->area_end(),
page 82 src/spaces.cc ASSERT(page->WasSweptPrecisely());
page 414 src/spaces.cc NewSpacePage* page = static_cast<NewSpacePage*>(chunk);
page 415 src/spaces.cc heap->incremental_marking()->SetNewSpacePageFlags(page);
page 416 src/spaces.cc return page;
page 918 src/spaces.cc ASSERT(page->LiveBytes() == 0);
page 919 src/spaces.cc ASSERT(AreaSize() == page->area_size());
page 922 src/spaces.cc if (first_unswept_page_ == page) {
page 923 src/spaces.cc first_unswept_page_ = page->next_page();
page 929 src/spaces.cc if (page->WasSwept()) {
page 930 src/spaces.cc intptr_t size = free_list_.EvictFreeListItems(page);
page 934 src/spaces.cc DecreaseUnsweptFreeBytes(page);
page 937 src/spaces.cc if (Page::FromAllocationTop(allocation_info_.top) == page) {
page 941 src/spaces.cc page->Unlink();
page 942 src/spaces.cc if (page->IsFlagSet(MemoryChunk::CONTAINS_ONLY_DATA)) {
page 943 src/spaces.cc heap()->isolate()->memory_allocator()->Free(page);
page 945 src/spaces.cc heap()->QueueMemoryChunkForFree(page);
page 956 src/spaces.cc Page* page = it.next();
page 957 src/spaces.cc if (!page->WasSwept()) {
page 958 src/spaces.cc if (page->LiveBytes() == 0) ReleasePage(page);
page 960 src/spaces.cc HeapObject* obj = HeapObject::FromAddress(page->area_start());
page 972 src/spaces.cc free_list_.CountFreeListItems(page, &sizes);
page 974 src/spaces.cc ReleasePage(page);
page 997 src/spaces.cc Page* page = page_iterator.next();
page 998 src/spaces.cc ASSERT(page->owner() == this);
page 999 src/spaces.cc if (page == Page::FromAllocationTop(allocation_info_.top)) {
page 1002 src/spaces.cc ASSERT(page->WasSweptPrecisely());
page 1003 src/spaces.cc HeapObjectIterator it(page, NULL);
page 1004 src/spaces.cc Address end_of_previous_object = page->area_start();
page 1005 src/spaces.cc Address top = page->area_end();
page 1032 src/spaces.cc ASSERT_LE(black_size, page->LiveBytes());
page 1302 src/spaces.cc NewSpacePage* page = NewSpacePage::FromLimit(current)->next_page();
page 1304 src/spaces.cc CHECK(!page->is_anchor());
page 1305 src/spaces.cc current = page->area_start();
page 1359 src/spaces.cc NewSpacePage* page = anchor();
page 1363 src/spaces.cc new_page->InsertAfter(page);
page 1364 src/spaces.cc page = new_page;
page 1466 src/spaces.cc NewSpacePage* page = anchor_.next_page();
page 1467 src/spaces.cc while (page != &anchor_) {
page 1468 src/spaces.cc page->set_owner(this);
page 1469 src/spaces.cc page->SetFlags(flags, mask);
page 1471 src/spaces.cc page->ClearFlag(MemoryChunk::IN_FROM_SPACE);
page 1472 src/spaces.cc page->SetFlag(MemoryChunk::IN_TO_SPACE);
page 1473 src/spaces.cc page->ClearFlag(MemoryChunk::NEW_SPACE_BELOW_AGE_MARK);
page 1474 src/spaces.cc page->ResetLiveBytes();
page 1476 src/spaces.cc page->SetFlag(MemoryChunk::IN_FROM_SPACE);
page 1477 src/spaces.cc page->ClearFlag(MemoryChunk::IN_TO_SPACE);
page 1479 src/spaces.cc ASSERT(page->IsFlagSet(MemoryChunk::SCAN_ON_SCAVENGE));
page 1480 src/spaces.cc ASSERT(page->IsFlagSet(MemoryChunk::IN_TO_SPACE) ||
page 1481 src/spaces.cc page->IsFlagSet(MemoryChunk::IN_FROM_SPACE));
page 1482 src/spaces.cc page = page->next_page();
page 1531 src/spaces.cc NewSpacePage* page = anchor_.next_page();
page 1533 src/spaces.cc while (page != &anchor_) {
page 1534 src/spaces.cc CHECK(page->semi_space() == this);
page 1535 src/spaces.cc CHECK(page->InNewSpace());
page 1536 src/spaces.cc CHECK(page->IsFlagSet(is_from_space ? MemoryChunk::IN_FROM_SPACE
page 1538 src/spaces.cc CHECK(!page->IsFlagSet(is_from_space ? MemoryChunk::IN_TO_SPACE
page 1540 src/spaces.cc CHECK(page->IsFlagSet(MemoryChunk::POINTERS_TO_HERE_ARE_INTERESTING));
page 1544 src/spaces.cc if (page->heap()->incremental_marking()->IsMarking()) {
page 1545 src/spaces.cc CHECK(page->IsFlagSet(MemoryChunk::POINTERS_FROM_HERE_ARE_INTERESTING));
page 1547 src/spaces.cc CHECK(!page->IsFlagSet(
page 1553 src/spaces.cc CHECK(page->IsFlagSet(MemoryChunk::SCAN_ON_SCAVENGE));
page 1554 src/spaces.cc CHECK(page->prev_page()->next_page() == page);
page 1555 src/spaces.cc page = page->next_page();
page 1562 src/spaces.cc NewSpacePage* page = NewSpacePage::FromLimit(start);
page 1564 src/spaces.cc SemiSpace* space = page->semi_space();
page 1569 src/spaces.cc if (page == end_page) {
page 1572 src/spaces.cc while (page != end_page) {
page 1573 src/spaces.cc page = page->next_page();
page 1574 src/spaces.cc CHECK_NE(page, space->anchor());
page 2600 src/spaces.cc LargePage* page = first_page_;
page 2602 src/spaces.cc LOG(heap()->isolate(), DeleteEvent("LargeObjectChunk", page->address()));
page 2606 src/spaces.cc space, kAllocationActionFree, page->size());
page 2607 src/spaces.cc heap()->isolate()->memory_allocator()->Free(page);
page 2626 src/spaces.cc LargePage* page = heap()->isolate()->memory_allocator()->
page 2628 src/spaces.cc if (page == NULL) return Failure::RetryAfterGC(identity());
page 2629 src/spaces.cc ASSERT(page->area_size() >= object_size);
page 2631 src/spaces.cc size_ += static_cast<int>(page->size());
page 2634 src/spaces.cc page->set_next_page(first_page_);
page 2635 src/spaces.cc first_page_ = page;
page 2639 src/spaces.cc uintptr_t base = reinterpret_cast<uintptr_t>(page) / MemoryChunk::kAlignment;
page 2640 src/spaces.cc uintptr_t limit = base + (page->size() - 1) / MemoryChunk::kAlignment;
page 2646 src/spaces.cc entry->value = page;
page 2649 src/spaces.cc HeapObject* object = page->GetObject();
page 2665 src/spaces.cc LargePage* page = FindPage(a);
page 2666 src/spaces.cc if (page != NULL) {
page 2667 src/spaces.cc return page->GetObject();
page 2680 src/spaces.cc LargePage* page = reinterpret_cast<LargePage*>(e->value);
page 2681 src/spaces.cc ASSERT(page->is_valid());
page 2682 src/spaces.cc if (page->Contains(a)) {
page 2683 src/spaces.cc return page;
page 2705 src/spaces.cc LargePage* page = current;
page 2717 src/spaces.cc size_ -= static_cast<int>(page->size());
page 2725 src/spaces.cc uintptr_t base = reinterpret_cast<uintptr_t>(page)/alignment;
page 2726 src/spaces.cc uintptr_t limit = base + (page->size()-1)/alignment;
page 2733 src/spaces.cc heap()->QueueMemoryChunkForFree(page);
page 2735 src/spaces.cc heap()->isolate()->memory_allocator()->Free(page);
page 2765 src/spaces.cc Page* page = Page::FromAddress(object->address());
page 2766 src/spaces.cc ASSERT(object->address() == page->area_start());
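The large-object lines above from spaces.cc compute "base" and "limit" keys by dividing the page address and its size by the chunk alignment, then register the page in a map so FindPage(a) can locate the page containing any interior address with one hash lookup. A stand-alone sketch of that chunk map, assuming the page start is alignment-aligned; the map type, names, and kAlignment value are illustrative:

#include <cstddef>
#include <cstdint>
#include <unordered_map>

struct LargePageSketch {
  uintptr_t start;  // assumed to be kAlignment-aligned
  size_t size;
  bool Contains(uintptr_t a) const { return a >= start && a < start + size; }
};

static constexpr uintptr_t kAlignment = 1u << 20;  // assumed chunk alignment
using ChunkMap = std::unordered_map<uintptr_t, LargePageSketch*>;

// Register every alignment-granule key covered by the page.
void RegisterLargePage(ChunkMap* map, LargePageSketch* page) {
  uintptr_t base = page->start / kAlignment;
  uintptr_t limit = base + (page->size - 1) / kAlignment;
  for (uintptr_t key = base; key <= limit; ++key) (*map)[key] = page;
}

// One hash lookup maps any interior address back to its large page.
LargePageSketch* FindPage(const ChunkMap& map, uintptr_t a) {
  auto it = map.find(a / kAlignment);
  if (it != map.end() && it->second->Contains(a)) return it->second;
  return nullptr;
}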
page 672 src/spaces.h inline void set_next_page(Page* page);
page 673 src/spaces.h inline void set_prev_page(Page* page);
page 746 src/spaces.h inline void set_next_page(LargePage* page) {
page 747 src/spaces.h set_next_chunk(page);
page 917 src/spaces.h Page* page = Page::FromAddress(addr);
page 918 src/spaces.h SkipList* list = page->skip_list();
page 921 src/spaces.h page->set_skip_list(list);
page 1120 src/spaces.h HeapObjectIterator(Page* page, HeapObjectCallback size_func);
page 1546 src/spaces.h void ReleasePage(Page* page);
page 1742 src/spaces.h inline void set_next_page(NewSpacePage* page) {
page 1743 src/spaces.h set_next_chunk(page);
page 1750 src/spaces.h inline void set_prev_page(NewSpacePage* page) {
page 1751 src/spaces.h set_prev_chunk(page);
page 1778 src/spaces.h NewSpacePage* page = reinterpret_cast<NewSpacePage*>(page_start);
page 1779 src/spaces.h return page;
page 1998 src/spaces.h NewSpacePage* page = NewSpacePage::FromLimit(current_);
page 1999 src/spaces.h page = page->next_page();
page 2000 src/spaces.h ASSERT(!page->is_anchor());
page 2001 src/spaces.h current_ = page->area_start();
page 2337 src/spaces.h virtual Address PageAllocationLimit(Page* page) {
page 2338 src/spaces.h return page->area_end();
page 2371 src/spaces.h virtual Address PageAllocationLimit(Page* page) {
page 2372 src/spaces.h return page->area_end() - page_extra_;
page 386 src/store-buffer.cc Page* page = it.next();
page 388 src/store-buffer.cc reinterpret_cast<PagedSpace*>(page->owner()),
page 389 src/store-buffer.cc page,
page 456 src/store-buffer.cc Address page = Page::FromAddress(addr)->area_start();
page 457 src/store-buffer.cc return page + (((addr - page) + (Map::kSize - 1)) / Map::kSize * Map::kSize);
page 463 src/store-buffer.cc Address page = Page::FromAllocationTop(addr)->area_start();
page 464 src/store-buffer.cc return page + ((addr - page) / Map::kSize * Map::kSize);
page 526 src/store-buffer.cc Address visitable_start = page->area_start();
page 527 src/store-buffer.cc Address end_of_page = page->area_end();
page 646 src/store-buffer.cc Page* page = reinterpret_cast<Page*>(chunk);
page 647 src/store-buffer.cc PagedSpace* owner = reinterpret_cast<PagedSpace*>(page->owner());
page 650 src/store-buffer.cc page,
page 176 src/store-buffer.h Page* page,
page 192 src/store-buffer.h Page* page,
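The two store-buffer.cc expressions above that divide by Map::kSize round an address down (or up) to a Map-sized slot boundary measured from the page's area_start(). A stand-alone sketch of that arithmetic, with an assumed slot size:

#include <cstdint>

static constexpr uintptr_t kMapSize = 88;  // assumed size of a Map object

uintptr_t RoundDownToMapSlot(uintptr_t addr, uintptr_t area_start) {
  return area_start + (addr - area_start) / kMapSize * kMapSize;
}

uintptr_t RoundUpToMapSlot(uintptr_t addr, uintptr_t area_start) {
  return area_start + ((addr - area_start) + (kMapSize - 1)) / kMapSize * kMapSize;
}

// Example: with area_start == 0, RoundDownToMapSlot(200, 0) == 176 and
// RoundUpToMapSlot(200, 0) == 264, i.e. the enclosing and next 88-byte slots.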