size 73 include/v8-preparser.h PreParserData(size_t size, const uint8_t* data)
size 74 include/v8-preparser.h : data_(data), size_(size) { }
size 84 include/v8-preparser.h size_t size() const { return size_; }
size 570 include/v8-profiler.h HeapStatsUpdate(uint32_t index, uint32_t count, uint32_t size)
size 571 include/v8-profiler.h : index(index), count(count), size(size) { }
size 574 include/v8-profiler.h uint32_t size; // New value of size field for the interval with this index.
size 469 include/v8.h void* operator new(size_t size);
size 2756 include/v8.h void set_total_heap_size(size_t size) { total_heap_size_ = size; }
size 2757 include/v8.h void set_total_heap_size_executable(size_t size) {
size 2758 include/v8.h total_heap_size_executable_ = size;
size 2760 include/v8.h void set_used_heap_size(size_t size) { used_heap_size_ = size; }
size 2761 include/v8.h void set_heap_size_limit(size_t size) { heap_size_limit_ = size; }
size 2863 include/v8.h void* operator new(size_t size);
size 3846 include/v8.h virtual WriteResult WriteAsciiChunk(char* data, int size) = 0;
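The include/v8.h hit at line 3846 is the chunked-output hook the heap profiler calls while serializing a snapshot: the embedder is handed `size` bytes of ASCII at a time. As a rough illustration, assuming the `v8::OutputStream` interface declared alongside it (with `EndOfStream()` and the `kContinue`/`kAbort` result codes), a file-backed implementation could look like the sketch below; `FileOutputStream` is a hypothetical name, not part of V8.

```cpp
#include <v8-profiler.h>
#include <cstdio>

// Illustrative sketch only: writes each serializer chunk straight to a FILE*.
class FileOutputStream : public v8::OutputStream {
 public:
  explicit FileOutputStream(FILE* out) : out_(out) {}

  virtual void EndOfStream() { fflush(out_); }

  virtual WriteResult WriteAsciiChunk(char* data, int size) {
    // Keep the serializer going as long as the whole chunk is written.
    return fwrite(data, 1, static_cast<size_t>(size), out_) ==
                   static_cast<size_t>(size)
               ? kContinue
               : kAbort;
  }

 private:
  FILE* out_;
};
```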
size 205 preparser/preparser-process.cc uint32_t size = static_cast<uint32_t>(data->size());
size 206 preparser/preparser-process.cc fprintf(stderr, "LOG: data size: %u\n", size);
size 207 preparser/preparser-process.cc if (!WriteBuffer(stdout, data->data(), size)) {
size 235 preparser/preparser-process.cc PreparseDataInterpreter reader(data->data(), static_cast<int>(data->size()));
size 336 samples/lineprocessor.cc int size = ftell(file);
size 339 samples/lineprocessor.cc char* chars = new char[size + 1];
size 340 samples/lineprocessor.cc chars[size] = '\0';
size 341 samples/lineprocessor.cc for (int i = 0; i < size;) {
size 342 samples/lineprocessor.cc int read = static_cast<int>(fread(&chars[i], 1, size - i, file));
size 346 samples/lineprocessor.cc v8::Handle<v8::String> result = v8::String::New(chars, size);
size 554 samples/process.cc int size = ftell(file);
size 557 samples/process.cc char* chars = new char[size + 1];
size 558 samples/process.cc chars[size] = '\0';
size 559 samples/process.cc for (int i = 0; i < size;) {
size 560 samples/process.cc int read = static_cast<int>(fread(&chars[i], 1, size - i, file));
size 564 samples/process.cc Handle<String> result = String::New(chars, size);
size 202 samples/shell.cc int size = ftell(file);
size 205 samples/shell.cc char* chars = new char[size + 1];
size 206 samples/shell.cc chars[size] = '\0';
size 207 samples/shell.cc for (int i = 0; i < size;) {
size 208 samples/shell.cc int read = static_cast<int>(fread(&chars[i], 1, size - i, file));
size 212 samples/shell.cc v8::Handle<v8::String> result = v8::String::New(chars, size);
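The samples/lineprocessor.cc, samples/process.cc and samples/shell.cc hits above (and the src/d8.cc and src/mksnapshot.cc hits further down) all repeat the same read-whole-file idiom: `ftell()` supplies the byte size, the buffer gets `size + 1` bytes for a trailing NUL, and `fread()` is looped until `size` bytes have arrived. A standalone sketch of that idiom, with `ReadFile` as an illustrative helper name rather than a V8 API:

```cpp
#include <cstdio>

// Reads a whole file into a NUL-terminated heap buffer; caller delete[]s it.
static char* ReadFile(const char* name, int* size_out) {
  FILE* file = fopen(name, "rb");
  if (file == NULL) return NULL;

  fseek(file, 0, SEEK_END);
  int size = static_cast<int>(ftell(file));
  if (size < 0) {
    fclose(file);
    return NULL;
  }
  rewind(file);

  char* chars = new char[size + 1];
  chars[size] = '\0';
  for (int i = 0; i < size;) {
    int read = static_cast<int>(fread(&chars[i], 1, size - i, file));
    if (read <= 0) {  // I/O error or unexpected EOF.
      fclose(file);
      delete[] chars;
      return NULL;
    }
    i += read;
  }
  fclose(file);
  *size_out = size;
  return chars;
}
```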
size 37 src/allocation-inl.h void* PreallocatedStorageAllocationPolicy::New(size_t size) {
size 38 src/allocation-inl.h return Isolate::Current()->PreallocatedStorageNew(size);
size 39 src/allocation.cc void* result = malloc(size);
size 44 src/allocation.h void* operator new(size_t size) { return New(size); }
size 48 src/allocation.h static void* New(size_t size);
size 63 src/allocation.h void* operator new(size_t size);
size 76 src/allocation.h void* operator new(size_t size);
size 83 src/allocation.h T* NewArray(size_t size) {
size 84 src/allocation.h T* result = new T[size];
size 107 src/allocation.h INLINE(void* New(size_t size)) { return Malloced::New(size); }
size 117 src/allocation.h explicit PreallocatedStorage(size_t size);
size 118 src/allocation.h size_t size() { return size_; }
size 135 src/allocation.h INLINE(void* New(size_t size));
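The src/allocation.h and src/allocation-inl.h hits outline V8's allocation-policy layering: classes derive from a base whose `operator new` routes through a checked `New(size)`, and policies such as `FreeStoreAllocationPolicy` or `PreallocatedStorageAllocationPolicy` expose the same `New(size)` hook. A minimal stand-in sketch (the fatal out-of-memory handling is simplified and the class bodies are not V8's):

```cpp
#include <cstdio>
#include <cstdlib>

// Base class: anything derived from Malloced gets checked malloc/free.
class Malloced {
 public:
  void* operator new(size_t size) { return New(size); }
  void operator delete(void* p) { Delete(p); }

  static void* New(size_t size) {
    void* result = malloc(size);
    if (result == NULL) {  // The real code reports a fatal OOM here.
      fprintf(stderr, "out of memory\n");
      abort();
    }
    return result;
  }
  static void Delete(void* p) { free(p); }
};

// Allocation policy exposing the same hook, usable by containers and lists.
class FreeStoreAllocationPolicy {
 public:
  void* New(size_t size) { return Malloced::New(size); }
};
```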
size 399 src/api.cc compressed_data[kSnapshot].compressed_size = i::Snapshot::size();
size 835 src/api.cc i::Handle<i::FixedArray> elements = isolate->factory()->NewFixedArray(size);
size 869 src/api.cc int size = obj_.size();
size 870 src/api.cc if (length == size - 1) {
size 871 src/api.cc i::Handle<i::FixedArray> new_elms = FACTORY->NewFixedArray(2 * size);
size 4839 src/api.cc int size = obj->Size(); // Byte size of the original string.
size 4840 src/api.cc if (size < i::ExternalString::kShortSize) return false;
size 59 src/api.h explicit NeanderObject(int size);
size 65 src/api.h int size();
size 2442 src/arm/assembler-arm.cc RecordRelocInfo(RelocInfo::CONST_POOL, static_cast<intptr_t>(size));
size 2616 src/arm/assembler-arm.cc int size = jump_instr + kInstrSize + num_pending_reloc_info_ * kPointerSize;
size 2617 src/arm/assembler-arm.cc int needed_space = size + kGap;
size 2624 src/arm/assembler-arm.cc RecordConstPool(size);
size 1239 src/arm/assembler-arm.h void RecordConstPool(int size);
size 121 src/arm/builtins-arm.cc int size = JSArray::kSize;
size 123 src/arm/builtins-arm.cc size += FixedArray::SizeFor(initial_capacity);
size 125 src/arm/builtins-arm.cc __ AllocateInNewSpace(size,
size 350 src/arm/code-stubs-arm.cc int size = JSArray::kSize + elements_size;
size 354 src/arm/code-stubs-arm.cc __ AllocateInNewSpace(size,
size 479 src/arm/code-stubs-arm.cc int size = JSObject::kHeaderSize + length_ * kPointerSize;
size 482 src/arm/code-stubs-arm.cc __ cmp(r0, Operand(size >> kPointerSizeLog2));
size 487 src/arm/code-stubs-arm.cc __ AllocateInNewSpace(size, r0, r1, r2, &slow_case, TAG_OBJECT);
size 488 src/arm/code-stubs-arm.cc for (int i = 0; i < size; i += kPointerSize) {
size 56 src/arm/cpu-arm.cc if (size == 0) {
size 66 src/arm/cpu-arm.cc Simulator::FlushICache(Isolate::Current()->simulator_i_cache(), start, size);
size 76 src/arm/cpu-arm.cc reinterpret_cast<uint32_t>(start) + size;
size 1545 src/arm/full-codegen-arm.cc int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
size 1547 src/arm/full-codegen-arm.cc __ AllocateInNewSpace(size, r0, r2, r3, &runtime_allocate, TAG_OBJECT);
size 1552 src/arm/full-codegen-arm.cc __ mov(r0, Operand(Smi::FromInt(size)));
size 1562 src/arm/full-codegen-arm.cc __ CopyFields(r0, r5, r2.bit(), size / kPointerSize);
size 4916 src/arm/lithium-codegen-arm.cc int size = instr->hydrogen()->total_size();
size 4939 src/arm/lithium-codegen-arm.cc __ AllocateInNewSpace(size, r0, r2, r3, &runtime_allocate, TAG_OBJECT);
size 4943 src/arm/lithium-codegen-arm.cc __ mov(r0, Operand(Smi::FromInt(size)));
size 4951 src/arm/lithium-codegen-arm.cc ASSERT_EQ(size, offset);
size 5016 src/arm/lithium-codegen-arm.cc int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
size 5019 src/arm/lithium-codegen-arm.cc __ AllocateInNewSpace(size, r0, r2, r3, &runtime_allocate, TAG_OBJECT);
size 5023 src/arm/lithium-codegen-arm.cc __ mov(r0, Operand(Smi::FromInt(size)));
size 5031 src/arm/lithium-codegen-arm.cc for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
size 5037 src/arm/lithium-codegen-arm.cc if ((size % (2 * kPointerSize)) != 0) {
size 5038 src/arm/lithium-codegen-arm.cc __ ldr(r3, FieldMemOperand(r1, size - kPointerSize));
size 5039 src/arm/lithium-codegen-arm.cc __ str(r3, FieldMemOperand(r0, size - kPointerSize));
size 137 src/arm/macro-assembler-arm.cc int size = 2 * kInstrSize;
size 141 src/arm/macro-assembler-arm.cc size += kInstrSize;
size 143 src/arm/macro-assembler-arm.cc return size;
size 105 src/arm/macro-assembler-arm.h MacroAssembler(Isolate* isolate, void* buffer, int size);
size 644 src/arm/simulator-arm.cc intptr_t end_page = ((start + size) & ~CachePage::kPageMask);
size 661 src/arm/simulator-arm.cc size += intra_line;
size 662 src/arm/simulator-arm.cc size = ((size - 1) | CachePage::kLineMask) + 1;
size 664 src/arm/simulator-arm.cc while (!AllOnOnePage(start, size - 1)) {
size 668 src/arm/simulator-arm.cc size -= bytes_to_flush;
size 672 src/arm/simulator-arm.cc if (size != 0) {
size 673 src/arm/simulator-arm.cc FlushOnePage(i_cache, start, size);
size 694 src/arm/simulator-arm.cc ASSERT(size <= CachePage::kPageSize);
size 695 src/arm/simulator-arm.cc ASSERT(AllOnOnePage(start, size - 1));
size 697 src/arm/simulator-arm.cc ASSERT((size & CachePage::kLineMask) == 0);
size 702 src/arm/simulator-arm.cc memset(valid_bytemap, CachePage::LINE_INVALID, size >> CachePage::kLineShift);
size 203 src/arm/simulator-arm.h size_t size);
size 320 src/arm/simulator-arm.h int size);
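The ARM (and, below, MIPS) simulator hits show how an ICache flush request is widened to whole cache lines before pages are invalidated: the start is rounded down to a line boundary, the size grows by the bytes skipped, and `((size - 1) | kLineMask) + 1` rounds the size up to a line multiple. A standalone sketch of just that arithmetic; the 32-byte line size here is illustrative, not the simulator's constant:

```cpp
#include <cassert>
#include <cstddef>
#include <stdint.h>

static const intptr_t kLineSize = 32;            // Must be a power of two.
static const intptr_t kLineMask = kLineSize - 1;

// Widens [start, start + size) to whole cache lines, in place.
static void AlignToLines(intptr_t* start, size_t* size) {
  if (*size == 0) return;                 // Nothing to flush.
  intptr_t intra_line = *start & kLineMask;
  *start -= intra_line;                   // Round start down to a line.
  *size += intra_line;                    // Keep covering the same bytes.
  *size = ((*size - 1) | kLineMask) + 1;  // Round size up to a line multiple.
  assert((*size & kLineMask) == 0);
}
```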
size 4489 src/arm/stub-cache-arm.cc int size = FixedArray::SizeFor(JSArray::kPreallocatedArrayElements);
size 4490 src/arm/stub-cache-arm.cc __ AllocateInNewSpace(size, elements_reg, scratch, scratch2, &slow,
size 4639 src/arm/stub-cache-arm.cc int size = FixedDoubleArray::SizeFor(JSArray::kPreallocatedArrayElements);
size 4640 src/arm/stub-cache-arm.cc __ AllocateInNewSpace(size, elements_reg, scratch1, scratch2, &slow,
size 205 src/ast.h void* operator new(size_t size, Zone* zone) {
size 206 src/ast.h return zone->New(static_cast<int>(size));
size 245 src/ast.h void* operator new(size_t size);
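The src/ast.h hits (and the matching src/hydrogen.h ones later) show the zone-allocation pattern: a placement `operator new(size_t, Zone*)` forwards to a bump allocator, while the ordinary `operator new` is declared private and left undefined so accidental heap allocation fails to compile or link. A simplified stand-in (this `Zone` is not V8's; it skips capacity checks):

```cpp
#include <cstddef>
#include <cstdlib>

// Bump allocator stand-in: hands out aligned chunks from one big buffer.
class Zone {
 public:
  explicit Zone(size_t capacity)
      : buffer_(static_cast<char*>(malloc(capacity))), top_(buffer_) {}
  ~Zone() { free(buffer_); }

  void* New(int size) {
    void* result = top_;
    top_ += (size + 7) & ~7;  // Keep allocations 8-byte aligned.
    return result;            // No capacity check, for brevity.
  }

 private:
  char* buffer_;
  char* top_;
};

class AstNode {
 public:
  void* operator new(size_t size, Zone* zone) {
    return zone->New(static_cast<int>(size));
  }
  void operator delete(void*, Zone*) {}  // Matching placement delete.

 private:
  void* operator new(size_t size);  // Declared only: plain `new` is forbidden.
};
```

Allocation then reads `AstNode* node = new (&zone) AstNode();`, and the node's storage is reclaimed all at once when the zone goes away.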
size 110 src/bignum.h void EnsureCapacity(int size) {
size 111 src/bignum.h if (size > kBigitCapacity) {
size 386 src/bootstrapper.cc int size = (prototypeMode == DONT_ADD_PROTOTYPE) ? 4 : 5;
size 387 src/bootstrapper.cc Handle<DescriptorArray> descriptors(factory()->NewDescriptorArray(size));
size 530 src/bootstrapper.cc int size = (prototypeMode == DONT_ADD_PROTOTYPE) ? 4 : 5;
size 531 src/bootstrapper.cc Handle<DescriptorArray> descriptors(factory()->NewDescriptorArray(size));
size 1852 src/bootstrapper.cc int array_size = JSFunctionResultCache::kEntriesIndex + 2 * size;
size 1874 src/bootstrapper.cc FixedArray* cache = CreateCache((size), Handle<JSFunction>(func)); \
size 70 src/compilation-cache.cc CompilationCacheTable::Allocate(size),
size 42 src/cpu-profiler-inl.h code_map->AddCode(start, entry, size);
size 70 src/cpu-profiler.cc rec->size = 1;
size 90 src/cpu-profiler.cc rec->size = size;
size 107 src/cpu-profiler.cc rec->size = size;
size 124 src/cpu-profiler.cc rec->size = size;
size 167 src/cpu-profiler.cc rec->size = size;
size 72 src/cpu-profiler.h unsigned size;
size 142 src/cpu-profiler.h Address start, unsigned size,
size 146 src/cpu-profiler.h Address start, unsigned size);
size 149 src/cpu-profiler.h Address start, unsigned size);
size 155 src/cpu-profiler.h Address start, unsigned size);
size 61 src/cpu.h static void FlushICache(void* start, size_t size);
size 1358 src/d8.cc int size = ftell(file);
size 1361 src/d8.cc char* chars = new char[size + 1];
size 1362 src/d8.cc chars[size] = '\0';
size 1363 src/d8.cc for (int i = 0; i < size;) {
size 1364 src/d8.cc int read = static_cast<int>(fread(&chars[i], 1, size - i, file));
size 1368 src/d8.cc *size_out = size;
size 1425 src/d8.cc int size = 0;
size 1426 src/d8.cc char* chars = ReadChars(name, &size);
size 1559 src/d8.cc int size;
size 1560 src/d8.cc char* chars = ReadChars(name, &size);
size 1562 src/d8.cc Handle<String> result = String::New(chars, size);
size 1756 src/d8.cc int size = 0;
size 1757 src/d8.cc files = ReadChars(options.parallel_files[i], &size);
size 392 src/d8.h int32_t size);
size 3539 src/debug.cc messages_ = NewArray<CommandMessage>(size);
size 718 src/debug.h explicit CommandMessageQueue(int size);
size 744 src/debug.h LockingCommandMessageQueue(Logger* logger, int size);
size 431 src/deoptimizer.cc unsigned size = ComputeInputFrameSize();
size 432 src/deoptimizer.cc input_ = new(size) FrameDescription(size, function);
size 1294 src/deoptimizer.cc unsigned size = GetFrameSize() - ComputeFixedSize();
size 1295 src/deoptimizer.cc return size / kPointerSize;
size 355 src/deoptimizer.h void* operator new(size_t size, uint32_t frame_size) {
size 358 src/deoptimizer.h return malloc(size + frame_size - kPointerSize);
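The deoptimizer hits show a variable-length-object trick: `operator new` takes the frame size as an extra placement argument and over-allocates so a trailing one-element array can hold the whole frame (`input_ = new(size) FrameDescription(size, function)` in the deoptimizer.cc hit). A minimal standalone sketch of the same idea; this `FrameDescription` is a stripped-down stand-in, and `sizeof(intptr_t)` plays the role of `kPointerSize`:

```cpp
#include <cstdlib>
#include <cstring>
#include <stdint.h>

class FrameDescription {
 public:
  // Over-allocate: the trailing frame_content_[1] already contributes one
  // pointer-sized slot, so only frame_size - sizeof(intptr_t) extra is needed.
  void* operator new(size_t size, uint32_t frame_size) {
    return malloc(size + frame_size - sizeof(intptr_t));
  }
  void operator delete(void* description) { free(description); }

  explicit FrameDescription(uint32_t frame_size) : frame_size_(frame_size) {
    memset(frame_content_, 0, frame_size);
  }

  intptr_t GetFrameSlot(unsigned offset) {
    return frame_content_[offset / sizeof(intptr_t)];
  }

 private:
  uint32_t frame_size_;
  intptr_t frame_content_[1];  // Really frame_size_ bytes; indexed past [0].
};
```

Construction mirrors the deoptimizer.cc hit: `FrameDescription* input = new (frame_size) FrameDescription(frame_size);`.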
size 45 src/factory.cc ASSERT(0 <= size);
size 48 src/factory.cc isolate()->heap()->AllocateFixedArray(size, pretenure),
size 55 src/factory.cc ASSERT(0 <= size);
size 58 src/factory.cc isolate()->heap()->AllocateFixedArrayWithHoles(size, pretenure),
size 65 src/factory.cc ASSERT(0 <= size);
size 68 src/factory.cc isolate()->heap()->AllocateUninitializedFixedDoubleArray(size, pretenure),
size 44 src/factory.h int size,
size 49 src/factory.h int size,
size 54 src/factory.h int size,
size 203 src/gdb-jit.cc header->size = end - start;
size 220 src/gdb-jit.cc uint32_t size;
size 223 src/gdb-jit.cc uint64_t size;
size 263 src/gdb-jit.cc header->size = 0;
size 293 src/gdb-jit.cc uintptr_t size;
size 346 src/gdb-jit.cc header->size = end - start;
size 362 src/gdb-jit.cc header->size = 0;
size 396 src/gdb-jit.cc header->size = size_;
size 427 src/gdb-jit.cc header->size = size_;
size 468 src/gdb-jit.cc header->size = size_;
size 776 src/gdb-jit.cc size(size),
size 789 src/gdb-jit.cc uintptr_t size,
size 795 src/gdb-jit.cc size(size),
size 803 src/gdb-jit.cc uintptr_t size;
size 812 src/gdb-jit.cc uintptr_t size,
size 821 src/gdb-jit.cc size(size) {
size 829 src/gdb-jit.cc uintptr_t size;
size 837 src/gdb-jit.cc s->size = size;
size 846 src/gdb-jit.cc uintptr_t size;
size 869 src/gdb-jit.cc header->size = w->position() - header->offset;
size 1079 src/gdb-jit.cc Writer::Slot<uint32_t> size = w->CreateSlotHere<uint32_t>();
size 1207 src/gdb-jit.cc size.set(static_cast<uint32_t>(w->position() - start));
size 71 src/global-handles.h void* operator new(size_t size);
size 104 src/global-handles.h void* operator new(size_t size);
size 147 src/handles.h void* operator new(size_t size);
size 43 src/heap-inl.h void PromotionQueue::insert(HeapObject* target, int size) {
size 45 src/heap-inl.h emergency_stack_->Add(Entry(target, size));
size 63 src/heap-inl.h emergency_stack_->Add(Entry(target, size));
size 69 src/heap-inl.h *(--rear_) = size;
size 114 src/heap-inl.h int size = SeqAsciiString::SizeFor(str.length());
size 118 src/heap-inl.h { MaybeObject* maybe_result = (size > Page::kMaxNonCodeHeapObjectSize)
size 119 src/heap-inl.h ? lo_space_->AllocateRaw(size, NOT_EXECUTABLE)
size 120 src/heap-inl.h : old_data_space_->AllocateRaw(size);
size 131 src/heap-inl.h ASSERT_EQ(size, answer->Size());
size 148 src/heap-inl.h int size = SeqTwoByteString::SizeFor(str.length());
size 152 src/heap-inl.h { MaybeObject* maybe_result = (size > Page::kMaxNonCodeHeapObjectSize)
size 153 src/heap-inl.h ? lo_space_->AllocateRaw(size, NOT_EXECUTABLE)
size 154 src/heap-inl.h : old_data_space_->AllocateRaw(size);
size 164 src/heap-inl.h ASSERT_EQ(size, answer->Size());
size 174 src/heap-profiler.cc size_t size = profiler->snapshots_->GetUsedMemorySize();
size 175 src/heap-profiler.cc return size;
size 1172 src/heap.cc int size = static_cast<int>(*(head_start++));
size 1174 src/heap.cc emergency_stack_->Add(Entry(obj, size));
size 1574 src/heap.cc int size;
size 1575 src/heap.cc promotion_queue()->remove(&target, &size);
size 1583 src/heap.cc target->address() + size,
size 1610 src/heap.cc heap->CreateFillerObjectAt(object->address() + size - kPointerSize,
size 1714 src/heap.cc heap->CopyBlock(target->address(), source->address(), size);
size 1735 src/heap.cc MemoryChunk::IncrementLiveBytesFromGC(target->address(), size);
size 2140 src/heap.cc {type, size, k##camel_name##MapRootIndex},
size 2253 src/heap.cc { MaybeObject* maybe_obj = AllocateMap(entry.type, entry.size);
size 2378 src/heap.cc { MaybeObject* maybe_obj = AllocateMap(entry.type, entry.size);
size 3452 src/heap.cc int size = ByteArray::SizeFor(length);
size 3454 src/heap.cc { MaybeObject* maybe_result = (size <= Page::kMaxNonCodeHeapObjectSize)
size 3455 src/heap.cc ? old_data_space_->AllocateRaw(size)
size 3456 src/heap.cc : lo_space_->AllocateRaw(size, NOT_EXECUTABLE);
size 3471 src/heap.cc int size = ByteArray::SizeFor(length);
size 3473 src/heap.cc (size > Page::kMaxNonCodeHeapObjectSize) ? LO_SPACE : NEW_SPACE;
size 3475 src/heap.cc { MaybeObject* maybe_result = AllocateRaw(size, space, OLD_DATA_SPACE);
size 3487 src/heap.cc if (size == 0) return;
size 3489 src/heap.cc if (size == kPointerSize) {
size 3491 src/heap.cc } else if (size == 2 * kPointerSize) {
size 3495 src/heap.cc FreeSpace::cast(filler)->set_size(size);
size 4262 src/heap.cc MaybeObject* maybe = AllocateMap(type, size);
size 4463 src/heap.cc int size;
size 4471 src/heap.cc size = SeqAsciiString::SizeFor(chars);
size 4477 src/heap.cc size = SeqTwoByteString::SizeFor(chars);
size 4482 src/heap.cc { MaybeObject* maybe_result = (size > Page::kMaxNonCodeHeapObjectSize)
size 4483 src/heap.cc ? lo_space_->AllocateRaw(size, NOT_EXECUTABLE)
size 4484 src/heap.cc : old_data_space_->AllocateRaw(size);
size 4494 src/heap.cc ASSERT_EQ(size, answer->Size());
size 4516 src/heap.cc int size = SeqAsciiString::SizeFor(length);
size 4517 src/heap.cc ASSERT(size <= SeqAsciiString::kMaxSize);
size 4523 src/heap.cc if (size > kMaxObjectSizeInNewSpace) {
size 4526 src/heap.cc } else if (size > Page::kMaxNonCodeHeapObjectSize) {
size 4531 src/heap.cc size > Page::kMaxNonCodeHeapObjectSize) {
size 4535 src/heap.cc { MaybeObject* maybe_result = AllocateRaw(size, space, retry_space);
size 4543 src/heap.cc ASSERT_EQ(size, HeapObject::cast(result)->Size());
size 4563 src/heap.cc int size = SeqTwoByteString::SizeFor(length);
size 4564 src/heap.cc ASSERT(size <= SeqTwoByteString::kMaxSize);
size 4569 src/heap.cc if (size > kMaxObjectSizeInNewSpace) {
size 4572 src/heap.cc } else if (size > Page::kMaxNonCodeHeapObjectSize) {
size 4577 src/heap.cc size > Page::kMaxNonCodeHeapObjectSize) {
size 4581 src/heap.cc { MaybeObject* maybe_result = AllocateRaw(size, space, retry_space);
size 4589 src/heap.cc ASSERT_EQ(size, HeapObject::cast(result)->Size());
size 4614 src/heap.cc int size = FixedArray::SizeFor(0);
size 4617 src/heap.cc AllocateRaw(size, OLD_DATA_SPACE, OLD_DATA_SPACE);
size 4636 src/heap.cc int size = FixedArray::SizeFor(length);
size 4637 src/heap.cc return size <= kMaxObjectSizeInNewSpace
size 4638 src/heap.cc ? new_space_.AllocateRaw(size)
size 4639 src/heap.cc : lo_space_->AllocateRaw(size, NOT_EXECUTABLE);
size 4711 src/heap.cc int size = FixedArray::SizeFor(length);
size 4712 src/heap.cc if (space == NEW_SPACE && size > kMaxObjectSizeInNewSpace) {
size 4716 src/heap.cc size > Page::kMaxNonCodeHeapObjectSize) {
size 4722 src/heap.cc (size <= Page::kMaxNonCodeHeapObjectSize) ? OLD_POINTER_SPACE : LO_SPACE;
size 4724 src/heap.cc return AllocateRaw(size, space, retry_space);
size 4784 src/heap.cc int size = FixedDoubleArray::SizeFor(0);
size 4787 src/heap.cc AllocateRaw(size, OLD_DATA_SPACE, OLD_DATA_SPACE);
size 4844 src/heap.cc int size = FixedDoubleArray::SizeFor(length);
size 4847 src/heap.cc size += kPointerSize;
size 4850 src/heap.cc if (space == NEW_SPACE && size > kMaxObjectSizeInNewSpace) {
size 4854 src/heap.cc size > Page::kMaxNonCodeHeapObjectSize) {
size 4860 src/heap.cc (size <= Page::kMaxNonCodeHeapObjectSize) ? OLD_DATA_SPACE : LO_SPACE;
size 4863 src/heap.cc { MaybeObject* maybe_object = AllocateRaw(size, space, retry_space);
size 4867 src/heap.cc return EnsureDoubleAligned(this, object, size);
size 5004 src/heap.cc int size = map->instance_size();
size 5006 src/heap.cc (size > Page::kMaxNonCodeHeapObjectSize) ? LO_SPACE : OLD_POINTER_SPACE;
size 5011 src/heap.cc Struct::cast(result)->InitializeBody(size);
size 7140 src/heap.cc Address chunk_end = chunk->address() + chunk->size();
size 334 src/heap.h inline void insert(HeapObject* target, int size);
size 336 src/heap.h void remove(HeapObject** target, int* size) {
size 341 src/heap.h *size = e.size_;
size 353 src/heap.h *size = static_cast<int>(*(--front_));
size 370 src/heap.h Entry(HeapObject* obj, int size) : obj_(obj), size_(size) { }
size 596 src/heap.h int size);
size 976 src/heap.h void CreateFillerObjectAt(Address addr, int size);
size 1495 src/heap.h void IncreaseTotalRegexpCodeGenerated(int size) {
size 1496 src/heap.h total_regexp_code_generated_ += size;
size 1607 src/heap.h void RecordObjectStats(InstanceType type, int sub_type, size_t size) {
size 1611 src/heap.h object_sizes_[type] += size;
size 1616 src/heap.h object_sizes_[FIRST_CODE_KIND_SUB_TYPE + sub_type] += size;
size 1753 src/heap.h int size;
size 1764 src/heap.h int size;
size 9496 src/hydrogen.cc unsigned size = sizes_[i];
size 9497 src/hydrogen.cc double size_percent = static_cast<double>(size) * 100 / total_size_;
size 9498 src/hydrogen.cc PrintF(" %8u bytes / %4.1f %%\n", size, size_percent);
size 9523 src/hydrogen.cc total_size_ += size;
size 9527 src/hydrogen.cc sizes_[i] += size;
size 9533 src/hydrogen.cc sizes_.Add(size);
size 9561 src/hydrogen.cc unsigned size = Zone::allocation_size_ - start_allocation_size_;
size 9562 src/hydrogen.cc HStatistics::Instance()->SaveTiming(name_, end - start_, size);
size 874 src/hydrogen.h void* operator new(size_t size, Zone* zone) {
size 875 src/hydrogen.h return zone->New(static_cast<int>(size));
size 1311 src/hydrogen.h void SaveTiming(const char* name, int64_t ticks, unsigned size);
size 77 src/ia32/assembler-ia32.cc ASSERT(memory->size() >= static_cast<size_t>(kBufferSize));
size 566 src/ia32/assembler-ia32.cc emit_w(size);
size 690 src/ia32/assembler-ia32.h void enter(const Immediate& size);
size 907 src/ia32/builtins-ia32.cc int size = JSArray::kSize;
size 909 src/ia32/builtins-ia32.cc size += FixedArray::SizeFor(initial_capacity);
size 911 src/ia32/builtins-ia32.cc __ AllocateInNewSpace(size,
size 327 src/ia32/code-stubs-ia32.cc int size = JSArray::kSize + elements_size;
size 331 src/ia32/code-stubs-ia32.cc __ AllocateInNewSpace(size, eax, ebx, edx, fail, TAG_OBJECT);
size 470 src/ia32/code-stubs-ia32.cc int size = JSObject::kHeaderSize + length_ * kPointerSize;
size 473 src/ia32/code-stubs-ia32.cc __ cmp(eax, Immediate(size >> kPointerSizeLog2));
size 478 src/ia32/code-stubs-ia32.cc __ AllocateInNewSpace(size, eax, ebx, edx, &slow_case, TAG_OBJECT);
size 479 src/ia32/code-stubs-ia32.cc for (int i = 0; i < size; i += kPointerSize) {
size 139 src/ia32/codegen-ia32.cc memcpy(dest, src, size);
size 70 src/ia32/cpu-ia32.cc unsigned res = VALGRIND_DISCARD_TRANSLATIONS(start, size);
size 1489 src/ia32/full-codegen-ia32.cc int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
size 1491 src/ia32/full-codegen-ia32.cc __ AllocateInNewSpace(size, eax, ecx, edx, &runtime_allocate, TAG_OBJECT);
size 1496 src/ia32/full-codegen-ia32.cc __ push(Immediate(Smi::FromInt(size)));
size 1503 src/ia32/full-codegen-ia32.cc for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
size 1509 src/ia32/full-codegen-ia32.cc if ((size % (2 * kPointerSize)) != 0) {
size 1510 src/ia32/full-codegen-ia32.cc __ mov(edx, FieldOperand(ebx, size - kPointerSize));
size 1511 src/ia32/full-codegen-ia32.cc __ mov(FieldOperand(eax, size - kPointerSize), edx);
size 4853 src/ia32/lithium-codegen-ia32.cc int size = instr->hydrogen()->total_size();
size 4876 src/ia32/lithium-codegen-ia32.cc __ AllocateInNewSpace(size, eax, ecx, edx, &runtime_allocate, TAG_OBJECT);
size 4880 src/ia32/lithium-codegen-ia32.cc __ push(Immediate(Smi::FromInt(size)));
size 4887 src/ia32/lithium-codegen-ia32.cc ASSERT_EQ(size, offset);
size 4955 src/ia32/lithium-codegen-ia32.cc int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
size 4957 src/ia32/lithium-codegen-ia32.cc __ AllocateInNewSpace(size, eax, ecx, edx, &runtime_allocate, TAG_OBJECT);
size 4962 src/ia32/lithium-codegen-ia32.cc __ push(Immediate(Smi::FromInt(size)));
size 4969 src/ia32/lithium-codegen-ia32.cc for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
size 4975 src/ia32/lithium-codegen-ia32.cc if ((size % (2 * kPointerSize)) != 0) {
size 4976 src/ia32/lithium-codegen-ia32.cc __ mov(edx, FieldOperand(ebx, size - kPointerSize));
size 4977 src/ia32/lithium-codegen-ia32.cc __ mov(FieldOperand(eax, size - kPointerSize), edx);
size 68 src/ia32/macro-assembler-ia32.h MacroAssembler(Isolate* isolate, void* buffer, int size);
size 927 src/ia32/macro-assembler-ia32.h CodePatcher(byte* address, int size);
size 4062 src/ia32/stub-cache-ia32.cc int size = FixedArray::SizeFor(JSArray::kPreallocatedArrayElements);
size 4063 src/ia32/stub-cache-ia32.cc __ AllocateInNewSpace(size, edi, ebx, ecx, &prepare_slow, TAG_OBJECT);
size 4198 src/ia32/stub-cache-ia32.cc int size = FixedDoubleArray::SizeFor(JSArray::kPreallocatedArrayElements);
size 4199 src/ia32/stub-cache-ia32.cc __ AllocateInNewSpace(size, edi, ebx, ecx, &prepare_slow, TAG_OBJECT);
size 302 src/incremental-marking.cc chunk->size() > static_cast<size_t>(Page::kPageSize) &&
size 460 src/incremental-marking.cc marking_deque_memory_->size(),
size 471 src/incremental-marking.cc marking_deque_memory_->size());
size 534 src/incremental-marking.cc size_t size = marking_deque_memory_->size();
size 535 src/incremental-marking.cc if (FLAG_force_marking_deque_overflows) size = 64 * kPointerSize;
size 536 src/incremental-marking.cc marking_deque_.Initialize(addr, addr + size);
size 827 src/incremental-marking.cc int size = obj->SizeFromMap(map);
size 828 src/incremental-marking.cc bytes_to_process -= size;
size 874 src/incremental-marking.cc obj->IterateBody(map->instance_type(), size, &marking_visitor);
size 881 src/incremental-marking.cc MemoryChunk::IncrementLiveBytesFromGC(obj->address(), size);
size 249 src/isolate.cc reinterpret_cast<PreallocatedStorage*>(new char[size]);
size 252 src/isolate.cc free_chunk->size_ = size - sizeof(PreallocatedStorage);
size 259 src/isolate.cc return FreeStoreAllocationPolicy().New(size);
size 264 src/isolate.cc size = (size + kPointerSize - 1) & ~(kPointerSize - 1);
size 269 src/isolate.cc if (storage->size_ == size) {
size 279 src/isolate.cc if (storage->size_ >= size + sizeof(PreallocatedStorage)) {
size 284 src/isolate.cc reinterpret_cast<char*>(storage + 1) + size);
size 285 src/isolate.cc left_over->size_ = storage->size_ - size - sizeof(PreallocatedStorage);
size 286 src/isolate.cc ASSERT(size + left_over->size_ + sizeof(PreallocatedStorage) ==
size 288 src/isolate.cc storage->size_ = size;
size 938 src/isolate.h void* PreallocatedStorageNew(size_t size);
size 940 src/isolate.h void PreallocatedStorageInit(size_t size);
size 532 src/jsregexp.cc int size = Max(registers_per_match, OffsetsVector::kStaticOffsetsVectorSize);
size 533 src/jsregexp.cc *max_matches = size / registers_per_match;
size 534 src/jsregexp.cc return size;
size 69 src/list.h INLINE(void* operator new(size_t size,
size 71 src/list.h return allocator.New(static_cast<int>(size));
size 584 src/liveobjectlist.cc int size = 0;
size 591 src/liveobjectlist.cc size = obj->Size();
size 612 src/liveobjectlist.cc Smi::FromInt(size),
size 646 src/liveobjectlist.cc *size = 0;
size 655 src/liveobjectlist.cc *size += heap_obj->Size();
size 737 src/liveobjectlist.cc *size = -1;
size 795 src/liveobjectlist.cc int size = heap_obj->Size();
size 799 src/liveobjectlist.cc sizes_[type] += size;
size 801 src/liveobjectlist.cc total_size_ += size;
size 945 src/liveobjectlist.cc int size = 0;
size 954 src/liveobjectlist.cc size += heap_obj->Size();
size 962 src/liveobjectlist.cc *size_p = size;
size 1106 src/liveobjectlist.cc int size = 0;
size 1130 src/liveobjectlist.cc size += heap_obj->Size();
size 1171 src/liveobjectlist.cc Smi::FromInt(size),
size 1278 src/liveobjectlist.cc int size = -1;
size 1279 src/liveobjectlist.cc writer->ComputeTotalCountAndSize(filter, &count, &size);
size 1319 src/liveobjectlist.cc if (size >= 0) {
size 1322 src/liveobjectlist.cc Smi::FromInt(size),
size 1441 src/liveobjectlist.cc int size = summary.Size(type);
size 1454 src/liveobjectlist.cc Smi::FromInt(size),
size 1566 src/liveobjectlist.cc int size;
size 1567 src/liveobjectlist.cc count = lol->GetTotalObjCountAndSize(&size);
size 1584 src/liveobjectlist.cc Smi::FromInt(size),
size 483 src/log.cc size = Min(size, kUtf8BufferSize - utf8_pos_);
size 484 src/log.cc memcpy(utf8_buffer_ + utf8_pos_, bytes, size);
size 485 src/log.cc utf8_pos_ += size;
size 499 src/log.cc int size = OS::SNPrintF(buffer, "%d", n);
size 500 src/log.cc if (size > 0 && utf8_pos_ + size <= kUtf8BufferSize) {
size 501 src/log.cc utf8_pos_ += size;
size 804 src/log.cc static_cast<unsigned int>(size));
size 818 src/log.cc LOGGER->NewEvent(name, object, size);
size 875 src/log.cc LowLevelCodeCreateEvent(code, name_buffer_->get(), name_buffer_->size());
size 878 src/log.cc RegisterSnapshotCodeName(code, name_buffer_->get(), name_buffer_->size());
size 910 src/log.cc LowLevelCodeCreateEvent(code, name_buffer_->get(), name_buffer_->size());
size 913 src/log.cc RegisterSnapshotCodeName(code, name_buffer_->get(), name_buffer_->size());
size 952 src/log.cc LowLevelCodeCreateEvent(code, name_buffer_->get(), name_buffer_->size());
size 955 src/log.cc RegisterSnapshotCodeName(code, name_buffer_->get(), name_buffer_->size());
size 997 src/log.cc LowLevelCodeCreateEvent(code, name_buffer_->get(), name_buffer_->size());
size 1000 src/log.cc RegisterSnapshotCodeName(code, name_buffer_->get(), name_buffer_->size());
size 1033 src/log.cc LowLevelCodeCreateEvent(code, name_buffer_->get(), name_buffer_->size());
size 1036 src/log.cc RegisterSnapshotCodeName(code, name_buffer_->get(), name_buffer_->size());
size 1066 src/log.cc LowLevelCodeCreateEvent(code, name_buffer_->get(), name_buffer_->size());
size 1069 src/log.cc RegisterSnapshotCodeName(code, name_buffer_->get(), name_buffer_->size());
size 1520 src/log.cc size_t rv = fwrite(bytes, 1, size, log_->ll_output_handle_);
size 1521 src/log.cc ASSERT(static_cast<size_t>(size) == rv);
size 178 src/log.h void NewEvent(const char* name, void* object, size_t size);
size 183 src/log.h static void NewEventStatic(const char* name, void* object, size_t size);
size 350 src/log.h void LowLevelLogWriteBytes(const char* bytes, int size);
size 2698 src/mark-compact.cc ASSERT(IsAligned(size, kPointerSize));
size 2700 src/mark-compact.cc for (int remaining = size / kPointerSize; remaining > 0; remaining--) {
size 2732 src/mark-compact.cc heap()->MoveBlock(dst, src, size);
size 2741 src/mark-compact.cc heap()->MoveBlock(dst, src, size);
size 2919 src/mark-compact.cc int size = object->Size();
size 2920 src/mark-compact.cc survivors_size += size;
size 2923 src/mark-compact.cc if (TryPromoteObject(object, size)) {
size 2928 src/mark-compact.cc MaybeObject* allocation = new_space->AllocateRaw(size);
size 2936 src/mark-compact.cc allocation = new_space->AllocateRaw(size);
size 2943 src/mark-compact.cc size,
size 2993 src/mark-compact.cc int size = object->Size();
size 2995 src/mark-compact.cc MaybeObject* target = space->AllocateRaw(size);
size 3006 src/mark-compact.cc size,
size 3168 src/mark-compact.cc int size = live_object->SizeFromMap(map);
size 3170 src/mark-compact.cc live_object->IterateBody(map->instance_type(), size, v);
size 3176 src/mark-compact.cc SkipList::RegionNumber(free_end + size - kPointerSize);
size 3179 src/mark-compact.cc skip_list->AddObject(free_end, size);
size 3183 src/mark-compact.cc free_start = free_end + size;
size 3806 src/mark-compact.cc size_t size = block_address - p->area_start();
size 3809 src/mark-compact.cc static_cast<int>(size)));
size 3817 src/mark-compact.cc size = free_end - p->area_start();
size 3819 src/mark-compact.cc static_cast<int>(size));
size 562 src/mark-compact.h int size,
size 1607 src/mips/assembler-mips.cc GenInstrRegister(SPECIAL3, rs, rt, pos + size - 1, pos, INS);
size 1615 src/mips/assembler-mips.cc GenInstrRegister(SPECIAL3, rs, rt, size - 1, pos, EXT);
size 788 src/mips/assembler-mips.h void ins_(Register rt, Register rs, uint16_t pos, uint16_t size);
size 789 src/mips/assembler-mips.h void ext_(Register rt, Register rs, uint16_t pos, uint16_t size);
size 125 src/mips/builtins-mips.cc int size = JSArray::kSize;
size 127 src/mips/builtins-mips.cc size += FixedArray::SizeFor(initial_capacity);
size 129 src/mips/builtins-mips.cc __ AllocateInNewSpace(size,
size 348 src/mips/code-stubs-mips.cc int size = JSArray::kSize + elements_size;
size 352 src/mips/code-stubs-mips.cc __ AllocateInNewSpace(size,
size 478 src/mips/code-stubs-mips.cc int size = JSObject::kHeaderSize + length_ * kPointerSize;
size 481 src/mips/code-stubs-mips.cc __ Branch(&slow_case, ne, a0, Operand(size >> kPointerSizeLog2));
size 485 src/mips/code-stubs-mips.cc __ AllocateInNewSpace(size, v0, a1, a2, &slow_case, TAG_OBJECT);
size 486 src/mips/code-stubs-mips.cc for (int i = 0; i < size; i += kPointerSize) {
size 62 src/mips/cpu-mips.cc if (size == 0) {
size 69 src/mips/cpu-mips.cc char *end = reinterpret_cast<char *>(start) + size;
size 75 src/mips/cpu-mips.cc res = syscall(__NR_cacheflush, start, size, ICACHE);
size 86 src/mips/cpu-mips.cc Simulator::FlushICache(Isolate::Current()->simulator_i_cache(), start, size);
size 1549 src/mips/full-codegen-mips.cc int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
size 1551 src/mips/full-codegen-mips.cc __ AllocateInNewSpace(size, v0, a2, a3, &runtime_allocate, TAG_OBJECT);
size 1556 src/mips/full-codegen-mips.cc __ li(a0, Operand(Smi::FromInt(size)));
size 1567 src/mips/full-codegen-mips.cc __ CopyFields(v0, t1, a2.bit(), size / kPointerSize);
size 4685 src/mips/lithium-codegen-mips.cc int size = instr->hydrogen()->total_size();
size 4708 src/mips/lithium-codegen-mips.cc __ AllocateInNewSpace(size, v0, a2, a3, &runtime_allocate, TAG_OBJECT);
size 4712 src/mips/lithium-codegen-mips.cc __ li(a0, Operand(Smi::FromInt(size)));
size 4720 src/mips/lithium-codegen-mips.cc ASSERT_EQ(size, offset);
size 4786 src/mips/lithium-codegen-mips.cc int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
size 4789 src/mips/lithium-codegen-mips.cc __ AllocateInNewSpace(size, v0, a2, a3, &runtime_allocate, TAG_OBJECT);
size 4793 src/mips/lithium-codegen-mips.cc __ li(a0, Operand(Smi::FromInt(size)));
size 4801 src/mips/lithium-codegen-mips.cc for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
size 4807 src/mips/lithium-codegen-mips.cc if ((size % (2 * kPointerSize)) != 0) {
size 4808 src/mips/lithium-codegen-mips.cc __ lw(a3, FieldMemOperand(a1, size - kPointerSize));
size 4809 src/mips/lithium-codegen-mips.cc __ sw(a3, FieldMemOperand(v0, size - kPointerSize));
size 932 src/mips/macro-assembler-mips.cc ASSERT(pos + size < 33);
size 935 src/mips/macro-assembler-mips.cc ext_(rt, rs, pos, size);
size 939 src/mips/macro-assembler-mips.cc int shift_left = 32 - (pos + size);
size 942 src/mips/macro-assembler-mips.cc int shift_right = 32 - size;
size 955 src/mips/macro-assembler-mips.cc ASSERT(pos + size <= 32);
size 956 src/mips/macro-assembler-mips.cc ASSERT(size != 0);
size 959 src/mips/macro-assembler-mips.cc ins_(rt, rs, pos, size);
size 963 src/mips/macro-assembler-mips.cc srl(at, at, 32 - size);
size 2489 src/mips/macro-assembler-mips.cc int size = 0;
size 2492 src/mips/macro-assembler-mips.cc size += 1;
size 2494 src/mips/macro-assembler-mips.cc size += 3;
size 2498 src/mips/macro-assembler-mips.cc size += 1;
size 2500 src/mips/macro-assembler-mips.cc return size * kInstrSize;
size 2535 src/mips/macro-assembler-mips.cc int size = CallSize(t9, cond, rs, rt, bd);
size 2536 src/mips/macro-assembler-mips.cc return size + 2 * kInstrSize;
size 138 src/mips/macro-assembler-mips.h MacroAssembler(Isolate* isolate, void* buffer, int size);
size 708 src/mips/macro-assembler-mips.h void Ins(Register rt, Register rs, uint16_t pos, uint16_t size);
size 709 src/mips/macro-assembler-mips.h void Ext(Register rt, Register rs, uint16_t pos, uint16_t size);
size 791 src/mips/simulator-mips.cc intptr_t end_page = ((start + size) & ~CachePage::kPageMask);
size 808 src/mips/simulator-mips.cc size += intra_line;
size 809 src/mips/simulator-mips.cc size = ((size - 1) | CachePage::kLineMask) + 1;
size 811 src/mips/simulator-mips.cc while (!AllOnOnePage(start, size - 1)) {
size 815 src/mips/simulator-mips.cc size -= bytes_to_flush;
size 819 src/mips/simulator-mips.cc if (size != 0) {
size 820 src/mips/simulator-mips.cc FlushOnePage(i_cache, start, size);
size 841 src/mips/simulator-mips.cc ASSERT(size <= CachePage::kPageSize);
size 842 src/mips/simulator-mips.cc ASSERT(AllOnOnePage(start, size - 1));
size 844 src/mips/simulator-mips.cc ASSERT((size & CachePage::kLineMask) == 0);
size 849 src/mips/simulator-mips.cc memset(valid_bytemap, CachePage::LINE_INVALID, size >> CachePage::kLineShift);
size 1896 src/mips/simulator-mips.cc uint16_t size = msb - lsb + 1;
size 1897 src/mips/simulator-mips.cc uint32_t mask = (1 << size) - 1;
size 1906 src/mips/simulator-mips.cc uint16_t size = msb + 1;
size 1907 src/mips/simulator-mips.cc uint32_t mask = (1 << size) - 1;
size 230 src/mips/simulator-mips.h size_t size);
size 331 src/mips/simulator-mips.h int size);
size 4550 src/mips/stub-cache-mips.cc int size = FixedArray::SizeFor(JSArray::kPreallocatedArrayElements);
size 4551 src/mips/stub-cache-mips.cc __ AllocateInNewSpace(size, elements_reg, scratch, scratch2, &slow,
size 4703 src/mips/stub-cache-mips.cc int size = FixedDoubleArray::SizeFor(JSArray::kPreallocatedArrayElements);
size 4704 src/mips/stub-cache-mips.cc __ AllocateInNewSpace(size, elements_reg, scratch1, scratch2, &slow,
size 327 src/mksnapshot.cc int size = ftell(file);
size 330 src/mksnapshot.cc char* chars = new char[size + 1];
size 331 src/mksnapshot.cc chars[size] = '\0';
size 332 src/mksnapshot.cc for (int i = 0; i < size;) {
size 333 src/mksnapshot.cc int read = static_cast<int>(fread(&chars[i], 1, size - i, file));
size 961 src/objects-debug.cc int size = Smi::cast(get(kCacheSizeIndex))->value();
size 962 src/objects-debug.cc ASSERT(kEntriesIndex <= size);
size 963 src/objects-debug.cc ASSERT(size <= length());
size 964 src/objects-debug.cc ASSERT_EQ(0, size % kEntrySize);
size 968 src/objects-debug.cc ASSERT((finger < size) || (finger == kEntriesIndex && finger == size));
size 972 src/objects-debug.cc for (int i = kEntriesIndex; i < size; i++) {
size 976 src/objects-debug.cc for (int i = size; i < length(); i++) {
size 1593 src/objects-inl.h int size = map->instance_size();
size 1597 src/objects-inl.h ASSERT(pre_allocated * kPointerSize + kHeaderSize <= size);
size 1603 src/objects-inl.h while (offset < size) {
size 2281 src/objects-inl.h SMI_ACCESSORS(FreeSpace, size, kSizeOffset)
size 2561 src/objects-inl.h int cache_size = size();
size 2570 src/objects-inl.h int JSFunctionResultCache::size() {
size 2575 src/objects-inl.h void JSFunctionResultCache::set_size(int size) {
size 2576 src/objects-inl.h set(kCacheSizeIndex, Smi::FromInt(size));
size 2851 src/objects-inl.h return reinterpret_cast<FreeSpace*>(this)->size();
size 5285 src/objects-inl.h template<int start_offset, int end_offset, int size>
size 5286 src/objects-inl.h void FixedBodyDescriptor<start_offset, end_offset, size>::IterateBody(
size 177 src/objects-visiting.h static const int size = object_size_in_words * kPointerSize;
size 178 src/objects-visiting.h Register(StaticVisitorBase::GetVisitorIdForSize(base, generic, size),
size 179 src/objects-visiting.h &Visitor::template VisitSpecialized<size>);
size 924 src/objects.cc int size = this->Size(); // Byte size of the original string.
size 925 src/objects.cc if (size < ExternalString::kShortSize) {
size 933 src/objects.cc if (size >= ExternalString::kSize) {
size 954 src/objects.cc heap->CreateFillerObjectAt(this->address() + new_size, size - new_size);
size 957 src/objects.cc new_size - size);
size 976 src/objects.cc int size = this->Size(); // Byte size of the original string.
size 977 src/objects.cc if (size < ExternalString::kShortSize) {
size 984 src/objects.cc if (size >= ExternalString::kSize) {
size 999 src/objects.cc heap->CreateFillerObjectAt(this->address() + new_size, size - new_size);
size 1002 src/objects.cc new_size - size);
size 5695 src/objects.cc int size = number_of_descriptors();
size 5698 src/objects.cc { MaybeObject* maybe_result = Allocate(size, MAY_BE_SHARED);
size 5705 src/objects.cc for (int index = 0; index < size; ++index) {
size 8441 src/objects.cc PrintF(out, "Safepoints (size = %u)\n", table.size());
size 8696 src/objects.cc int size = Size();
size 8772 src/objects.cc ASSERT(size == Size());
size 11972 src/objects.cc for (int entry = 0, size = Capacity(); entry < size; entry++) {
size 1252 src/objects.h template<int start_offset, int end_offset, int size>
size 1257 src/objects.h static const int kSize = size;
size 2912 src/objects.h static uint32_t GetProbe(uint32_t hash, uint32_t number, uint32_t size) {
size 2913 src/objects.h ASSERT(IsPowerOf2(size));
size 2914 src/objects.h return (hash + GetProbeOffset(number)) & (size - 1);
size 2917 src/objects.h static uint32_t FirstProbe(uint32_t hash, uint32_t size) {
size 2918 src/objects.h return hash & (size - 1);
size 2921 src/objects.h static uint32_t NextProbe(uint32_t last, uint32_t number, uint32_t size) {
size 2922 src/objects.h return (last + number) & (size - 1);
size 3379 src/objects.h inline int size();
size 3380 src/objects.h inline void set_size(int size);
size 3682 src/objects.h inline int size();
size 3685 src/objects.h inline int Size() { return size(); }
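The src/objects.h hits at lines 2912-2922 are V8's open-addressing probe helpers: table sizes are powers of two, so `& (size - 1)` stands in for the modulo, and each retry adds a growing step, giving the triangular-number probe sequence. The same helpers, restated as a standalone sketch (the hash table itself is omitted):

```cpp
#include <cassert>
#include <stdint.h>

static inline bool IsPowerOf2(uint32_t x) {
  return x != 0 && (x & (x - 1)) == 0;
}

// First slot to try for a given hash in a table of `size` buckets.
static inline uint32_t FirstProbe(uint32_t hash, uint32_t size) {
  assert(IsPowerOf2(size));
  return hash & (size - 1);
}

// `number` is how many probes have been made so far; adding it each time
// yields offsets of 0, 1, 3, 6, ... from the initial slot.
static inline uint32_t NextProbe(uint32_t last, uint32_t number, uint32_t size) {
  return (last + number) & (size - 1);
}
```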
size 124 src/platform-cygwin.cc reinterpret_cast<void*>(reinterpret_cast<char*>(address) + size));
size 156 src/platform-cygwin.cc int result = munmap(address, size);
size 164 src/platform-cygwin.cc VirtualProtect(address, size, PAGE_EXECUTE_READ, &old_protect);
size 170 src/platform-cygwin.cc VirtualProtect(address, size, PAGE_READONLY | PAGE_GUARD, &oldprotect);
size 210 src/platform-cygwin.cc int size = ftell(file);
size 213 src/platform-cygwin.cc mmap(0, size, PROT_READ | PROT_WRITE, MAP_SHARED, fileno(file), 0);
size 214 src/platform-cygwin.cc return new PosixMemoryMappedFile(file, memory, size);
size 222 src/platform-cygwin.cc int result = fwrite(initial, size, 1, file);
size 228 src/platform-cygwin.cc mmap(0, size, PROT_READ | PROT_WRITE, MAP_SHARED, fileno(file), 0);
size 229 src/platform-cygwin.cc return new PosixMemoryMappedFile(file, memory, size);
size 322 src/platform-cygwin.cc address_ = VirtualAlloc(NULL, size, MEM_RESERVE, PAGE_NOACCESS);
size 323 src/platform-cygwin.cc size_ = size;
size 336 src/platform-cygwin.cc if (NULL == VirtualAlloc(address, size, MEM_COMMIT, prot)) {
size 340 src/platform-cygwin.cc UpdateAllocatedSpaceLimits(address, static_cast<int>(size));
size 347 src/platform-cygwin.cc return VirtualFree(address, size, MEM_DECOMMIT) != false;
size 139 src/platform-freebsd.cc reinterpret_cast<void*>(reinterpret_cast<char*>(address) + size));
size 220 src/platform-freebsd.cc int size = ftell(file);
size 223 src/platform-freebsd.cc mmap(0, size, PROT_READ | PROT_WRITE, MAP_SHARED, fileno(file), 0);
size 224 src/platform-freebsd.cc return new PosixMemoryMappedFile(file, memory, size);
size 232 src/platform-freebsd.cc int result = fwrite(initial, size, 1, file);
size 238 src/platform-freebsd.cc mmap(0, size, PROT_READ | PROT_WRITE, MAP_SHARED, fileno(file), 0);
size 239 src/platform-freebsd.cc return new PosixMemoryMappedFile(file, memory, size);
size 333 src/platform-freebsd.cc address_ = ReserveRegion(size);
size 334 src/platform-freebsd.cc size_ = size;
size 341 src/platform-freebsd.cc size_t request_size = RoundUp(size + alignment,
size 362 src/platform-freebsd.cc size_t aligned_size = RoundUp(size, OS::AllocateAlignment());
size 380 src/platform-freebsd.cc bool result = ReleaseRegion(address(), size());
size 399 src/platform-freebsd.cc return CommitRegion(address, size, is_executable);
size 404 src/platform-freebsd.cc return UncommitRegion(address, size);
size 416 src/platform-freebsd.cc size,
size 431 src/platform-freebsd.cc size,
size 439 src/platform-freebsd.cc UpdateAllocatedSpaceLimits(base, size);
size 446 src/platform-freebsd.cc size,
size 455 src/platform-freebsd.cc return munmap(base, size) == 0;
size 317 src/platform-linux.cc reinterpret_cast<void*>(reinterpret_cast<char*>(address) + size));
size 351 src/platform-linux.cc int result = munmap(address, size);
size 406 src/platform-linux.cc int size = ftell(file);
size 410 src/platform-linux.cc size,
size 415 src/platform-linux.cc return new PosixMemoryMappedFile(file, memory, size);
size 423 src/platform-linux.cc int result = fwrite(initial, size, 1, file);
size 430 src/platform-linux.cc size,
size 435 src/platform-linux.cc return new PosixMemoryMappedFile(file, memory, size);
size 517 src/platform-linux.cc int size = sysconf(_SC_PAGESIZE);
size 520 src/platform-linux.cc size,
size 526 src/platform-linux.cc OS::Free(addr, size);
size 571 src/platform-linux.cc address_ = ReserveRegion(size);
size 572 src/platform-linux.cc size_ = size;
size 579 src/platform-linux.cc size_t request_size = RoundUp(size + alignment,
size 600 src/platform-linux.cc size_t aligned_size = RoundUp(size, OS::AllocateAlignment());
size 618 src/platform-linux.cc bool result = ReleaseRegion(address(), size());
size 637 src/platform-linux.cc return CommitRegion(address, size, is_executable);
size 642 src/platform-linux.cc return UncommitRegion(address, size);
size 654 src/platform-linux.cc size,
size 669 src/platform-linux.cc size,
size 677 src/platform-linux.cc UpdateAllocatedSpaceLimits(base, size);
size 684 src/platform-linux.cc size,
size 693 src/platform-linux.cc return munmap(base, size) == 0;
size 118 src/platform-macos.cc reinterpret_cast<void*>(reinterpret_cast<char*>(address) + size));
size 163 src/platform-macos.cc int result = munmap(address, size);
size 204 src/platform-macos.cc int size = ftell(file);
size 208 src/platform-macos.cc size,
size 213 src/platform-macos.cc return new PosixMemoryMappedFile(file, memory, size);
size 221 src/platform-macos.cc int result = fwrite(initial, size, 1, file);
size 228 src/platform-macos.cc size,
size 233 src/platform-macos.cc return new PosixMemoryMappedFile(file, memory, size);
size 249 src/platform-macos.cc uint64_t size;
size 254 src/platform-macos.cc &size);
size 256 src/platform-macos.cc unsigned int size;
size 257 src/platform-macos.cc char* code_ptr = getsectdatafromheader(header, SEG_TEXT, SECT_TEXT, &size);
size 263 src/platform-macos.cc SharedLibraryEvent(_dyld_get_image_name(i), start, start + size));
size 354 src/platform-macos.cc size_t request_size = RoundUp(size + alignment,
size 375 src/platform-macos.cc size_t aligned_size = RoundUp(size, OS::AllocateAlignment());
size 393 src/platform-macos.cc bool result = ReleaseRegion(address(), size());
size 408 src/platform-macos.cc size,
size 426 src/platform-macos.cc return CommitRegion(address, size, is_executable);
size 441 src/platform-macos.cc size,
size 449 src/platform-macos.cc UpdateAllocatedSpaceLimits(address, size);
size 455 src/platform-macos.cc return UncommitRegion(address, size);
size 461 src/platform-macos.cc size,
size 470 src/platform-macos.cc return munmap(address, size) == 0;
size 161 src/platform-openbsd.cc reinterpret_cast<void*>(reinterpret_cast<char*>(address) + size));
size 195 src/platform-openbsd.cc int result = munmap(address, size);
size 237 src/platform-openbsd.cc int size = ftell(file);
size 240 src/platform-openbsd.cc mmap(0, size, PROT_READ | PROT_WRITE, MAP_SHARED, fileno(file), 0);
size 241 src/platform-openbsd.cc return new PosixMemoryMappedFile(file, memory, size);
size 249 src/platform-openbsd.cc int result = fwrite(initial, size, 1, file);
size 255 src/platform-openbsd.cc mmap(0, size, PROT_READ | PROT_WRITE, MAP_SHARED, fileno(file), 0);
size 256 src/platform-openbsd.cc return new PosixMemoryMappedFile(file, memory, size);
size 338 src/platform-openbsd.cc int size = sysconf(_SC_PAGESIZE);
size 340 src/platform-openbsd.cc void* addr = mmap(NULL, size, PROT_READ | PROT_EXEC, MAP_PRIVATE,
size 343 src/platform-openbsd.cc OS::Free(addr, size);
size 384 src/platform-openbsd.cc address_ = ReserveRegion(size);
size 385 src/platform-openbsd.cc size_ = size;
size 392 src/platform-openbsd.cc size_t request_size = RoundUp(size + alignment,
size 413 src/platform-openbsd.cc size_t aligned_size = RoundUp(size, OS::AllocateAlignment());
size 431 src/platform-openbsd.cc bool result = ReleaseRegion(address(), size());
size 450 src/platform-openbsd.cc return CommitRegion(address, size, is_executable);
size 455 src/platform-openbsd.cc return UncommitRegion(address, size);
size 467 src/platform-openbsd.cc size,
size 482 src/platform-openbsd.cc size,
size 490 src/platform-openbsd.cc UpdateAllocatedSpaceLimits(base, size);
size 497 src/platform-openbsd.cc size,
size 506 src/platform-openbsd.cc return munmap(base, size) == 0;
size 85 src/platform-posix.cc mprotect(address, size, PROT_READ | PROT_EXEC);
size 91 src/platform-posix.cc mprotect(address, size, PROT_NONE);
size 319 src/platform-posix.cc (*memcopy_function)(dest, src, size);
size 321 src/platform-posix.cc CHECK_EQ(0, memcmp(dest, src, size));
size 153 src/platform-solaris.cc reinterpret_cast<void*>(reinterpret_cast<char*>(address) + size));
size 186 src/platform-solaris.cc int result = munmap(address, size);
size 228 src/platform-solaris.cc int size = ftell(file);
size 231 src/platform-solaris.cc mmap(0, size, PROT_READ | PROT_WRITE, MAP_SHARED, fileno(file), 0);
size 232 src/platform-solaris.cc return new PosixMemoryMappedFile(file, memory, size);
size 240 src/platform-solaris.cc int result = fwrite(initial, size, 1, file);
size 246 src/platform-solaris.cc mmap(0, size, PROT_READ | PROT_WRITE, MAP_SHARED, fileno(file), 0);
size 247 src/platform-solaris.cc return new PosixMemoryMappedFile(file, memory, size);
size 325 src/platform-solaris.cc address_ = ReserveRegion(size);
size 326 src/platform-solaris.cc size_ = size;
size 333 src/platform-solaris.cc size_t request_size = RoundUp(size + alignment,
size 354 src/platform-solaris.cc size_t aligned_size = RoundUp(size, OS::AllocateAlignment());
size 372 src/platform-solaris.cc bool result = ReleaseRegion(address(), size());
size 391 src/platform-solaris.cc return CommitRegion(address, size, is_executable);
size 396 src/platform-solaris.cc return UncommitRegion(address, size);
size 408 src/platform-solaris.cc size,
size 423 src/platform-solaris.cc size,
size 431 src/platform-solaris.cc UpdateAllocatedSpaceLimits(base, size);
size 438 src/platform-solaris.cc size,
size 447 src/platform-solaris.cc return munmap(base, size) == 0;
size 151 src/platform-win32.cc (*memcopy_function)(dest, src, size);
size 153 src/platform-win32.cc CHECK_EQ(0, memcmp(dest, src, size));
size 822 src/platform-win32.cc reinterpret_cast<void*>(reinterpret_cast<char*>(address) + size));
size 896 src/platform-win32.cc base = VirtualAlloc(GetRandomAddr(), size, action, protection);
size 901 src/platform-win32.cc if (base == NULL) base = VirtualAlloc(NULL, size, action, protection);
size 936 src/platform-win32.cc USE(size);
size 947 src/platform-win32.cc VirtualProtect(address, size, PAGE_EXECUTE_READ, &old_protect);
size 953 src/platform-win32.cc VirtualProtect(address, size, PAGE_READONLY | PAGE_GUARD, &oldprotect);
size 1008 src/platform-win32.cc int size = static_cast<int>(GetFileSize(file, NULL));
size 1012 src/platform-win32.cc PAGE_READWRITE, 0, static_cast<DWORD>(size), NULL);
size 1016 src/platform-win32.cc void* memory = MapViewOfFile(file_mapping, FILE_MAP_ALL_ACCESS, 0, 0, size);
size 1017 src/platform-win32.cc return new Win32MemoryMappedFile(file, file_mapping, memory, size);
size 1029 src/platform-win32.cc PAGE_READWRITE, 0, static_cast<DWORD>(size), NULL);
size 1032 src/platform-win32.cc void* memory = MapViewOfFile(file_mapping, FILE_MAP_ALL_ACCESS, 0, 0, size);
size 1033 src/platform-win32.cc if (memory) memmove(memory, initial, size);
size 1034 src/platform-win32.cc return new Win32MemoryMappedFile(file, file_mapping, memory, size);
size 1459 src/platform-win32.cc size_t request_size = RoundUp(size + alignment,
size 1468 src/platform-win32.cc address = VirtualAlloc(base, size, MEM_RESERVE, PAGE_NOACCESS);
size 1470 src/platform-win32.cc request_size = size;
size 1503 src/platform-win32.cc if (CommitRegion(address, size, is_executable)) {
size 1504 src/platform-win32.cc UpdateAllocatedSpaceLimits(address, static_cast<int>(size));
size 1513 src/platform-win32.cc return UncommitRegion(address, size);
size 1518 src/platform-win32.cc return RandomizedVirtualAlloc(size, MEM_RESERVE, PAGE_NOACCESS);
size 1524 src/platform-win32.cc if (NULL == VirtualAlloc(base, size, MEM_COMMIT, prot)) {
size 1528 src/platform-win32.cc UpdateAllocatedSpaceLimits(base, static_cast<int>(size));
size 1545 src/platform-win32.cc return VirtualFree(base, size, MEM_DECOMMIT) != 0;
size 188 src/platform.h static void Free(void* address, const size_t size);
size 195 src/platform.h static void ProtectCode(void* address, const size_t size);
size 198 src/platform.h static void Guard(void* address, const size_t size);
size 248 src/platform.h static MemoryMappedFile* create(const char* name, int size, void* initial);
size 251 src/platform.h virtual int size() = 0;
size 307 src/platform.h static void MemCopy(void* dest, const void* src, size_t size);
size 314 src/platform.h static void MemCopy(void* dest, const void* src, size_t size) {
size 315 src/platform.h memcpy(dest, src, size);
size 338 src/platform.h explicit VirtualMemory(size_t size);
size 343 src/platform.h VirtualMemory(size_t size, size_t alignment);
size 368 src/platform.h size_t size() { return size_; }
size 371 src/platform.h bool Commit(void* address, size_t size, bool is_executable);
size 374 src/platform.h bool Uncommit(void* address, size_t size);
size 384 src/platform.h size_t size = size_;
size 386 src/platform.h bool result = ReleaseRegion(address, size);
size 400 src/platform.h static void* ReserveRegion(size_t size);
size 402 src/platform.h static bool CommitRegion(void* base, size_t size, bool is_executable);
size 404 src/platform.h static bool UncommitRegion(void* base, size_t size);
size 408 src/platform.h static bool ReleaseRegion(void* base, size_t size);
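The src/platform.h hits sketch the VirtualMemory wrapper that the per-OS files above implement: reserve a large region up front, `Commit`/`Uncommit` pages inside it on demand, and release the whole region in the destructor. A POSIX-flavoured sketch of that shape; note the real implementations re-`mmap` with `MAP_FIXED` rather than using `mprotect`, so treat this purely as an illustration:

```cpp
#include <sys/mman.h>
#include <cstddef>

class VirtualMemory {
 public:
  // Reserve (but do not commit) `size` bytes of address space.
  explicit VirtualMemory(size_t size)
      : address_(mmap(NULL, size, PROT_NONE,
                      MAP_PRIVATE | MAP_ANONYMOUS | MAP_NORESERVE, -1, 0)),
        size_(size) {}

  ~VirtualMemory() {
    if (address_ != MAP_FAILED) munmap(address_, size_);
  }

  bool IsReserved() { return address_ != MAP_FAILED; }
  void* address() { return address_; }
  size_t size() { return size_; }

  // Make [address, address + size) readable/writable (plus executable if asked).
  bool Commit(void* address, size_t size, bool is_executable) {
    int prot = PROT_READ | PROT_WRITE | (is_executable ? PROT_EXEC : 0);
    return mprotect(address, size, prot) == 0;
  }

  // Drop access again; the address range stays reserved.
  bool Uncommit(void* address, size_t size) {
    return mprotect(address, size, PROT_NONE) == 0;
  }

 private:
  void* address_;
  size_t size_;
};
```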
size 93 src/preparse-data.cc int function_size = function_store_.size();
size 143 src/preparse-data.cc int function_size = function_store_.size();
size 145 src/preparse-data.cc int symbol_size = symbol_store_.size();
size 108 src/preparse-data.h virtual int function_position() { return function_store_.size(); }
size 182 src/preparse-data.h virtual int symbol_position() { return symbol_store_.size(); }
size 203 src/preparser-api.cc size_t size = pre_data.length() * sizeof(pre_data[0]);
size 205 src/preparser-api.cc return PreParserData(size, data);
size 173 src/profile-generator.cc size_t size = sizeof(*this);
size 174 src/profile-generator.cc size += sizeof(HashMap::Entry) * names_.capacity();
size 176 src/profile-generator.cc size += strlen(reinterpret_cast<const char*>(p->value)) + 1;
size 178 src/profile-generator.cc return size;
size 512 src/profile-generator.cc DeleteAllCoveredCode(addr, addr + size);
size 515 src/profile-generator.cc locator.set_value(CodeEntryInfo(entry, size));
size 525 src/profile-generator.cc Address start2 = locator.key(), end2 = start2 + locator.value().size;
size 538 src/profile-generator.cc if (addr < (locator.key() + entry.size))
size 551 src/profile-generator.cc return entry.size;
size 567 src/profile-generator.cc AddCode(to, entry.entry, entry.size);
size 573 src/profile-generator.cc OS::Print("%p %5d %s\n", key, value.size, value.entry->name());
size 1187 src/profile-generator.cc HeapEntry entry(this, type, name, id, size);
size 1347 src/profile-generator.cc entry_info.size = size;
size 1353 src/profile-generator.cc entries_.Add(EntryInfo(id, addr, size));
size 1392 src/profile-generator.cc entries_size += entry_info->size;
size 1398 src/profile-generator.cc time_interval.size != entries_size) {
size 1402 src/profile-generator.cc time_interval.size = entries_size));
size 1557 src/profile-generator.cc size_t size = sizeof(*this);
size 1558 src/profile-generator.cc size += names_.GetUsedMemorySize();
size 1559 src/profile-generator.cc size += ids_.GetUsedMemorySize();
size 1560 src/profile-generator.cc size += sizeof(HashMap::Entry) * snapshots_uids_.capacity();
size 1561 src/profile-generator.cc size += GetMemoryUsedByList(snapshots_);
size 1563 src/profile-generator.cc size += snapshots_[i]->RawSnapshotSize();
size 1565 src/profile-generator.cc return size;
size 2760 src/profile-generator.cc intptr_t size = info->GetSizeInBytes();
size 2769 src/profile-generator.cc size != -1 ? static_cast<int>(size) : 0);
size 244 src/profile-generator.h void AddCode(Address addr, CodeEntry* entry, unsigned size);
size 254 src/profile-generator.h : entry(an_entry), size(a_size) { }
size 256 src/profile-generator.h unsigned size;
size 607 src/profile-generator.h int size);
size 646 src/profile-generator.h SnapshotObjectId FindOrAddEntry(Address addr, unsigned int size);
size 668 src/profile-generator.h EntryInfo(SnapshotObjectId id, Address addr, unsigned int size)
size 669 src/profile-generator.h : id(id), addr(addr), size(size), accessed(true) { }
size 670 src/profile-generator.h EntryInfo(SnapshotObjectId id, Address addr, unsigned int size, bool accessed)
size 671 src/profile-generator.h : id(id), addr(addr), size(size), accessed(accessed) { }
size 674 src/profile-generator.h unsigned int size;
size 678 src/profile-generator.h explicit TimeInterval(SnapshotObjectId id) : id(id), size(0), count(0) { }
size 680 src/profile-generator.h uint32_t size;
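The src/profile-generator hits around CodeMap (AddCode, GetCodeSize, MoveCode and the CodeEntryInfo pair) show an address-range index: generated code is registered as a (start, size) pair and later looked up by any address inside the range. A simplified sketch using std::map instead of V8's splay tree; overlap eviction (DeleteAllCoveredCode) is omitted and CodeEntryInfo here is just a name/size pair:

```cpp
#include <map>
#include <string>
#include <stdint.h>

typedef uintptr_t Address;

struct CodeEntryInfo {
  std::string name;
  unsigned size;
};

class CodeMap {
 public:
  void AddCode(Address start, const std::string& name, unsigned size) {
    CodeEntryInfo info;
    info.name = name;
    info.size = size;
    map_[start] = info;
  }

  // Returns the entry whose [start, start + size) range covers addr, or NULL.
  const CodeEntryInfo* FindEntry(Address addr) const {
    std::map<Address, CodeEntryInfo>::const_iterator it = map_.upper_bound(addr);
    if (it == map_.begin()) return NULL;
    --it;  // Greatest start <= addr.
    return addr < it->first + it->second.size ? &it->second : NULL;
  }

  void MoveCode(Address from, Address to) {
    std::map<Address, CodeEntryInfo>::iterator it = map_.find(from);
    if (it == map_.end()) return;
    CodeEntryInfo entry = it->second;
    map_.erase(it);
    map_[to] = entry;
  }

 private:
  std::map<Address, CodeEntryInfo> map_;
};
```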
size 260 src/regexp-macro-assembler.cc size_t size = regexp_stack->stack_capacity();
size 264 src/regexp-macro-assembler.cc ASSERT(static_cast<size_t>(old_stack_base - stack_pointer) <= size);
size 265 src/regexp-macro-assembler.cc Address new_stack_base = regexp_stack->EnsureCapacity(size * 2);
size 58 src/regexp-stack.cc size_t size = sizeof(thread_local_);
size 61 src/regexp-stack.cc size);
size 63 src/regexp-stack.cc return to + size;
size 68 src/regexp-stack.cc size_t size = sizeof(thread_local_);
size 69 src/regexp-stack.cc memcpy(&thread_local_, reinterpret_cast<void*>(from), size);
size 70 src/regexp-stack.cc return from + size;
size 91 src/regexp-stack.cc if (size > kMaximumStackSize) return NULL;
size 92 src/regexp-stack.cc if (size < kMinimumStackSize) size = kMinimumStackSize;
size 93 src/regexp-stack.cc if (thread_local_.memory_size_ < size) {
size 94 src/regexp-stack.cc Address new_memory = NewArray<byte>(static_cast<int>(size));
size 98 src/regexp-stack.cc new_memory + size - thread_local_.memory_size_),
size 104 src/regexp-stack.cc thread_local_.memory_size_ = size;
size 83 src/regexp-stack.h Address EnsureCapacity(size_t size);
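
The regexp-stack lines above clamp the requested capacity between a minimum and a maximum, allocate a larger block when needed, and copy the old contents to the top of the new block because the stack grows downward. A minimal sketch of that grow-on-demand pattern follows; GrowableDownwardStack and its limits are illustrative, not V8's RegExpStack.

// Sketch only: downward-growing stack buffer that clamps the requested
// size and preserves the used high end when it grows.
#include <cstddef>
#include <cstdlib>
#include <cstring>

class GrowableDownwardStack {
 public:
  static const size_t kMinimumSize = 1024;
  static const size_t kMaximumSize = 64 * 1024;

  GrowableDownwardStack() : memory_(NULL), memory_size_(0) { }
  ~GrowableDownwardStack() { free(memory_); }

  // Returns the new base (lowest address) or NULL if the request is too big.
  unsigned char* EnsureCapacity(size_t size) {
    if (size > kMaximumSize) return NULL;
    if (size < kMinimumSize) size = kMinimumSize;
    if (memory_size_ < size) {
      unsigned char* new_memory = static_cast<unsigned char*>(malloc(size));
      if (new_memory == NULL) return NULL;
      if (memory_ != NULL) {
        // Keep the live data at the high end of the larger block.
        memcpy(new_memory + size - memory_size_, memory_, memory_size_);
        free(memory_);
      }
      memory_ = new_memory;
      memory_size_ = size;
    }
    return memory_;
  }

 private:
  unsigned char* memory_;
  size_t memory_size_;
};
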
size 9514 src/runtime.cc int size = size_smi->value();
size 9515 src/runtime.cc RUNTIME_ASSERT(IsAligned(size, kPointerSize));
size 9516 src/runtime.cc RUNTIME_ASSERT(size > 0);
size 9519 src/runtime.cc RUNTIME_ASSERT(size <= kMinFreeNewSpaceAfterGC);
size 9521 src/runtime.cc { MaybeObject* maybe_allocation = heap->new_space()->AllocateRaw(size);
size 9523 src/runtime.cc heap->CreateFillerObjectAt(HeapObject::cast(allocation)->address(), size);
size 13416 src/runtime.cc int size = cache->size();
size 13417 src/runtime.cc ASSERT(size <= cache->length());
size 13419 src/runtime.cc for (int i = size - 2; i > finger_index; i -= 2) {
size 13457 src/runtime.cc size = cache_handle->size();
size 13462 src/runtime.cc if (size < cache_handle->length()) {
size 13463 src/runtime.cc cache_handle->set_size(size + JSFunctionResultCache::kEntrySize);
size 13464 src/runtime.cc index = size;
size 108 src/safepoint-table.h int size() const {
size 522 src/serialize.cc for (int i = 0; i < external_references->size(); ++i) {
size 568 src/serialize.cc for (int i = 0; i < external_references->size(); ++i) {
size 600 src/serialize.cc size <= Page::kPageSize - Page::kObjectStartOffset);
size 604 src/serialize.cc reinterpret_cast<NewSpace*>(space)->AllocateRaw(size);
size 607 src/serialize.cc reinterpret_cast<PagedSpace*>(space)->AllocateRaw(size);
size 613 src/serialize.cc high_water_[space_index] = address + size;
size 620 src/serialize.cc lo_space->AllocateRaw(size, NOT_EXECUTABLE)->ToObjectUnchecked();
size 624 src/serialize.cc lo_space->AllocateRaw(size, EXECUTABLE)->ToObjectUnchecked();
size 735 src/serialize.cc int size = source_->GetInt() << kObjectAlignmentBits;
size 736 src/serialize.cc Address address = Allocate(space_number, space, size);
size 739 src/serialize.cc Object** limit = current + (size >> kPointerSizeLog2);
size 930 src/serialize.cc source_->CopyRaw(raw_data_out, size); \
size 931 src/serialize.cc current = reinterpret_cast<Object**>(raw_data_out + size); \
size 940 src/serialize.cc int size = source_->GetInt();
size 942 src/serialize.cc source_->CopyRaw(raw_data_out, size);
size 943 src/serialize.cc current = reinterpret_cast<Object**>(raw_data_out + size);
size 1412 src/serialize.cc int size = object_->Size();
size 1416 src/serialize.cc sink_->PutInt(size >> kObjectAlignmentBits, "Size in words");
size 1423 src/serialize.cc int offset = serializer_->Allocate(space, size, &start_new_page);
size 1436 src/serialize.cc object_->IterateBody(object_->map()->instance_type(), size, this);
size 1437 src/serialize.cc OutputRawData(object_->address() + size);
size 1654 src/serialize.cc large_object_total_ += size;
size 1670 src/serialize.cc CHECK(size <= SpaceAreaSize(space));
size 1671 src/serialize.cc if (used_in_this_page + size > SpaceAreaSize(space)) {
size 1677 src/serialize.cc fullness_[space] = allocation_address + size;
size 72 src/serialize.h int size() const { return refs_.length(); }
size 363 src/serialize.h Address Allocate(int space_number, Space* space, int size);
size 553 src/serialize.h int Allocate(int space, int size, bool* new_page_started);
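
The serialize.cc lines above write an object's size as size >> kObjectAlignmentBits ("Size in words") and the reader shifts it back, which works because heap object sizes are always a multiple of the allocation alignment. A tiny sketch of that encoding, assuming 8-byte alignment; the constant and helper names here are illustrative, not V8's.

// Sketch only: sizes are aligned, so only the word count is stored.
#include <cassert>

static const int kObjectAlignmentBits = 3;  // Assumes 8-byte alignment.

int EncodeSize(int size) {
  assert((size & ((1 << kObjectAlignmentBits) - 1)) == 0);  // Must be aligned.
  return size >> kObjectAlignmentBits;                      // Size in words.
}

int DecodeSize(int encoded) {
  return encoded << kObjectAlignmentBits;                   // Back to bytes.
}

int main() {
  assert(DecodeSize(EncodeSize(64)) == 64);
  return 0;
}
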
size 56 src/snapshot.h static int size() { return size_; }
size 133 src/spaces-inl.h void MemoryAllocator::Protect(Address start, size_t size) {
size 134 src/spaces-inl.h OS::Protect(start, size);
size 139 src/spaces-inl.h size_t size,
size 141 src/spaces-inl.h OS::Unprotect(start, size, executable);
size 147 src/spaces-inl.h OS::Protect(chunks_[id].address(), chunks_[id].size());
size 153 src/spaces-inl.h OS::Unprotect(chunks_[id].address(), chunks_[id].size(),
size 167 src/spaces-inl.h ASSERT(chunk->size() <= static_cast<size_t>(kPageSize));
size 147 src/spaces.cc ASSERT(code_range_->size() == requested);
size 153 src/spaces.cc size_t size = code_range_->size() - (aligned_base - base);
size 154 src/spaces.cc allocation_list_.Add(FreeBlock(aligned_base, size));
size 173 src/spaces.cc if (requested <= allocation_list_[current_allocation_block_index_].size) {
size 187 src/spaces.cc free_list_[i].start == merged.start + merged.size) {
size 188 src/spaces.cc merged.size += free_list_[i].size;
size 191 src/spaces.cc if (merged.size > 0) {
size 200 src/spaces.cc if (requested <= allocation_list_[current_allocation_block_index_].size) {
size 214 src/spaces.cc if (requested > allocation_list_[current_allocation_block_index_].size) {
size 222 src/spaces.cc if (aligned_requested >= (current.size - Page::kPageSize)) {
size 224 src/spaces.cc *allocated = current.size;
size 228 src/spaces.cc ASSERT(*allocated <= current.size);
size 237 src/spaces.cc allocation_list_[current_allocation_block_index_].size -= *allocated;
size 238 src/spaces.cc if (*allocated == current.size) {
size 299 src/spaces.cc size_t size = reservation->size();
size 300 src/spaces.cc ASSERT(size_ >= size);
size 301 src/spaces.cc size_ -= size;
size 303 src/spaces.cc isolate_->counters()->memory_allocated()->Decrement(static_cast<int>(size));
size 306 src/spaces.cc ASSERT(size_executable_ >= size);
size 307 src/spaces.cc size_executable_ -= size;
size 321 src/spaces.cc ASSERT(size_ >= size);
size 322 src/spaces.cc size_ -= size;
size 324 src/spaces.cc isolate_->counters()->memory_allocated()->Decrement(static_cast<int>(size));
size 327 src/spaces.cc ASSERT(size_executable_ >= size);
size 328 src/spaces.cc size_executable_ -= size;
size 332 src/spaces.cc isolate_->code_range()->FreeRawMemory(base, size);
size 335 src/spaces.cc bool result = VirtualMemory::ReleaseRegion(base, size);
size 345 src/spaces.cc VirtualMemory reservation(size, alignment);
size 348 src/spaces.cc size_ += reservation.size();
size 361 src/spaces.cc Address base = ReserveAlignedMemory(size, alignment, &reservation);
size 365 src/spaces.cc if (!CommitCodePage(&reservation, base, size)) {
size 369 src/spaces.cc if (!reservation.Commit(base, size, false)) {
size 442 src/spaces.cc chunk->size_ = size;
size 528 src/spaces.cc size_executable_ += reservation.size();
size 578 src/spaces.cc MemoryChunk* chunk = AllocateChunk(size, executable, owner);
size 600 src/spaces.cc PerformAllocationCallback(space, kAllocationActionFree, chunk->size());
size 614 src/spaces.cc chunk->size(),
size 623 src/spaces.cc if (!VirtualMemory::CommitRegion(start, size, executable)) return false;
size 625 src/spaces.cc ZapBlock(start, size);
size 627 src/spaces.cc isolate_->counters()->memory_allocated()->Increment(static_cast<int>(size));
size 633 src/spaces.cc if (!VirtualMemory::UncommitRegion(start, size)) return false;
size 634 src/spaces.cc isolate_->counters()->memory_allocated()->Decrement(static_cast<int>(size));
size 640 src/spaces.cc for (size_t s = 0; s + kPointerSize <= size; s += kPointerSize) {
size 654 src/spaces.cc registration.callback(space, action, static_cast<int>(size));
size 745 src/spaces.cc size_t area_size = size - CodePageAreaStartOffset() - CodePageGuardSize();
size 856 src/spaces.cc intptr_t size = AreaSize();
size 859 src/spaces.cc size = SizeOfFirstPage();
size 863 src/spaces.cc size, this, executable());
size 875 src/spaces.cc int size = 0;
size 878 src/spaces.cc size = 64 * kPointerSize * KB;
size 881 src/spaces.cc size = 192 * KB;
size 884 src/spaces.cc size = 128 * KB;
size 887 src/spaces.cc size = 96 * KB;
size 894 src/spaces.cc size = AreaSize();
size 896 src/spaces.cc size = 384 * KB;
size 902 src/spaces.cc return Min(size, AreaSize());
size 930 src/spaces.cc intptr_t size = free_list_.EvictFreeListItems(page);
size 931 src/spaces.cc accounting_stats_.AllocateBytes(size);
size 932 src/spaces.cc ASSERT_EQ(AreaSize(), static_cast<int>(size));
size 962 src/spaces.cc FreeSpace::cast(obj)->size() == AreaSize()) {
size 1023 src/spaces.cc int size = object->Size();
size 1024 src/spaces.cc object->IterateBody(map->instance_type(), size, visitor);
size 1026 src/spaces.cc black_size += size;
size 1029 src/spaces.cc ASSERT(object->address() + size <= top);
size 1030 src/spaces.cc end_of_previous_object = object->address() + size;
size 1051 src/spaces.cc size_t size = 2 * reserved_semispace_capacity;
size 1054 src/spaces.cc size, size, &reservation_);
size 1058 src/spaces.cc chunk_size_ = static_cast<uintptr_t>(size);
size 1296 src/spaces.cc int size = object->Size();
size 1297 src/spaces.cc object->IterateBody(map->instance_type(), size, &visitor);
size 1299 src/spaces.cc current += size;
size 1989 src/spaces.cc int size = cur_as_free_space->Size();
size 1990 src/spaces.cc if (size >= size_in_bytes) {
size 1993 src/spaces.cc *node_size = size;
size 2370 src/spaces.cc if (cs.size > 0) {
size 2371 src/spaces.cc PrintF(" %-30s: %10d/%6d (%d)\n", cs.comment, cs.size, cs.count,
size 2372 src/spaces.cc cs.size/cs.count);
size 2388 src/spaces.cc comments_statistics[CommentStatistic::kMaxComments].size = 0;
size 2414 src/spaces.cc cs->size += delta;
size 2606 src/spaces.cc space, kAllocationActionFree, page->size());
size 2631 src/spaces.cc size_ += static_cast<int>(page->size());
size 2640 src/spaces.cc uintptr_t limit = base + (page->size() - 1) / MemoryChunk::kAlignment;
size 2717 src/spaces.cc size_ -= static_cast<int>(page->size());
size 2726 src/spaces.cc uintptr_t limit = base + (page->size()-1)/alignment;
size 107 src/spaces.h ASSERT((0 < size) && (size <= Page::kMaxNonCodeHeapObjectSize))
size 504 src/spaces.h size_t size() const { return size_; }
size 506 src/spaces.h void set_size(size_t size) {
size 507 src/spaces.h size_ = size;
size 631 src/spaces.h size_t size,
size 781 src/spaces.h virtual int RoundSizeDownToObjectAlignment(int size) {
size 783 src/spaces.h return RoundDown(size, kCodeAlignment);
size 785 src/spaces.h return RoundDown(size, kPointerSize);
size 833 src/spaces.h return start <= address && address < start + code_range_->size();
size 852 src/spaces.h : start(start_arg), size(size_arg) {
size 854 src/spaces.h ASSERT(size >= static_cast<size_t>(Page::kPageSize));
size 857 src/spaces.h : start(static_cast<Address>(start_arg)), size(size_arg) {
size 859 src/spaces.h ASSERT(size >= static_cast<size_t>(Page::kPageSize));
size 863 src/spaces.h size_t size;
size 904 src/spaces.h void AddObject(Address addr, int size) {
size 906 src/spaces.h int end_region = RegionNumber(addr + size - kPointerSize);
size 916 src/spaces.h static void Update(Address addr, int size) {
size 924 src/spaces.h list->AddObject(addr, size);
size 956 src/spaces.h intptr_t size, PagedSpace* owner, Executability executable);
size 1001 src/spaces.h void FreeMemory(Address addr, size_t size, Executability executable);
size 1007 src/spaces.h bool CommitBlock(Address start, size_t size, Executability executable);
size 1013 src/spaces.h bool UncommitBlock(Address start, size_t size);
size 1017 src/spaces.h void ZapBlock(Address start, size_t size);
size 1021 src/spaces.h size_t size);
size 1047 src/spaces.h size_t size);
size 1541 src/spaces.h void IncreaseCapacity(int size) {
size 1542 src/spaces.h accounting_stats_.ExpandSpace(size);
size 1691 src/spaces.h void increment_bytes(int size) { bytes_ += size; }
size 2006 src/spaces.h int size = (size_func_ == NULL) ? object->Size() : size_func_(object);
size 2008 src/spaces.h current_ += size;
size 2404 src/spaces.h virtual int RoundSizeDownToObjectAlignment(int size) {
size 2406 src/spaces.h return RoundDown(size, Map::kSize);
size 2408 src/spaces.h return (size / Map::kSize) * Map::kSize;
size 2442 src/spaces.h virtual int RoundSizeDownToObjectAlignment(int size) {
size 2444 src/spaces.h return RoundDown(size, JSGlobalPropertyCell::kSize);
size 2446 src/spaces.h return (size / JSGlobalPropertyCell::kSize) * JSGlobalPropertyCell::kSize;
size 2639 src/spaces.h int size;
size 2643 src/spaces.h size = 0;
size 65 src/splay-tree.h INLINE(void* operator new(size_t size,
size 67 src/splay-tree.h return allocator.New(static_cast<int>(size));
size 123 src/splay-tree.h INLINE(void* operator new(size_t size, AllocationPolicy allocator)) {
size 124 src/splay-tree.h return allocator.New(static_cast<int>(size));
size 88 src/store-buffer.cc virtual_memory_->size());
size 48 src/string-stream.cc size_ = size;
size 64 src/string-stream.h NoAllocationStringAllocator(char* memory, unsigned size);
size 3956 src/third_party/valgrind/valgrind.h pool, addr, size, 0, 0)
size 3968 src/third_party/valgrind/valgrind.h pool, addr, size, 0, 0)
size 3980 src/third_party/valgrind/valgrind.h pool, addrA, addrB, size, 0)
size 77 src/unicode.cc unsigned int high = size - 1;
size 84 src/unicode.cc (mid + 1 == size ||
size 132 src/unicode.cc unsigned int high = size - 1;
size 139 src/unicode.cc (mid + 1 == size ||
size 49 src/unicode.h template <class T, int size = 256>
size 65 src/unicode.h static const int kSize = size;
size 74 src/unicode.h template <class T, int size = 256>
size 91 src/unicode.h static const int kSize = size;
size 38 src/utils.cc buffer_ = Vector<char>::New(size);
size 244 src/utils.h template<class T, int shift, int size>
size 250 src/utils.h static const uint32_t kMask = ((1U << shift) << size) - (1U << shift);
size 253 src/utils.h static const T kMax = static_cast<T>((1U << size) - 1);
size 578 src/utils.h inline Vector<T> AddBlock(int size, T initial_value) {
size 579 src/utils.h ASSERT(size > 0);
size 580 src/utils.h if (size > current_chunk_.length() - index_) {
size 581 src/utils.h Grow(size);
size 584 src/utils.h index_ += size;
size 585 src/utils.h size_ += size;
size 586 src/utils.h for (int i = 0; i < size; i++) {
size 589 src/utils.h return Vector<T>(position, size);
size 642 src/utils.h inline int size() { return size_; }
size 896 src/utils.h explicit SimpleStringBuilder(int size);
size 898 src/utils.h SimpleStringBuilder(char* buffer, int size)
size 899 src/utils.h : buffer_(buffer, size), position_(0) { }
size 903 src/utils.h int size() const { return buffer_.length(); }
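
The utils.h template lines above (shift and size parameters, kMask, kMax) describe a bit-field helper: kMask covers bits [shift, shift + size) and kMax is the largest value that fits. A minimal sketch with encode/decode added for illustration; BitFieldSketch is not a copy of V8's class.

// Sketch only: mask and maximum derived from a (shift, size) pair.
#include <cassert>
#include <cstdint>

template <class T, int shift, int size>
class BitFieldSketch {
 public:
  // Mask covering bits [shift, shift + size) of a 32-bit word.
  static const uint32_t kMask = ((1U << shift) << size) - (1U << shift);
  // Largest value that fits in the field.
  static const T kMax = static_cast<T>((1U << size) - 1);

  static uint32_t encode(T value) {
    assert(static_cast<uint32_t>(value) <= static_cast<uint32_t>(kMax));
    return static_cast<uint32_t>(value) << shift;
  }
  static T decode(uint32_t word) {
    return static_cast<T>((word & kMask) >> shift);
  }
};

int main() {
  typedef BitFieldSketch<int, 4, 3> Field;  // 3 bits starting at bit 4.
  assert(Field::kMax == 7);
  assert(Field::decode(Field::encode(5)) == 5);
  return 0;
}
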
size 62 src/v8checks.h #define ASSERT_SIZE_TAG_ALIGNED(size) ASSERT((size & HeapObjectTagMask()) == 0)
size 412 src/v8globals.h void* operator new(size_t size) { \
size 413 src/v8globals.h void* result = ::operator new(size); \
size 414 src/v8globals.h Logger::NewEventStatic(name, result, size); \
size 135 src/v8utils.cc *size = ftell(file);
size 138 src/v8utils.cc char* result = NewArray<char>(*size + extra_space);
size 139 src/v8utils.cc for (int i = 0; i < *size && feof(file) == 0;) {
size 140 src/v8utils.cc int read = static_cast<int>(fread(&result[i], 1, *size - i, file));
size 141 src/v8utils.cc if (read != (*size - i) && ferror(file) != 0) {
size 157 src/v8utils.cc char* result = ReadCharsFromFile(file, size, extra_space, verbose, filename);
size 164 src/v8utils.cc char* chars = ReadCharsFromFile(filename, size, 0, verbose);
size 176 src/v8utils.cc chars[size] = '\0';
size 178 src/v8utils.cc return Vector<const char>(chars, size);
size 185 src/v8utils.cc int size;
size 186 src/v8utils.cc char* result = ReadCharsFromFile(filename, &size, 1, verbose);
size 187 src/v8utils.cc return SetVectorContents(result, size, exists);
size 194 src/v8utils.cc int size;
size 195 src/v8utils.cc char* result = ReadCharsFromFile(file, &size, 1, verbose, "");
size 196 src/v8utils.cc return SetVectorContents(result, size, exists);
size 202 src/v8utils.cc while (total < size) {
size 203 src/v8utils.cc int write = static_cast<int>(fwrite(str, 1, size - total, f));
size 225 src/v8utils.cc int written = WriteCharsToFile(str, size, f);
size 242 src/v8utils.cc int written = WriteCharsToFile(str, size, f);
size 253 src/v8utils.cc return WriteChars(filename, str, size, verbose);
size 314 src/v8utils.cc length_ = file_->size();
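
The v8utils.cc lines above measure a file with ftell, allocate size + extra space, and then loop over fread because a single call may return fewer bytes than requested. A minimal standalone sketch of that pattern; ReadFileIntoBuffer is an illustrative helper, not a V8 API, and the caller owns the returned buffer via delete[].

// Sketch only: read a whole file into a NUL-terminated buffer,
// tolerating short reads from fread.
#include <cstdio>

char* ReadFileIntoBuffer(const char* filename, int* size) {
  FILE* file = fopen(filename, "rb");
  if (file == NULL) return NULL;

  fseek(file, 0, SEEK_END);
  *size = static_cast<int>(ftell(file));
  rewind(file);

  char* chars = new char[*size + 1];
  chars[*size] = '\0';
  for (int i = 0; i < *size;) {
    int read = static_cast<int>(fread(&chars[i], 1, *size - i, file));
    if (read <= 0) {  // Short file or I/O error; give up.
      fclose(file);
      delete[] chars;
      return NULL;
    }
    i += read;
  }
  fclose(file);
  return chars;
}
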
size 79 src/v8utils.h byte* ReadBytes(const char* filename, int* size, bool verbose = true);
size 86 src/v8utils.h int size,
size 94 src/v8utils.h int size,
size 102 src/v8utils.h int size,
size 111 src/v8utils.h const char* str, int size, bool verbose = true);
size 268 src/v8utils.h explicit StringBuilder(int size) : SimpleStringBuilder(size) { }
size 269 src/v8utils.h StringBuilder(char* buffer, int size) : SimpleStringBuilder(buffer, size) { }
size 66 src/x64/assembler-x64.cc ASSERT(memory->size() >= static_cast<size_t>(kBufferSize));
size 1082 src/x64/assembler-x64.cc emitw(size.value_); // 16 bit operand, always.
size 678 src/x64/assembler-x64.h void enter(Immediate size);
size 984 src/x64/builtins-x64.cc int size = JSArray::kSize;
size 986 src/x64/builtins-x64.cc size += FixedArray::SizeFor(initial_capacity);
size 988 src/x64/builtins-x64.cc __ AllocateInNewSpace(size,
size 320 src/x64/code-stubs-x64.cc int size = JSArray::kSize + elements_size;
size 324 src/x64/code-stubs-x64.cc __ AllocateInNewSpace(size, rax, rbx, rdx, fail, TAG_OBJECT);
size 459 src/x64/code-stubs-x64.cc int size = JSObject::kHeaderSize + length_ * kPointerSize;
size 462 src/x64/code-stubs-x64.cc __ cmpq(rax, Immediate(size >> kPointerSizeLog2));
size 467 src/x64/code-stubs-x64.cc __ AllocateInNewSpace(size, rax, rbx, rdx, &slow_case, TAG_OBJECT);
size 468 src/x64/code-stubs-x64.cc for (int i = 0; i < size; i += kPointerSize) {
size 70 src/x64/cpu-x64.cc unsigned res = VALGRIND_DISCARD_TRANSLATIONS(start, size);
size 561 src/x64/disasm-x64.cc switch (size) {
size 1508 src/x64/full-codegen-x64.cc int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
size 1510 src/x64/full-codegen-x64.cc __ AllocateInNewSpace(size, rax, rcx, rdx, &runtime_allocate, TAG_OBJECT);
size 1515 src/x64/full-codegen-x64.cc __ Push(Smi::FromInt(size));
size 1522 src/x64/full-codegen-x64.cc for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
size 1528 src/x64/full-codegen-x64.cc if ((size % (2 * kPointerSize)) != 0) {
size 1529 src/x64/full-codegen-x64.cc __ movq(rdx, FieldOperand(rbx, size - kPointerSize));
size 1530 src/x64/full-codegen-x64.cc __ movq(FieldOperand(rax, size - kPointerSize), rdx);
size 4555 src/x64/lithium-codegen-x64.cc int size = instr->hydrogen()->total_size();
size 4578 src/x64/lithium-codegen-x64.cc __ AllocateInNewSpace(size, rax, rcx, rdx, &runtime_allocate, TAG_OBJECT);
size 4582 src/x64/lithium-codegen-x64.cc __ Push(Smi::FromInt(size));
size 4589 src/x64/lithium-codegen-x64.cc ASSERT_EQ(size, offset);
size 4654 src/x64/lithium-codegen-x64.cc int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
size 4656 src/x64/lithium-codegen-x64.cc __ AllocateInNewSpace(size, rax, rcx, rdx, &runtime_allocate, TAG_OBJECT);
size 4661 src/x64/lithium-codegen-x64.cc __ Push(Smi::FromInt(size));
size 4668 src/x64/lithium-codegen-x64.cc for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
size 4674 src/x64/lithium-codegen-x64.cc if ((size % (2 * kPointerSize)) != 0) {
size 4675 src/x64/lithium-codegen-x64.cc __ movq(rdx, FieldOperand(rbx, size - kPointerSize));
size 4676 src/x64/lithium-codegen-x64.cc __ movq(FieldOperand(rax, size - kPointerSize), rdx);
size 141 src/x64/macro-assembler-x64.cc int size = 4;
size 143 src/x64/macro-assembler-x64.cc size += 3; // Need full four-byte displacement in lea.
size 145 src/x64/macro-assembler-x64.cc return size;
size 89 src/x64/macro-assembler-x64.h MacroAssembler(Isolate* isolate, void* buffer, int size);
size 1413 src/x64/macro-assembler-x64.h CodePatcher(byte* address, int size);
size 3828 src/x64/stub-cache-x64.cc int size = FixedArray::SizeFor(JSArray::kPreallocatedArrayElements);
size 3829 src/x64/stub-cache-x64.cc __ AllocateInNewSpace(size, rdi, rbx, r8, &slow, TAG_OBJECT);
size 3961 src/x64/stub-cache-x64.cc int size = FixedDoubleArray::SizeFor(JSArray::kPreallocatedArrayElements);
size 3962 src/x64/stub-cache-x64.cc __ AllocateInNewSpace(size, rdi, rbx, r8, &slow, TAG_OBJECT);
size 42 src/zone-inl.h inline void* Zone::New(int size) {
size 45 src/zone-inl.h size = RoundUp(size, kAlignment);
size 50 src/zone-inl.h position_ += ((~size) & 4) & (reinterpret_cast<intptr_t>(position_) & 4);
size 58 src/zone-inl.h if (size > limit_ - position_) {
size 59 src/zone-inl.h result = NewExpand(size);
size 61 src/zone-inl.h position_ += size;
size 66 src/zone-inl.h allocation_size_ += size;
size 97 src/zone-inl.h void* ZoneObject::operator new(size_t size, Zone* zone) {
size 98 src/zone-inl.h return zone->New(static_cast<int>(size));
size 101 src/zone-inl.h inline void* ZoneAllocationPolicy::New(size_t size) {
size 103 src/zone-inl.h return zone_->New(static_cast<int>(size));
size 108 src/zone-inl.h void* ZoneList<T>::operator new(size_t size, Zone* zone) {
size 109 src/zone-inl.h return zone->New(static_cast<int>(size));
size 47 src/zone.cc size_ = size;
size 90 src/zone.cc Segment* result = reinterpret_cast<Segment*>(Malloced::New(size));
size 91 src/zone.cc adjust_segment_bytes_allocated(size);
size 93 src/zone.cc result->Initialize(segment_head_, size);
size 102 src/zone.cc adjust_segment_bytes_allocated(-size);
size 115 src/zone.cc while (keep != NULL && keep->size() > kMaximumKeptSegmentSize) {
size 128 src/zone.cc int size = current->size();
size 131 src/zone.cc memset(current, kZapDeadByte, size);
size 133 src/zone.cc DeleteSegment(current, size);
size 161 src/zone.cc DeleteSegment(segment_head_, segment_head_->size());
size 170 src/zone.cc ASSERT(size == RoundDown(size, kAlignment));
size 171 src/zone.cc ASSERT(size > limit_ - position_);
size 178 src/zone.cc int old_size = (head == NULL) ? 0 : head->size();
size 180 src/zone.cc int new_size_no_overhead = size + (old_size << 1);
size 183 src/zone.cc if (new_size_no_overhead < size || new_size < kSegmentOverhead) {
size 194 src/zone.cc new_size = Max(kSegmentOverhead + size, kMaximumSegmentSize);
size 204 src/zone.cc position_ = result + size;
size 71 src/zone.h inline void* New(int size);
size 123 src/zone.h Address NewExpand(int size);
size 127 src/zone.h Segment* NewSegment(int size);
size 130 src/zone.h void DeleteSegment(Segment* segment, int size);
size 150 src/zone.h INLINE(void* operator new(size_t size, Zone* zone));
size 170 src/zone.h INLINE(void* New(size_t size));
size 190 src/zone.h INLINE(void* operator new(size_t size, Zone* zone));
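
The zone-inl.h and zone.cc lines above show bump-pointer allocation: the request is rounded up to the alignment, served by advancing position_ while it fits under limit_, and otherwise a new segment is acquired. Below is a minimal sketch of that scheme under a plain doubling growth heuristic; ZoneSketch is illustrative, not V8's Zone, and error handling is omitted.

// Sketch only: segmented bump-pointer allocator.  All memory is released
// at once when the zone is destroyed.
#include <cstddef>
#include <cstdlib>
#include <vector>

class ZoneSketch {
 public:
  static const size_t kAlignment = 8;

  ZoneSketch() : position_(NULL), limit_(NULL), last_size_(0) { }
  ~ZoneSketch() {
    for (size_t i = 0; i < segments_.size(); ++i) free(segments_[i]);
  }

  void* New(size_t size) {
    size = RoundUp(size, kAlignment);
    if (position_ == NULL || size > static_cast<size_t>(limit_ - position_)) {
      Expand(size);
    }
    void* result = position_;
    position_ += size;  // Bump the pointer; no per-object bookkeeping.
    return result;
  }

 private:
  // Rounds x up to a multiple of m; m must be a power of two.
  static size_t RoundUp(size_t x, size_t m) { return (x + m - 1) & ~(m - 1); }

  void Expand(size_t size) {
    size_t segment_size = segments_.empty() ? 8 * 1024 : last_size_ * 2;
    if (segment_size < size) segment_size = size;  // size is already aligned.
    char* segment = static_cast<char*>(malloc(segment_size));
    segments_.push_back(segment);
    last_size_ = segment_size;
    position_ = segment;
    limit_ = segment + segment_size;
  }

  char* position_;
  char* limit_;
  size_t last_size_;
  std::vector<char*> segments_;
};
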
size 193 test/cctest/test-alloc.cc int size;
size 226 test/cctest/test-alloc.cc code_range->FreeRawMemory(blocks[index].base, blocks[index].size);
size 227 test/cctest/test-alloc.cc current_allocated -= blocks[index].size;
size 13034 test/cctest/test-api.cc for (int size = 0; size < 100; size += 10) {
size 13035 test/cctest/test-api.cc uint8_t* pixel_data = reinterpret_cast<uint8_t*>(malloc(size));
size 13037 test/cctest/test-api.cc obj->SetIndexedPropertiesToPixelData(pixel_data, size);
size 13040 test/cctest/test-api.cc CHECK_EQ(size, obj->GetIndexedPropertiesPixelDataLength());
size 13701 test/cctest/test-api.cc for (int size = 0; size < 100; size += 10) {
size 13703 test/cctest/test-api.cc void* external_data = malloc(size * element_size);
size 13706 test/cctest/test-api.cc external_data, array_type, size);
size 13710 test/cctest/test-api.cc CHECK_EQ(size, obj->GetIndexedPropertiesExternalArrayDataLength());
size 14218 test/cctest/test-api.cc uint32_t* answer = &size - (size / sizeof(size));
size 14223 test/cctest/test-api.cc if (answer > &size) return reinterpret_cast<uint32_t*>(sizeof(size));
size 131 test/cctest/test-hashmap.cc const uint32_t n = size;
size 507 test/cctest/test-heap-profiler.cc int size() { return buffer_.size(); }
size 544 test/cctest/test-heap-profiler.cc CHECK_GT(stream.size(), 0);
size 546 test/cctest/test-heap-profiler.cc i::ScopedVector<char> json(stream.size());
size 638 test/cctest/test-heap-profiler.cc CHECK_GT(stream.size(), 0);
size 677 test/cctest/test-heap-profiler.cc entries_size_ += buffer[i].size;
size 793 test/cctest/test-heap.cc for (int i = 0; i < size; i++) {
size 858 test/cctest/test-heap.cc return (size - FixedArray::kHeaderSize) / kPointerSize;
size 40 test/cctest/test-list.cc size_t true_size = size + sizeof(size_t);
size 574 test/cctest/test-serialize.cc for (int size = 1000; size < 5 * MB; size += size >> 1) {
size 575 test/cctest/test-serialize.cc size &= ~8; // Round.
size 576 test/cctest/test-serialize.cc int new_space_size = (size < new_space_max) ? size : new_space_max;
size 577 test/cctest/test-serialize.cc int paged_space_size = (size < paged_space_max) ? size : paged_space_max;
size 585 test/cctest/test-serialize.cc size); // Large object space.
size 667 test/cctest/test-serialize.cc if (size > Page::kMaxNonCodeHeapObjectSize) {
size 672 test/cctest/test-serialize.cc (size - FixedArray::kHeaderSize) / kPointerSize;
size 365 tools/gcmole/gcmole.cc int new_code = symbol_table_.size();
size 1206 tools/gcmole/gcmole.cc for (unsigned i = 0; i < args.size(); ++i) {
size 75 tools/oom_dump/oom_dump.cc for (int i = 0; i < size; i++) {
size 90 tools/oom_dump/oom_dump.cc ReadArray(region, ptr, size, output);
size 95 tools/oom_dump/oom_dump.cc return size / (1024. * 1024.);
size 267 tools/oom_dump/oom_dump.cc int size = size_per_type[type];
size 268 tools/oom_dump/oom_dump.cc running_size += size;
size 270 tools/oom_dump/oom_dump.cc name, objects_per_type[type], toM(size),
size 271 tools/oom_dump/oom_dump.cc 100. * size / total_size, 100. * running_size / total_size);