uint64_t 427 MMgc/GC-inlines.h REALLY_INLINE double GC::duration(uint64_t start)
uint64_t 433 MMgc/GC-inlines.h REALLY_INLINE uint64_t GC::ticksToMicros(uint64_t ticks)
uint64_t 439 MMgc/GC-inlines.h REALLY_INLINE uint64_t GC::ticksToMillis(uint64_t ticks)
uint64_t 444 MMgc/GC-inlines.h REALLY_INLINE uint64_t GC::bytesMarked()
uint64_t 449 MMgc/GC-inlines.h REALLY_INLINE uint64_t GC::markTicks()
uint64_t 65 MMgc/GC.cpp inline uint64_t max(uint64_t a, uint64_t b) { return a > b ? a : b; }
uint64_t 404 MMgc/GC.cpp inline uint64_t GCPolicyManager::now() {
uint64_t 413 MMgc/GC.cpp inline double GCPolicyManager::ticksToMillis(uint64_t ticks) {
uint64_t 422 MMgc/GC.cpp uint64_t GCPolicyManager::bytesMarked() {
uint64_t 426 MMgc/GC.cpp uint64_t GCPolicyManager::objectsMarked() {
uint64_t 500 MMgc/GC.cpp uint64_t t = now();
uint64_t 501 MMgc/GC.cpp uint64_t elapsed = t - start_time;
uint64_t 916 MMgc/GC.cpp MMGC_STATIC_ASSERT(sizeof(uint64_t) == 8);
uint64_t 3005 MMgc/GC.cpp uint64_t start = VMPI_getPerformanceCounter();
uint64_t 3007 MMgc/GC.cpp uint64_t numObjects=policy.objectsMarked();
uint64_t 3008 MMgc/GC.cpp uint64_t objSize=policy.bytesMarked();
uint64_t 3010 MMgc/GC.cpp uint64_t ticks = start + time * VMPI_getPerformanceFrequency() / 1000;
uint64_t 285 MMgc/GC.h uint64_t objectsMarked();
uint64_t 290 MMgc/GC.h uint64_t bytesMarked();
uint64_t 464 MMgc/GC.h uint64_t timeStartIncrementalMark;
uint64_t 465 MMgc/GC.h uint64_t timeIncrementalMark;
uint64_t 466 MMgc/GC.h uint64_t timeFinalRootAndStackScan;
uint64_t 467 MMgc/GC.h uint64_t timeFinalizeAndSweep;
uint64_t 468 MMgc/GC.h uint64_t timeReapZCT;
uint64_t 474 MMgc/GC.h uint64_t timeInLastCollection;
uint64_t 475 MMgc/GC.h uint64_t timeEndToEndLastCollection;
uint64_t 479 MMgc/GC.h uint64_t timeReapZCTLastCollection;
uint64_t 482 MMgc/GC.h uint64_t timeMaxStartIncrementalMark;
uint64_t 483 MMgc/GC.h uint64_t timeMaxIncrementalMark;
uint64_t 484 MMgc/GC.h uint64_t timeMaxFinalRootAndStackScan;
uint64_t 485 MMgc/GC.h uint64_t timeMaxFinalizeAndSweep;
uint64_t 486 MMgc/GC.h uint64_t timeMaxReapZCT;
uint64_t 489 MMgc/GC.h uint64_t timeMaxStartIncrementalMarkLastCollection;
uint64_t 490 MMgc/GC.h uint64_t timeMaxIncrementalMarkLastCollection;
uint64_t 491 MMgc/GC.h uint64_t timeMaxFinalRootAndStackScanLastCollection;
uint64_t 492 MMgc/GC.h uint64_t timeMaxFinalizeAndSweepLastCollection;
uint64_t 493 MMgc/GC.h uint64_t timeMaxReapZCTLastCollection;
uint64_t 496 MMgc/GC.h uint64_t countStartIncrementalMark;
uint64_t 497 MMgc/GC.h uint64_t countIncrementalMark;
uint64_t 498 MMgc/GC.h uint64_t countFinalRootAndStackScan;
uint64_t 499 MMgc/GC.h uint64_t countFinalizeAndSweep;
uint64_t 500 MMgc/GC.h uint64_t countReapZCT;
uint64_t 511 MMgc/GC.h uint64_t now();
uint64_t 515 MMgc/GC.h double ticksToMillis(uint64_t ticks);
uint64_t 563 MMgc/GC.h uint64_t timeEndOfLastIncrementalMark;
uint64_t 566 MMgc/GC.h uint64_t timeStartOfLastCollection;
uint64_t 569 MMgc/GC.h uint64_t timeEndOfLastCollection;
uint64_t 578 MMgc/GC.h uint64_t objectsScannedTotal;
uint64_t 585 MMgc/GC.h uint64_t bytesScannedTotal;
uint64_t 591 MMgc/GC.h uint64_t start_time;
uint64_t 614 MMgc/GC.h uint64_t barrierStageTotal[2];
uint64_t 618 MMgc/GC.h uint64_t objectsReaped;
uint64_t 619 MMgc/GC.h uint64_t bytesReaped;
uint64_t 620 MMgc/GC.h uint64_t objectsPinned;
uint64_t 623 MMgc/GC.h uint64_t objectsAllocated;
uint64_t 624 MMgc/GC.h uint64_t bytesAllocated;
uint64_t 632 MMgc/GC.h uint64_t candidateWords;
uint64_t 633 MMgc/GC.h uint64_t couldBePointer;
uint64_t 634 MMgc/GC.h uint64_t actuallyIsPointer;
uint64_t 637 MMgc/GC.h uint64_t incrementRefTotal;
uint64_t 638 MMgc/GC.h uint64_t incrementRefLastCollection;
uint64_t 639 MMgc/GC.h uint64_t decrementRefTotal;
uint64_t 640 MMgc/GC.h uint64_t decrementRefLastCollection;
uint64_t 642 MMgc/GC.h uint64_t addZCTTotal;
uint64_t 643 MMgc/GC.h uint64_t addZCTLastCollection;
uint64_t 644 MMgc/GC.h uint64_t removeZCTTotal;
uint64_t 645 MMgc/GC.h uint64_t removeZCTLastCollection;
uint64_t 646 MMgc/GC.h uint64_t addZCTInitialTotal;
uint64_t 647 MMgc/GC.h uint64_t removeZCTFinalTotal;
uint64_t 688 MMgc/GC.h uint64_t adjustR_startTime;
uint64_t 689 MMgc/GC.h uint64_t adjustR_totalTime;
uint64_t 1241 MMgc/GC.h const uint64_t t0;
uint64_t 1245 MMgc/GC.h static double duration(uint64_t start);
uint64_t 1248 MMgc/GC.h static uint64_t ticksToMicros(uint64_t ticks);
uint64_t 1250 MMgc/GC.h static uint64_t ticksToMillis(uint64_t ticks);
uint64_t 1257 MMgc/GC.h uint64_t bytesMarked();
uint64_t 1263 MMgc/GC.h uint64_t markTicks();
uint64_t 1277 MMgc/GC.h uint64_t sweepStart;
uint64_t 344 MMgc/GCGlobalNew.h DECLARE_PRIM_ARRAY(uint64_t)
uint64_t 495 MMgc/GCHeap.h uint64_t total = (uint64_t)size + (uint64_t)extra;
uint64_t 500 MMgc/GCHeap.h if ((total > (uint64_t)kMaxObjectSize) || (total < size) || (total < extra))
uint64_t 504 MMgc/GCHeap.h if (total > (uint64_t)kMaxObjectSize)
uint64_t 537 MMgc/GCHeap.h uint64_t total = (uint64_t)elsize * (uint64_t)count;
uint64_t 541 MMgc/GCHeap.h || total > (uint64_t)kMaxObjectSize)
uint64_t 544 MMgc/GCHeap.h if(total > (uint64_t)kMaxObjectSize)
uint64_t 563 MMgc/GCMemoryProfiler.cpp uint64_t count;
uint64_t 312 MMgc/ZCT.cpp uint64_t start = VMPI_getPerformanceCounter();
uint64_t 206 VMPI/MMgcPortMac.cpp static uint64_t unused_value = VMPI_getPerformanceFrequency();
uint64_t 208 VMPI/MMgcPortMac.cpp uint64_t VMPI_getPerformanceFrequency()
uint64_t 216 VMPI/MMgcPortMac.cpp static uint64_t frequency = 0;
uint64_t 219 VMPI/MMgcPortMac.cpp frequency = (uint64_t) ( 1e9 / ((double) info.numer / (double) info.denom) );
uint64_t 231 VMPI/MMgcPortMac.cpp uint64_t VMPI_getPerformanceCounter()
uint64_t 304 VMPI/MMgcPortSymbian.cpp uint64_t VMPI_getPerformanceFrequency()
uint64_t 308 VMPI/MMgcPortSymbian.cpp return (uint64_t)tickPeriod;
uint64_t 311 VMPI/MMgcPortSymbian.cpp uint64_t VMPI_getPerformanceCounter()
uint64_t 319 VMPI/MMgcPortUnix.cpp uint64_t VMPI_getPerformanceFrequency()
uint64_t 324 VMPI/MMgcPortUnix.cpp uint64_t VMPI_getPerformanceCounter()
uint64_t 327 VMPI/MMgcPortUnix.cpp uint64_t retval = gethrtime();
uint64_t 333 VMPI/MMgcPortUnix.cpp uint64_t seconds = (uint64_t)tv.tv_sec * 1000000;
uint64_t 334 VMPI/MMgcPortUnix.cpp uint64_t microseconds = (uint64_t)tv.tv_usec;
uint64_t 335 VMPI/MMgcPortUnix.cpp uint64_t result = seconds + microseconds;
uint64_t 282 VMPI/MMgcPortWin.cpp static uint64_t unused_value = VMPI_getPerformanceFrequency();
uint64_t 284 VMPI/MMgcPortWin.cpp uint64_t VMPI_getPerformanceFrequency()
uint64_t 291 VMPI/MMgcPortWin.cpp static uint64_t gPerformanceFrequency = 0;
uint64_t 298 VMPI/MMgcPortWin.cpp uint64_t VMPI_getPerformanceCounter()
uint64_t 123 VMPI/PosixPortUtils.cpp uint64_t VMPI_getTime()
uint64_t 127 VMPI/PosixPortUtils.cpp uint64_t result = (tv.tv_sec * 1000) + (tv.tv_usec / 1000);
uint64_t 43 VMPI/SpyUtilsSymbian.cpp uint64_t spyLastTime = 0;
uint64_t 47 VMPI/SpyUtilsSymbian.cpp uint64_t time = VMPI_getTime();
uint64_t 171 VMPI/SymbianPortUtils.cpp uint64_t VMPI_getTime()
uint64_t 175 VMPI/SymbianPortUtils.cpp uint64_t result = (tv.tv_sec * 1000) + (tv.tv_usec / 1000);
uint64_t 107 VMPI/VMPI.h typedef uint64_t uint64;
uint64_t 151 VMPI/VMPI.h extern uint64_t VMPI_getTime();
uint64_t 306 VMPI/VMPI.h extern uint64_t VMPI_getPerformanceFrequency();
uint64_t 315 VMPI/VMPI.h extern uint64_t VMPI_getPerformanceCounter();
uint64_t 217 VMPI/WinPortUtils.cpp uint64_t VMPI_getTime()
uint64_t 69 core/AbcEnv-inlines.h REALLY_INLINE uint64_t& AbcEnv::invocationCount(uint32_t i)
uint64_t 59 core/AbcEnv.cpp m_invocationCounts = (uint64_t*)_pool->core->GetGC()->Alloc(_pool->methodCount() * sizeof(uint64_t), MMgc::GC::kZero);
uint64_t 63 core/AbcEnv.h uint64_t& invocationCount(uint32_t i);
uint64_t 86 core/AbcEnv.h DWB(uint64_t*) m_invocationCounts; // actual size will hold pool->methodCount methods, only allocated if debugger exists
uint64_t 724 core/CodegenLIR.cpp LIns *insImmq(uint64_t i) {
uint64_t 2719 core/CodegenLIR.cpp uint64_t *pquad = (uint64_t*) pd;
uint64_t 2820 core/Interpreter.cpp const uint8_t *target = (const uint8_t *)((uint64_t(target_hi) << 32) | uint64_t(target_lo));
uint64_t 641 core/MethodEnv.cpp uint64_t MethodEnv::invocationCount() const
uint64_t 302 core/MethodEnv.h uint64_t invocationCount() const;
uint64_t 125 core/PrintWriter.h PrintWriter& operator<< (uint64_t value);
uint64_t 176 core/Sampler.h uint64 recordAllocationSample(const void* item, uint64_t size, bool callback_ok = true, bool forceWrite = false);
uint64_t 177 core/Sampler.h void recordDeallocationSample(const void* item, uint64_t size);
uint64_t 223 core/Sampler.h uint64_t allocId;
uint64_t 110 core/StackTrace.cpp void CallStackNode::init(AvmCore* core, uint64_t functionId, int32_t lineno)
uint64_t 119 core/StackTrace.h inline explicit CallStackNode(AvmCore* core, uint64_t functionId, int32_t lineno)
uint64_t 124 core/StackTrace.h void init(AvmCore* core, uint64_t functionId, int32_t lineno);
uint64_t 176 core/StackTrace.h inline uint64_t functionId() const { return m_functionId; }
uint64_t 199 core/StackTrace.h private: uint64_t m_functionId; // int used to uniquely identify function calls in external scripting languages
uint64_t 247 core/StackTrace.h uint64_t m_functionId;
uint64_t 281 core/StackTrace.h inline uint64_t functionId() const { return isAS3Sample() ? 0 : m_functionId; }
uint64_t 318 core/StringObject.cpp AvmAssert((uint64_t(m_length) << getWidth()) <= 0x7FFFFFFFU);
uint64_t 335 core/StringObject.cpp AvmAssert((uint64_t(m_length) << getWidth()) <= 0x7FFFFFFFU);
uint64_t 367 core/StringObject.cpp AvmAssert((uint64_t(m_length) << getWidth()) <= 0x7FFFFFFFU);
uint64_t 389 core/StringObject.cpp uint64_t x = uint64_t(a) + uint64_t(b);
uint64_t 405 core/StringObject.cpp uint64_t x = uint64_t(a) << uint64_t(b);
uint64_t 417 core/StringObject.cpp uint64_t x = uint64_t(a) << uint64_t(b);
uint64_t 65 core/VTable.h typedef uint64_t GprImtThunkProcRetType;
uint64_t 2910 core/Verifier.cpp core->console << (uint64_t)state->pc << ':';
uint64_t 1641 eval/eval-lex.cpp uint64_t bits = 0;
uint64_t 1696 eval/eval-lex.cpp uint64_t lost = bits & 0xFFF;
uint64_t 1708 eval/eval-lex.cpp bits &= ~(uint64_t)0 >> 12;
uint64_t 1712 eval/eval-lex.cpp bits |= (uint64_t)(1023 + scale) << 52;
uint64_t 1714 eval/eval-lex.cpp uint64_t bits;
uint64_t 164 nanojit/Containers.h const uint64_t m = 0xc6a4a7935bd1e995;
uint64_t 166 nanojit/Containers.h uint64_t h = 0;
uint64_t 168 nanojit/Containers.h const uint64_t *data = (const uint64_t*)key;
uint64_t 169 nanojit/Containers.h const uint64_t *end = data + (len/8);
uint64_t 173 nanojit/Containers.h uint64_t k = *data++;
uint64_t 186 nanojit/Containers.h case 7: h ^= uint64_t(data2[6]) << 48;
uint64_t 187 nanojit/Containers.h case 6: h ^= uint64_t(data2[5]) << 40;
uint64_t 188 nanojit/Containers.h case 5: h ^= uint64_t(data2[4]) << 32;
uint64_t 189 nanojit/Containers.h case 4: h ^= uint64_t(data2[3]) << 24;
uint64_t 190 nanojit/Containers.h case 3: h ^= uint64_t(data2[2]) << 16;
uint64_t 191 nanojit/Containers.h case 2: h ^= uint64_t(data2[1]) << 8;
uint64_t 192 nanojit/Containers.h case 1: h ^= uint64_t(data2[0]);
uint64_t 330 nanojit/LIR.cpp LInsp LirBufWriter::insImmq(uint64_t imm)
uint64_t 344 nanojit/LIR.cpp uint64_t q;
uint64_t 569 nanojit/LIR.cpp uint64_t q;
uint64_t 573 nanojit/LIR.cpp q = c1 | uint64_t(c2)<<32;
uint64_t 910 nanojit/LIR.cpp return insImmq((uint64_t)ptr);
uint64_t 1117 nanojit/LIR.cpp inline uint32_t LInsHashSet::hashImmq(uint64_t a) {
uint64_t 1125 nanojit/LIR.cpp uint64_t u64;
uint64_t 1215 nanojit/LIR.cpp LInsp LInsHashSet::findImmq(uint64_t a, uint32_t &k)
uint64_t 1245 nanojit/LIR.cpp uint64_t u64;
uint64_t 1988 nanojit/LIR.cpp LIns* CseFilter::insImmq(uint64_t q)
uint64_t 367 nanojit/LIR.h inline uint64_t imm64() const;
uint64_t 902 nanojit/LIR.h uint64_t LIns::imm64() const {
uint64_t 904 nanojit/LIR.h return (uint64_t(toLInsN64()->imm64_1) << 32) | uint32_t(toLInsN64()->imm64_0);
uint64_t 909 nanojit/LIR.h uint64_t q;
uint64_t 999 nanojit/LIR.h virtual LInsp insImmq(uint64_t imm) {
uint64_t 1205 nanojit/LIR.h LIns* insImmq(uint64_t imm) {
uint64_t 1265 nanojit/LIR.h static uint32_t hashImmq(uint64_t);
uint64_t 1292 nanojit/LIR.h LInsp findImmq(uint64_t a, uint32_t &k);
uint64_t 1315 nanojit/LIR.h LIns* insImmq(uint64_t q);
uint64_t 1385 nanojit/LIR.h LInsp insImmq(uint64_t imm);
uint64_t 305 nanojit/NativePPC.cpp void Assembler::asm_li64(Register r, uint64_t imm) {
uint64_t 333 nanojit/NativePPC.cpp uint64_t q = value->imm64();
uint64_t 559 nanojit/NativePPC.cpp asm_li64(R0, uint64_t(targ));
uint64_t 1287 nanojit/NativePPC.cpp uint64_t imm = uintptr_t(target);
uint64_t 1378 nanojit/NativePPC.cpp asm_li64(R2, uint64_t(native_table)); // R2 = table (5 instr)
uint64_t 247 nanojit/NativePPC.h typedef uint64_t RegisterMask;
uint64_t 290 nanojit/NativePPC.h void asm_li64(Register r, uint64_t imm); \
uint64_t 127 nanojit/NativeX64.cpp static inline int oplen(uint64_t op) {
uint64_t 132 nanojit/NativeX64.cpp static inline uint64_t rexrb(uint64_t op, Register r, Register b) {
uint64_t 134 nanojit/NativeX64.cpp uint64_t rex = ((op >> shift) & 255) | ((r&8)>>1) | ((b&8)>>3);
uint64_t 139 nanojit/NativeX64.cpp static inline uint64_t rexrxb(uint64_t op, Register r, Register x, Register b) {
uint64_t 141 nanojit/NativeX64.cpp uint64_t rex = ((op >> shift) & 255) | ((r&8)>>1) | ((x&8)>>2) | ((b&8)>>3);
uint64_t 147 nanojit/NativeX64.cpp static inline uint64_t rexrb8(uint64_t op, Register r, Register b) {
uint64_t 149 nanojit/NativeX64.cpp uint64_t rex = ((op >> shift) & 255) | ((r&8)>>1) | ((b&8)>>3);
uint64_t 155 nanojit/NativeX64.cpp static inline uint64_t rexprb(uint64_t op, Register r, Register b) {
uint64_t 157 nanojit/NativeX64.cpp uint64_t rex = ((op >> shift) & 255) | ((r&8)>>1) | ((b&8)>>3);
uint64_t 164 nanojit/NativeX64.cpp static inline uint64_t mod_rr(uint64_t op, Register r, Register b) {
uint64_t 165 nanojit/NativeX64.cpp return op | uint64_t((r&7)<<3 | (b&7))<<56;
uint64_t 169 nanojit/NativeX64.cpp static inline uint64_t mod_rxb(uint64_t op, Register r, Register x, Register b) {
uint64_t 170 nanojit/NativeX64.cpp return op | /*modrm*/uint64_t((r&7)<<3)<<48 | /*sib*/uint64_t((x&7)<<3|(b&7))<<56;
uint64_t 173 nanojit/NativeX64.cpp static inline uint64_t mod_disp32(uint64_t op, Register r, Register b, int32_t d) {
uint64_t 184 nanojit/NativeX64.cpp return op | int64_t(d)<<32 | uint64_t((r&7)<<3 | (b&7))<<24;
uint64_t 191 nanojit/NativeX64.cpp void Assembler::emit(uint64_t op) {
uint64_t 202 nanojit/NativeX64.cpp void Assembler::emit8(uint64_t op, int64_t v) {
uint64_t 204 nanojit/NativeX64.cpp emit(op | uint64_t(v)<<56);
uint64_t 207 nanojit/NativeX64.cpp void Assembler::emit_target8(size_t underrun, uint64_t op, NIns* target) {
uint64_t 212 nanojit/NativeX64.cpp emit(op | uint64_t(offset)<<56);
uint64_t 215 nanojit/NativeX64.cpp void Assembler::emit_target32(size_t underrun, uint64_t op, NIns* target) {
uint64_t 225 nanojit/NativeX64.cpp emit(op | uint64_t(uint32_t(offset))<<32);
uint64_t 229 nanojit/NativeX64.cpp void Assembler::emitrxb(uint64_t op, Register r, Register x, Register b) {
uint64_t 234 nanojit/NativeX64.cpp void Assembler::emitrr(uint64_t op, Register r, Register b) {
uint64_t 239 nanojit/NativeX64.cpp void Assembler::emitrr8(uint64_t op, Register r, Register b) {
uint64_t 244 nanojit/NativeX64.cpp void Assembler::emitprr(uint64_t op, Register r, Register b) {
uint64_t 249 nanojit/NativeX64.cpp void Assembler::emitrm8(uint64_t op, Register r, int32_t d, Register b) {
uint64_t 254 nanojit/NativeX64.cpp void Assembler::emitrm(uint64_t op, Register r, int32_t d, Register b) {
uint64_t 259 nanojit/NativeX64.cpp uint64_t Assembler::emit_disp32(uint64_t op, int32_t d) {
uint64_t 275 nanojit/NativeX64.cpp void Assembler::emitrm_wide(uint64_t op, Register r, int32_t d, Register b) {
uint64_t 282 nanojit/NativeX64.cpp void Assembler::emitprm(uint64_t op, Register r, int32_t d, Register b) {
uint64_t 287 nanojit/NativeX64.cpp void Assembler::emitrr_imm(uint64_t op, Register r, Register b, int32_t imm) {
uint64_t 295 nanojit/NativeX64.cpp void Assembler::emitr_imm64(uint64_t op, Register r, uint64_t imm64) {
uint64_t 297 nanojit/NativeX64.cpp *((uint64_t*)(_nIns -= 8)) = imm64;
uint64_t 302 nanojit/NativeX64.cpp void Assembler::emitrxb_imm(uint64_t op, Register r, Register x, Register b, int32_t imm) {
uint64_t 311 nanojit/NativeX64.cpp void Assembler::emitr_imm8(uint64_t op, Register b, int32_t imm8) {
uint64_t 313 nanojit/NativeX64.cpp op |= uint64_t(imm8)<<56 | uint64_t(b&7)<<48; // modrm is 2nd to last byte
uint64_t 317 nanojit/NativeX64.cpp void Assembler::emitxm_abs(uint64_t op, Register r, int32_t addr32)
uint64_t 322 nanojit/NativeX64.cpp op = op | uint64_t((r&7)<<3)<<48; // put rr[0:2] into mod/rm byte
uint64_t 327 nanojit/NativeX64.cpp void Assembler::emitxm_rel(uint64_t op, Register r, NIns* addr64)
uint64_t 364 nanojit/NativeX64.cpp #define U64 uint64_t
uint64_t 879 nanojit/NativeX64.cpp asm_quad(RAX, (uint64_t)target);
uint64_t 940 nanojit/NativeX64.cpp asm_quad(r, uint64_t(uint32_t(p->imm32())));
uint64_t 1541 nanojit/NativeX64.cpp void Assembler::asm_quad(Register r, uint64_t v) {
uint64_t 1569 nanojit/NativeX64.cpp uint64_t v = ins->imm64();
uint64_t 1885 nanojit/NativeX64.cpp asm_quad(tablereg, (uint64_t)table);
uint64_t 137 nanojit/NativeX64.h : uint64_t
uint64_t 359 nanojit/NativeX64.h void emit(uint64_t op);\
uint64_t 360 nanojit/NativeX64.h void emit8(uint64_t op, int64_t val);\
uint64_t 361 nanojit/NativeX64.h void emit_target8(size_t underrun, uint64_t op, NIns* target);\
uint64_t 362 nanojit/NativeX64.h void emit_target32(size_t underrun, uint64_t op, NIns* target);\
uint64_t 363 nanojit/NativeX64.h void emitrr(uint64_t op, Register r, Register b);\
uint64_t 364 nanojit/NativeX64.h void emitrxb(uint64_t op, Register r, Register x, Register b);\
uint64_t 365 nanojit/NativeX64.h void emitxb(uint64_t op, Register x, Register b) { emitrxb(op, (Register)0, x, b); }\
uint64_t 366 nanojit/NativeX64.h void emitrr8(uint64_t op, Register r, Register b);\
uint64_t 367 nanojit/NativeX64.h void emitr(uint64_t op, Register b) { emitrr(op, (Register)0, b); }\
uint64_t 368 nanojit/NativeX64.h void emitr8(uint64_t op, Register b) { emitrr8(op, (Register)0, b); }\
uint64_t 369 nanojit/NativeX64.h void emitprr(uint64_t op, Register r, Register b);\
uint64_t 370 nanojit/NativeX64.h void emitrm8(uint64_t op, Register r, int32_t d, Register b);\
uint64_t 371 nanojit/NativeX64.h void emitrm(uint64_t op, Register r, int32_t d, Register b);\
uint64_t 372 nanojit/NativeX64.h void emitrm_wide(uint64_t op, Register r, int32_t d, Register b);\
uint64_t 373 nanojit/NativeX64.h uint64_t emit_disp32(uint64_t op, int32_t d);\
uint64_t 374 nanojit/NativeX64.h void emitprm(uint64_t op, Register r, int32_t d, Register b);\
uint64_t 375 nanojit/NativeX64.h void emitrr_imm(uint64_t op, Register r, Register b, int32_t imm);\
uint64_t 376 nanojit/NativeX64.h void emitr_imm64(uint64_t op, Register r, uint64_t imm);\
uint64_t 377 nanojit/NativeX64.h void emitrxb_imm(uint64_t op, Register r, Register x, Register b, int32_t imm);\
uint64_t 378 nanojit/NativeX64.h void emitr_imm(uint64_t op, Register r, int32_t imm) { emitrr_imm(op, (Register)0, r, imm); }\
uint64_t 379 nanojit/NativeX64.h void emitr_imm8(uint64_t op, Register b, int32_t imm8);\
uint64_t 380 nanojit/NativeX64.h void emitxm_abs(uint64_t op, Register r, int32_t addr32);\
uint64_t 381 nanojit/NativeX64.h void emitxm_rel(uint64_t op, Register r, NIns* addr64);\
uint64_t 384 nanojit/NativeX64.h void asm_quad(Register r, uint64_t v);\
uint64_t 504 nanojit/NativeX64.h void MOVQI(Register r, uint64_t u64);\
uint64_t 1381 nanojit/Nativei386.cpp const uint64_t q = ins->imm64();
uint64_t 654 shell/ByteArrayGlue.cpp if((uint64_t)len >= UINT32_T_MAX) //ByteArray APIs cannot handle files > 4GB
uint64_t 92 shell/DataIO.cpp uint64_t u;
uint64_t 215 shell/DataIO.cpp uint64_t u;
uint64_t 58 shell/DataIO.h inline void FlipU64(uint64_t& value)
uint64_t 116 shell/DataIO.h void ConvertU64(uint64_t& value)
uint64_t 713 shell/DebugCLI.cpp if (f.valid() && ((uint64_t)file->length() < UINT32_T_MAX)) { //cannot handle files > 4GB
uint64_t 64 shell/Profiler.h void sample(uint64_t /*time*/, Stringp /*stackTrace*/) {}
uint64_t 65 shell/Profiler.h void allocationSample(uint64_t /*time*/, Stringp /*stackTrace*/, uint64_t /*allocId*/, Stringp /*type*/) {}
uint64_t 66 shell/Profiler.h void deallocationSample(uint64_t /*time*/, uint64_t /*id*/) {}
uint64_t 242 shell/ShellCore.cpp if (!f.valid() || (uint64_t) f.length() >= UINT32_T_MAX)
uint64_t 481 shell/ShellCore.cpp bool isValid = f.valid() && ((uint64_t)f.length() < UINT32_T_MAX); //currently we cannot read files > 4GB
uint64_t 90 shell/SystemClass.h uint64_t initialTime;
uint64_t 62 vprof/vprof.cpp #define MAXINT64 int64_t(uint64_t(-1)>>1)
uint64_t 384 vprof/vprof.cpp inline uint64_t _rdtsc()
uint64_t 390 vprof/vprof.cpp inline uint64_t _rdtsc()
uint64_t 394 vprof/vprof.cpp return (uint64_t(hi) << 32) | lo;
uint64_t 398 vprof/vprof.cpp inline uint64_t _rdtsc() { return 0; }
uint64_t 402 vprof/vprof.cpp static uint64_t _tprof_before = 0;
uint64_t 405 vprof/vprof.cpp uint64_t now = _rdtsc();
uint64_t 406 vprof/vprof.cpp uint64_t v = _tprof_before ? now-_tprof_before : 0;
uint64_t 174 vprof/vprof.h uint64_t v = _tprof_time();\
uint64_t 189 vprof/vprof.h uint64_t v = _tprof_time();\