diff options
119 files changed, 6215 insertions, 2926 deletions
diff --git a/V8Binding/v8/ChangeLog b/V8Binding/v8/ChangeLog index ac364f7..83ebc02 100644 --- a/V8Binding/v8/ChangeLog +++ b/V8Binding/v8/ChangeLog @@ -1,3 +1,30 @@ +2009-07-13: Version 1.2.14 + + Added separate paged heap space for global property cells and + avoid updating the write barrier when storing into them. + + Improved peep-hole optimization on ARM platforms by not emitting + unnecessary debug information. + + Re-enabled ICs for loads and calls that skip a global object + during lookup through the prototype chain. + + Allowed access through global proxies to use ICs. + + Fixed issue 401. + + +2009-07-09: Version 1.2.13 + + Fixed issue 397, issue 398, and issue 399. + + Added support for breakpoint groups. + + Fixed bugs introduced with the new global object representation. + + Fixed a few bugs in the ARM code generator. + + 2009-07-06: Version 1.2.12 Added stack traces collection to Error objects accessible through diff --git a/V8Binding/v8/SConstruct b/V8Binding/v8/SConstruct index 0baf71b..78b050d 100644 --- a/V8Binding/v8/SConstruct +++ b/V8Binding/v8/SConstruct @@ -95,7 +95,12 @@ ANDROID_LINKFLAGS = ['-nostdlib', LIBRARY_FLAGS = { 'all': { 'CPPDEFINES': ['ENABLE_LOGGING_AND_PROFILING'], - 'CPPPATH': [join(root_dir, 'src')] + 'CPPPATH': [join(root_dir, 'src')], + 'regexp:native': { + 'arch:ia32' : { + 'CPPDEFINES': ['V8_NATIVE_REGEXP'] + } + } }, 'gcc': { 'all': { @@ -167,6 +172,7 @@ LIBRARY_FLAGS = { 'CPPDEFINES': ['V8_TARGET_ARCH_ARM'] }, 'arch:x64': { + 'CCFLAGS': ['-fno-strict-aliasing'], 'CPPDEFINES': ['V8_TARGET_ARCH_X64'] }, 'prof:oprofile': { @@ -546,6 +552,11 @@ SIMPLE_OPTIONS = { 'default': ARCH_GUESS, 'help': 'the architecture to build for (' + ARCH_GUESS + ')' }, + 'regexp': { + 'values': ['native', 'interpreted'], + 'default': 'native', + 'help': 'Whether to use native or interpreted regexp implementation' + }, 'snapshot': { 'values': ['on', 'off', 'nobuild'], 'default': 'off', @@ -677,6 +688,8 @@ def VerifyOptions(env): return False if 
not IsLegal(env, 'sample', ["shell", "process"]): return False + if not IsLegal(env, 'regexp', ["native", "interpreted"]): + return False if env['os'] == 'win32' and env['library'] == 'shared' and env['prof'] == 'on': Abort("Profiling on windows only supported for static library.") if env['prof'] == 'oprofile' and env['os'] != 'linux': diff --git a/V8Binding/v8/benchmarks/README.txt b/V8Binding/v8/benchmarks/README.txt index 561e88b..eb759cc 100644 --- a/V8Binding/v8/benchmarks/README.txt +++ b/V8Binding/v8/benchmarks/README.txt @@ -57,4 +57,7 @@ of the benchmark. Changes from Version 4 to Version 5 =================================== -Removed duplicate line in random seed code. +Removed duplicate line in random seed code, and changed the name of +the Object.prototype.inherits function in the DeltaBlue benchmark to +inheritsFrom to avoid name clashes when running in Chromium with +extensions enabled. diff --git a/V8Binding/v8/benchmarks/deltablue.js b/V8Binding/v8/benchmarks/deltablue.js index 253046f..7e25d2e 100644 --- a/V8Binding/v8/benchmarks/deltablue.js +++ b/V8Binding/v8/benchmarks/deltablue.js @@ -46,7 +46,7 @@ var DeltaBlue = new BenchmarkSuite('DeltaBlue', 71104, [ /* --- O b j e c t M o d e l --- */ -Object.prototype.inherits = function (shuper) { +Object.prototype.inheritsFrom = function (shuper) { function Inheriter() { } Inheriter.prototype = shuper.prototype; this.prototype = new Inheriter(); @@ -216,7 +216,7 @@ function UnaryConstraint(v, strength) { this.addConstraint(); } -UnaryConstraint.inherits(Constraint); +UnaryConstraint.inheritsFrom(Constraint); /** * Adds this constraint to the constraint graph @@ -294,7 +294,7 @@ function StayConstraint(v, str) { StayConstraint.superConstructor.call(this, v, str); } -StayConstraint.inherits(UnaryConstraint); +StayConstraint.inheritsFrom(UnaryConstraint); StayConstraint.prototype.execute = function () { // Stay constraints do nothing @@ -312,7 +312,7 @@ function EditConstraint(v, str) { 
EditConstraint.superConstructor.call(this, v, str); } -EditConstraint.inherits(UnaryConstraint); +EditConstraint.inheritsFrom(UnaryConstraint); /** * Edits indicate that a variable is to be changed by imperative code. @@ -346,7 +346,7 @@ function BinaryConstraint(var1, var2, strength) { this.addConstraint(); } -BinaryConstraint.inherits(Constraint); +BinaryConstraint.inheritsFrom(Constraint); /** * Decides if this constratint can be satisfied and which way it @@ -459,7 +459,7 @@ function ScaleConstraint(src, scale, offset, dest, strength) { ScaleConstraint.superConstructor.call(this, src, dest, strength); } -ScaleConstraint.inherits(BinaryConstraint); +ScaleConstraint.inheritsFrom(BinaryConstraint); /** * Adds this constraint to the constraint graph. @@ -515,7 +515,7 @@ function EqualityConstraint(var1, var2, strength) { EqualityConstraint.superConstructor.call(this, var1, var2, strength); } -EqualityConstraint.inherits(BinaryConstraint); +EqualityConstraint.inheritsFrom(BinaryConstraint); /** * Enforce this constraint. Assume that it is satisfied. diff --git a/V8Binding/v8/benchmarks/revisions.html b/V8Binding/v8/benchmarks/revisions.html index b86c876..99d7be4 100644 --- a/V8Binding/v8/benchmarks/revisions.html +++ b/V8Binding/v8/benchmarks/revisions.html @@ -22,7 +22,10 @@ the benchmark suite. <div class="subtitle"><h3>Version 5 (<a href="http://v8.googlecode.com/svn/data/benchmarks/v5/run.html">link</a>)</h3></div> -<p>Removed a duplicate line in the base random seed code. +<p>Removed duplicate line in random seed code, and changed the name of +the Object.prototype.inherits function in the DeltaBlue benchmark to +inheritsFrom to avoid name clashes when running in Chromium with +extensions enabled. 
</p> <div class="subtitle"><h3>Version 4 (<a href="http://v8.googlecode.com/svn/data/benchmarks/v4/run.html">link</a>)</h3></div> diff --git a/V8Binding/v8/src/api.cc b/V8Binding/v8/src/api.cc index b9e0cec..145fa9d 100644 --- a/V8Binding/v8/src/api.cc +++ b/V8Binding/v8/src/api.cc @@ -1085,8 +1085,9 @@ Local<Script> Script::Compile(v8::Handle<String> source, // handle it if it turns out not to be in release mode. ASSERT(pre_data == NULL || pre_data->SanityCheck()); // If the pre-data isn't sane we simply ignore it - if (pre_data != NULL && !pre_data->SanityCheck()) + if (pre_data != NULL && !pre_data->SanityCheck()) { pre_data = NULL; + } i::Handle<i::JSFunction> boilerplate = i::Compiler::Compile(str, name_obj, line_offset, diff --git a/V8Binding/v8/src/api.h b/V8Binding/v8/src/api.h index 85b13ec..f1057a8 100644 --- a/V8Binding/v8/src/api.h +++ b/V8Binding/v8/src/api.h @@ -244,9 +244,10 @@ v8::internal::Handle<T> v8::internal::Handle<T>::EscapeFrom( // Implementations of ToLocal -#define MAKE_TO_LOCAL(Name, From, To) \ +#define MAKE_TO_LOCAL(Name, From, To) \ Local<v8::To> Utils::Name(v8::internal::Handle<v8::internal::From> obj) { \ - return Local<To>(reinterpret_cast<To*>(obj.location())); \ + ASSERT(!obj->IsTheHole()); \ + return Local<To>(reinterpret_cast<To*>(obj.location())); \ } MAKE_TO_LOCAL(ToLocal, Context, Context) diff --git a/V8Binding/v8/src/apinatives.js b/V8Binding/v8/src/apinatives.js index 2981eec..6451e62 100644 --- a/V8Binding/v8/src/apinatives.js +++ b/V8Binding/v8/src/apinatives.js @@ -51,6 +51,7 @@ function Instantiate(data, name) { var Constructor = %GetTemplateField(data, kApiConstructorOffset); var result = Constructor ? 
new (Instantiate(Constructor))() : {}; ConfigureTemplateInstance(result, data); + result = %ToFastProperties(result); return result; default: throw 'Unknown API tag <' + tag + '>'; diff --git a/V8Binding/v8/src/arm/assembler-arm.cc b/V8Binding/v8/src/arm/assembler-arm.cc index a393ac0..3ed99f9 100644 --- a/V8Binding/v8/src/arm/assembler-arm.cc +++ b/V8Binding/v8/src/arm/assembler-arm.cc @@ -697,6 +697,7 @@ void Assembler::bl(int branch_offset, Condition cond) { void Assembler::blx(int branch_offset) { // v5 and above + WriteRecordedPositions(); ASSERT((branch_offset & 1) == 0); int h = ((branch_offset & 2) >> 1)*B24; int imm24 = branch_offset >> 2; @@ -706,12 +707,14 @@ void Assembler::blx(int branch_offset) { // v5 and above void Assembler::blx(Register target, Condition cond) { // v5 and above + WriteRecordedPositions(); ASSERT(!target.is(pc)); emit(cond | B24 | B21 | 15*B16 | 15*B12 | 15*B8 | 3*B4 | target.code()); } void Assembler::bx(Register target, Condition cond) { // v5 and above, plus v4t + WriteRecordedPositions(); ASSERT(!target.is(pc)); // use of pc is actually allowed, but discouraged emit(cond | B24 | B21 | 15*B16 | 15*B12 | 15*B8 | B4 | target.code()); } @@ -810,6 +813,9 @@ void Assembler::orr(Register dst, Register src1, const Operand& src2, void Assembler::mov(Register dst, const Operand& src, SBit s, Condition cond) { + if (dst.is(pc)) { + WriteRecordedPositions(); + } addrmod1(cond | 13*B21 | s, r0, dst, src); } @@ -937,6 +943,9 @@ void Assembler::msr(SRegisterFieldMask fields, const Operand& src, // Load/Store instructions void Assembler::ldr(Register dst, const MemOperand& src, Condition cond) { + if (dst.is(pc)) { + WriteRecordedPositions(); + } addrmod2(cond | B26 | L, dst, src); // Eliminate pattern: push(r), pop(r) @@ -1274,7 +1283,6 @@ void Assembler::RecordPosition(int pos) { if (pos == RelocInfo::kNoPosition) return; ASSERT(pos >= 0); current_position_ = pos; - WriteRecordedPositions(); } @@ -1282,7 +1290,6 @@ void 
Assembler::RecordStatementPosition(int pos) { if (pos == RelocInfo::kNoPosition) return; ASSERT(pos >= 0); current_statement_position_ = pos; - WriteRecordedPositions(); } diff --git a/V8Binding/v8/src/arm/disasm-arm.cc b/V8Binding/v8/src/arm/disasm-arm.cc index c55a958..d193ab9 100644 --- a/V8Binding/v8/src/arm/disasm-arm.cc +++ b/V8Binding/v8/src/arm/disasm-arm.cc @@ -506,17 +506,25 @@ void Decoder::DecodeType01(Instr* instr) { // multiply instructions if (instr->Bit(23) == 0) { if (instr->Bit(21) == 0) { - // Mul calls it Rd. Everyone else calls it Rn. + // The MUL instruction description (A 4.1.33) refers to Rd as being + // the destination for the operation, but it confusingly uses the + // Rn field to encode it. Format(instr, "mul'cond's 'rn, 'rm, 'rs"); } else { - // In the manual the order is rd, rm, rs, rn. But mla swaps the - // positions of rn and rd in the encoding. + // The MLA instruction description (A 4.1.28) refers to the order + // of registers as "Rd, Rm, Rs, Rn". But confusingly it uses the + // Rn field to encode the Rd register and the Rd field to encode + // the Rn register. Format(instr, "mla'cond's 'rn, 'rm, 'rs, 'rd"); } } else { - // In the manual the order is RdHi, RdLo, Rm, Rs. - // RdHi is what other instructions call Rn and RdLo is Rd. - Format(instr, "'um'al'cond's 'rn, 'rd, 'rm, 'rs"); + // The signed/long multiply instructions use the terms RdHi and RdLo + // when referring to the target registers. 
They are mapped to the Rn + // and Rd fields as follows: + // RdLo == Rd field + // RdHi == Rn field + // The order of registers is: <RdLo>, <RdHi>, <Rm>, <Rs> + Format(instr, "'um'al'cond's 'rd, 'rn, 'rm, 'rs"); } } else { Unknown(instr); // not used by V8 diff --git a/V8Binding/v8/src/arm/ic-arm.cc b/V8Binding/v8/src/arm/ic-arm.cc index 07c767e..b436760 100644 --- a/V8Binding/v8/src/arm/ic-arm.cc +++ b/V8Binding/v8/src/arm/ic-arm.cc @@ -77,6 +77,13 @@ static void GenerateDictionaryLoad(MacroAssembler* masm, __ cmp(r3, Operand(JS_GLOBAL_PROXY_TYPE)); __ b(eq, miss); + // Possible work-around for http://crbug.com/16276. + // See also: http://codereview.chromium.org/155418. + __ cmp(r3, Operand(JS_GLOBAL_OBJECT_TYPE)); + __ b(eq, miss); + __ cmp(r3, Operand(JS_BUILTINS_OBJECT_TYPE)); + __ b(eq, miss); + // Check that the properties array is a dictionary. __ ldr(t0, FieldMemOperand(t1, JSObject::kPropertiesOffset)); __ ldr(r3, FieldMemOperand(t0, HeapObject::kMapOffset)); @@ -192,11 +199,14 @@ void LoadIC::GenerateFunctionPrototype(MacroAssembler* masm) { // -- [sp] : receiver // ----------------------------------- - // NOTE: Right now, this code always misses on ARM which is - // sub-optimal. We should port the fast case code from IA-32. + Label miss; - Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Miss)); - __ Jump(ic, RelocInfo::CODE_TARGET); + // Load receiver. + __ ldr(r0, MemOperand(sp, 0)); + + StubCompiler::GenerateLoadFunctionPrototype(masm, r0, r1, r3, &miss); + __ bind(&miss); + StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC); } diff --git a/V8Binding/v8/src/arm/macro-assembler-arm.cc b/V8Binding/v8/src/arm/macro-assembler-arm.cc index 3d6b8cb..47e2749 100644 --- a/V8Binding/v8/src/arm/macro-assembler-arm.cc +++ b/V8Binding/v8/src/arm/macro-assembler-arm.cc @@ -290,11 +290,24 @@ void MacroAssembler::EnterExitFrame(StackFrame::Type type) { // Align the stack at this point. After this point we have 5 pushes, // so in fact we have to unalign here! 
See also the assert on the // alignment immediately below. - if (OS::ActivationFrameAlignment() != kPointerSize) { +#if defined(V8_HOST_ARCH_ARM) + // Running on the real platform. Use the alignment as mandated by the local + // environment. + // Note: This will break if we ever start generating snapshots on one ARM + // platform for another ARM platform with a different alignment. + int activation_frame_alignment = OS::ActivationFrameAlignment(); +#else // defined(V8_HOST_ARCH_ARM) + // If we are using the simulator then we should always align to the expected + // alignment. As the simulator is used to generate snapshots we do not know + // if the target platform will need alignment, so we will always align at + // this point here. + int activation_frame_alignment = 2 * kPointerSize; +#endif // defined(V8_HOST_ARCH_ARM) + if (activation_frame_alignment != kPointerSize) { // This code needs to be made more general if this assert doesn't hold. - ASSERT(OS::ActivationFrameAlignment() == 2 * kPointerSize); + ASSERT(activation_frame_alignment == 2 * kPointerSize); mov(r7, Operand(Smi::FromInt(0))); - tst(sp, Operand(OS::ActivationFrameAlignment() - 1)); + tst(sp, Operand(activation_frame_alignment - 1)); push(r7, eq); // Conditional push instruction. } diff --git a/V8Binding/v8/src/arm/simulator-arm.cc b/V8Binding/v8/src/arm/simulator-arm.cc index 1b42919..e5500aa 100644 --- a/V8Binding/v8/src/arm/simulator-arm.cc +++ b/V8Binding/v8/src/arm/simulator-arm.cc @@ -1080,25 +1080,44 @@ void Simulator::DecodeType01(Instr* instr) { // multiply instruction or extra loads and stores if (instr->Bits(7, 4) == 9) { if (instr->Bit(24) == 0) { - // Multiply instructions have Rd in a funny place. - int rd = instr->RnField(); + // Raw field decoding here. Multiply instructions have their Rd in + // funny places. 
+ int rn = instr->RnField(); int rm = instr->RmField(); int rs = instr->RsField(); int32_t rs_val = get_register(rs); int32_t rm_val = get_register(rm); if (instr->Bit(23) == 0) { if (instr->Bit(21) == 0) { + // The MUL instruction description (A 4.1.33) refers to Rd as being + // the destination for the operation, but it confusingly uses the + // Rn field to encode it. // Format(instr, "mul'cond's 'rn, 'rm, 'rs"); + int rd = rn; // Remap the rn field to the Rd register. int32_t alu_out = rm_val * rs_val; set_register(rd, alu_out); if (instr->HasS()) { SetNZFlags(alu_out); } } else { - UNIMPLEMENTED(); // mla is not used by V8. + // The MLA instruction description (A 4.1.28) refers to the order + // of registers as "Rd, Rm, Rs, Rn". But confusingly it uses the + // Rn field to encode the Rd register and the Rd field to encode + // the Rn register. + Format(instr, "mla'cond's 'rn, 'rm, 'rs, 'rd"); } } else { - // Format(instr, "'um'al'cond's 'rn, 'rd, 'rs, 'rm"); + // The signed/long multiply instructions use the terms RdHi and RdLo + // when referring to the target registers. They are mapped to the Rn + // and Rd fields as follows: + // RdLo == Rd + // RdHi == Rn (This is confusingly stored in variable rd here + // because the mul instruction from above uses the + // Rn field to encode the Rd register. Good luck figuring + // this out without reading the ARM instruction manual + // at a very detailed level.) + // Format(instr, "'um'al'cond's 'rd, 'rn, 'rs, 'rm"); + int rd_hi = rn; // Remap the rn field to the RdHi register. 
int rd_lo = instr->RdField(); int32_t hi_res = 0; int32_t lo_res = 0; @@ -1117,7 +1136,7 @@ void Simulator::DecodeType01(Instr* instr) { lo_res = static_cast<int32_t>(result & 0xffffffff); } set_register(rd_lo, lo_res); - set_register(rd, hi_res); + set_register(rd_hi, hi_res); if (instr->HasS()) { UNIMPLEMENTED(); } diff --git a/V8Binding/v8/src/arm/stub-cache-arm.cc b/V8Binding/v8/src/arm/stub-cache-arm.cc index e3e5502..6d9ace8 100644 --- a/V8Binding/v8/src/arm/stub-cache-arm.cc +++ b/V8Binding/v8/src/arm/stub-cache-arm.cc @@ -171,110 +171,6 @@ void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm, } -void StubCompiler::GenerateLoadField(MacroAssembler* masm, - JSObject* object, - JSObject* holder, - Register receiver, - Register scratch1, - Register scratch2, - int index, - Label* miss_label) { - // Check that the receiver isn't a smi. - __ tst(receiver, Operand(kSmiTagMask)); - __ b(eq, miss_label); - - // Check that the maps haven't changed. - Register reg = - masm->CheckMaps(object, receiver, holder, scratch1, scratch2, miss_label); - GenerateFastPropertyLoad(masm, r0, reg, holder, index); - __ Ret(); -} - - -void StubCompiler::GenerateLoadConstant(MacroAssembler* masm, - JSObject* object, - JSObject* holder, - Register receiver, - Register scratch1, - Register scratch2, - Object* value, - Label* miss_label) { - // Check that the receiver isn't a smi. - __ tst(receiver, Operand(kSmiTagMask)); - __ b(eq, miss_label); - - // Check that the maps haven't changed. - Register reg = - masm->CheckMaps(object, receiver, holder, scratch1, scratch2, miss_label); - - // Return the constant value. - __ mov(r0, Operand(Handle<Object>(value))); - __ Ret(); -} - - -void StubCompiler::GenerateLoadCallback(MacroAssembler* masm, - JSObject* object, - JSObject* holder, - Register receiver, - Register name, - Register scratch1, - Register scratch2, - AccessorInfo* callback, - Label* miss_label) { - // Check that the receiver isn't a smi. 
- __ tst(receiver, Operand(kSmiTagMask)); - __ b(eq, miss_label); - - // Check that the maps haven't changed. - Register reg = - masm->CheckMaps(object, receiver, holder, scratch1, scratch2, miss_label); - - // Push the arguments on the JS stack of the caller. - __ push(receiver); // receiver - __ mov(ip, Operand(Handle<AccessorInfo>(callback))); // callback data - __ push(ip); - __ push(name); // name - __ push(reg); // holder - - // Do tail-call to the runtime system. - ExternalReference load_callback_property = - ExternalReference(IC_Utility(IC::kLoadCallbackProperty)); - __ TailCallRuntime(load_callback_property, 4); -} - - -void StubCompiler::GenerateLoadInterceptor(MacroAssembler* masm, - JSObject* object, - JSObject* holder, - Smi* lookup_hint, - Register receiver, - Register name, - Register scratch1, - Register scratch2, - Label* miss_label) { - // Check that the receiver isn't a smi. - __ tst(receiver, Operand(kSmiTagMask)); - __ b(eq, miss_label); - - // Check that the maps haven't changed. - Register reg = - masm->CheckMaps(object, receiver, holder, scratch1, scratch2, miss_label); - - // Push the arguments on the JS stack of the caller. - __ push(receiver); // receiver - __ push(reg); // holder - __ push(name); // name - __ mov(scratch1, Operand(lookup_hint)); - __ push(scratch1); - - // Do tail-call to the runtime system. 
- ExternalReference load_ic_property = - ExternalReference(IC_Utility(IC::kLoadInterceptorProperty)); - __ TailCallRuntime(load_ic_property, 4); -} - - void StubCompiler::GenerateLoadArrayLength(MacroAssembler* masm, Register receiver, Register scratch, @@ -351,6 +247,17 @@ void StubCompiler::GenerateLoadStringLength2(MacroAssembler* masm, } +void StubCompiler::GenerateLoadFunctionPrototype(MacroAssembler* masm, + Register receiver, + Register scratch1, + Register scratch2, + Label* miss_label) { + __ TryGetFunctionPrototype(receiver, scratch1, scratch2, miss_label); + __ mov(r0, scratch1); + __ Ret(); +} + + // Generate StoreField code, value is passed in r0 register. // After executing generated code, the receiver_reg and name_reg // may be clobbered. @@ -462,6 +369,147 @@ void StubCompiler::GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind) { #define __ ACCESS_MASM(masm()) +Register StubCompiler::CheckPrototypes(JSObject* object, + Register object_reg, + JSObject* holder, + Register holder_reg, + Register scratch, + String* name, + Label* miss) { + // Check that the maps haven't changed. + Register result = + masm()->CheckMaps(object, object_reg, holder, holder_reg, scratch, miss); + + // If we've skipped any global objects, it's not enough to verify + // that their maps haven't changed. + while (object != holder) { + if (object->IsGlobalObject()) { + GlobalObject* global = GlobalObject::cast(object); + Object* probe = global->EnsurePropertyCell(name); + if (probe->IsFailure()) { + set_failure(Failure::cast(probe)); + return result; + } + JSGlobalPropertyCell* cell = JSGlobalPropertyCell::cast(probe); + ASSERT(cell->value()->IsTheHole()); + __ mov(scratch, Operand(Handle<Object>(cell))); + __ ldr(scratch, + FieldMemOperand(scratch, JSGlobalPropertyCell::kValueOffset)); + __ cmp(scratch, Operand(Factory::the_hole_value())); + __ b(ne, miss); + } + object = JSObject::cast(object->GetPrototype()); + } + + // Return the register containin the holder. 
+ return result; +} + + +void StubCompiler::GenerateLoadField(JSObject* object, + JSObject* holder, + Register receiver, + Register scratch1, + Register scratch2, + int index, + String* name, + Label* miss) { + // Check that the receiver isn't a smi. + __ tst(receiver, Operand(kSmiTagMask)); + __ b(eq, miss); + + // Check that the maps haven't changed. + Register reg = + CheckPrototypes(object, receiver, holder, scratch1, scratch2, name, miss); + GenerateFastPropertyLoad(masm(), r0, reg, holder, index); + __ Ret(); +} + + +void StubCompiler::GenerateLoadConstant(JSObject* object, + JSObject* holder, + Register receiver, + Register scratch1, + Register scratch2, + Object* value, + String* name, + Label* miss) { + // Check that the receiver isn't a smi. + __ tst(receiver, Operand(kSmiTagMask)); + __ b(eq, miss); + + // Check that the maps haven't changed. + Register reg = + CheckPrototypes(object, receiver, holder, scratch1, scratch2, name, miss); + + // Return the constant value. + __ mov(r0, Operand(Handle<Object>(value))); + __ Ret(); +} + + +void StubCompiler::GenerateLoadCallback(JSObject* object, + JSObject* holder, + Register receiver, + Register name_reg, + Register scratch1, + Register scratch2, + AccessorInfo* callback, + String* name, + Label* miss) { + // Check that the receiver isn't a smi. + __ tst(receiver, Operand(kSmiTagMask)); + __ b(eq, miss); + + // Check that the maps haven't changed. + Register reg = + CheckPrototypes(object, receiver, holder, scratch1, scratch2, name, miss); + + // Push the arguments on the JS stack of the caller. + __ push(receiver); // receiver + __ mov(ip, Operand(Handle<AccessorInfo>(callback))); // callback data + __ push(ip); + __ push(name_reg); // name + __ push(reg); // holder + + // Do tail-call to the runtime system. 
+ ExternalReference load_callback_property = + ExternalReference(IC_Utility(IC::kLoadCallbackProperty)); + __ TailCallRuntime(load_callback_property, 4); +} + + +void StubCompiler::GenerateLoadInterceptor(JSObject* object, + JSObject* holder, + Smi* lookup_hint, + Register receiver, + Register name_reg, + Register scratch1, + Register scratch2, + String* name, + Label* miss) { + // Check that the receiver isn't a smi. + __ tst(receiver, Operand(kSmiTagMask)); + __ b(eq, miss); + + // Check that the maps haven't changed. + Register reg = + CheckPrototypes(object, receiver, holder, scratch1, scratch2, name, miss); + + // Push the arguments on the JS stack of the caller. + __ push(receiver); // receiver + __ push(reg); // holder + __ push(name_reg); // name + __ mov(scratch1, Operand(lookup_hint)); + __ push(scratch1); + + // Do tail-call to the runtime system. + ExternalReference load_ic_property = + ExternalReference(IC_Utility(IC::kLoadInterceptorProperty)); + __ TailCallRuntime(load_ic_property, 4); +} + + Object* StubCompiler::CompileLazyCompile(Code::Flags flags) { // ----------- S t a t e ------------- // -- r1: function @@ -513,7 +561,7 @@ Object* CallStubCompiler::CompileCallField(Object* object, // Do the right check and compute the holder register. Register reg = - masm()->CheckMaps(JSObject::cast(object), r0, holder, r3, r2, &miss); + CheckPrototypes(JSObject::cast(object), r0, holder, r3, r2, name, &miss); GenerateFastPropertyLoad(masm(), r1, reg, holder, index); // Check that the function really is a function. @@ -546,6 +594,7 @@ Object* CallStubCompiler::CompileCallField(Object* object, Object* CallStubCompiler::CompileCallConstant(Object* object, JSObject* holder, JSFunction* function, + String* name, CheckType check) { // ----------- S t a t e ------------- // -- lr: return address @@ -569,7 +618,7 @@ Object* CallStubCompiler::CompileCallConstant(Object* object, switch (check) { case RECEIVER_MAP_CHECK: // Check that the maps haven't changed. 
- __ CheckMaps(JSObject::cast(object), r1, holder, r3, r2, &miss); + CheckPrototypes(JSObject::cast(object), r1, holder, r3, r2, name, &miss); // Patch the receiver on the stack with the global proxy if // necessary. @@ -587,8 +636,8 @@ Object* CallStubCompiler::CompileCallConstant(Object* object, GenerateLoadGlobalFunctionPrototype(masm(), Context::STRING_FUNCTION_INDEX, r2); - __ CheckMaps(JSObject::cast(object->GetPrototype()), - r2, holder, r3, r1, &miss); + CheckPrototypes(JSObject::cast(object->GetPrototype()), r2, holder, r3, + r1, name, &miss); break; case NUMBER_CHECK: { @@ -603,8 +652,8 @@ Object* CallStubCompiler::CompileCallConstant(Object* object, GenerateLoadGlobalFunctionPrototype(masm(), Context::NUMBER_FUNCTION_INDEX, r2); - __ CheckMaps(JSObject::cast(object->GetPrototype()), - r2, holder, r3, r1, &miss); + CheckPrototypes(JSObject::cast(object->GetPrototype()), r2, holder, r3, + r1, name, &miss); break; } @@ -620,13 +669,13 @@ Object* CallStubCompiler::CompileCallConstant(Object* object, GenerateLoadGlobalFunctionPrototype(masm(), Context::BOOLEAN_FUNCTION_INDEX, r2); - __ CheckMaps(JSObject::cast(object->GetPrototype()), - r2, holder, r3, r1, &miss); + CheckPrototypes(JSObject::cast(object->GetPrototype()), r2, holder, r3, + r1, name, &miss); break; } case JSARRAY_HAS_FAST_ELEMENTS_CHECK: - __ CheckMaps(JSObject::cast(object), r1, holder, r3, r2, &miss); + CheckPrototypes(JSObject::cast(object), r1, holder, r3, r2, name, &miss); // Make sure object->elements()->map() != Heap::hash_table_map() // Get the elements array of the object. 
__ ldr(r3, FieldMemOperand(r1, JSObject::kElementsOffset)); @@ -685,7 +734,8 @@ Object* CallStubCompiler::CompileCallInterceptor(Object* object, } -Object* CallStubCompiler::CompileCallGlobal(GlobalObject* object, +Object* CallStubCompiler::CompileCallGlobal(JSObject* object, + GlobalObject* holder, JSGlobalPropertyCell* cell, JSFunction* function, String* name) { @@ -699,11 +749,19 @@ Object* CallStubCompiler::CompileCallGlobal(GlobalObject* object, // Get the number of arguments. const int argc = arguments().immediate(); - // Check that the map of the global has not changed. - __ ldr(r2, MemOperand(sp, argc * kPointerSize)); - __ ldr(r3, FieldMemOperand(r2, HeapObject::kMapOffset)); - __ cmp(r3, Operand(Handle<Map>(object->map()))); - __ b(ne, &miss); + // Get the receiver from the stack. + __ ldr(r0, MemOperand(sp, argc * kPointerSize)); + + // If the object is the holder then we know that it's a global + // object which can only happen for contextual calls. In this case, + // the receiver cannot be a smi. + if (object != holder) { + __ tst(r0, Operand(kSmiTagMask)); + __ b(eq, &miss); + } + + // Check that the maps haven't changed. + CheckPrototypes(object, r0, holder, r3, r2, name, &miss); // Get the value from the cell. __ mov(r3, Operand(Handle<JSGlobalPropertyCell>(cell))); @@ -715,8 +773,10 @@ Object* CallStubCompiler::CompileCallGlobal(GlobalObject* object, // Patch the receiver on the stack with the global proxy if // necessary. - __ ldr(r3, FieldMemOperand(r2, GlobalObject::kGlobalReceiverOffset)); - __ str(r3, MemOperand(sp, argc * kPointerSize)); + if (object->IsGlobalObject()) { + __ ldr(r3, FieldMemOperand(r0, GlobalObject::kGlobalReceiverOffset)); + __ str(r3, MemOperand(sp, argc * kPointerSize)); + } // Setup the context (function already in r1). __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset)); @@ -902,8 +962,6 @@ Object* StoreStubCompiler::CompileStoreGlobal(GlobalObject* object, // Store the value in the cell. 
__ mov(r2, Operand(Handle<JSGlobalPropertyCell>(cell))); __ str(r0, FieldMemOperand(r2, JSGlobalPropertyCell::kValueOffset)); - __ mov(r1, Operand(JSGlobalPropertyCell::kValueOffset)); - __ RecordWrite(r2, r1, r3); __ Ret(); @@ -932,7 +990,7 @@ Object* LoadStubCompiler::CompileLoadField(JSObject* object, __ ldr(r0, MemOperand(sp, 0)); - GenerateLoadField(masm(), object, holder, r0, r3, r1, index, &miss); + GenerateLoadField(object, holder, r0, r3, r1, index, name, &miss); __ bind(&miss); GenerateLoadMiss(masm(), Code::LOAD_IC); @@ -953,7 +1011,7 @@ Object* LoadStubCompiler::CompileLoadCallback(JSObject* object, Label miss; __ ldr(r0, MemOperand(sp, 0)); - GenerateLoadCallback(masm(), object, holder, r0, r2, r3, r1, callback, &miss); + GenerateLoadCallback(object, holder, r0, r2, r3, r1, callback, name, &miss); __ bind(&miss); GenerateLoadMiss(masm(), Code::LOAD_IC); @@ -975,7 +1033,7 @@ Object* LoadStubCompiler::CompileLoadConstant(JSObject* object, __ ldr(r0, MemOperand(sp, 0)); - GenerateLoadConstant(masm(), object, holder, r0, r3, r1, value, &miss); + GenerateLoadConstant(object, holder, r0, r3, r1, value, name, &miss); __ bind(&miss); GenerateLoadMiss(masm(), Code::LOAD_IC); @@ -996,14 +1054,14 @@ Object* LoadStubCompiler::CompileLoadInterceptor(JSObject* object, __ ldr(r0, MemOperand(sp, 0)); - GenerateLoadInterceptor(masm(), - object, + GenerateLoadInterceptor(object, holder, holder->InterceptorPropertyLookupHint(name), r0, r2, r3, r1, + name, &miss); __ bind(&miss); GenerateLoadMiss(masm(), Code::LOAD_IC); @@ -1013,7 +1071,8 @@ Object* LoadStubCompiler::CompileLoadInterceptor(JSObject* object, } -Object* LoadStubCompiler::CompileLoadGlobal(GlobalObject* object, +Object* LoadStubCompiler::CompileLoadGlobal(JSObject* object, + GlobalObject* holder, JSGlobalPropertyCell* cell, String* name, bool is_dont_delete) { @@ -1026,11 +1085,19 @@ Object* LoadStubCompiler::CompileLoadGlobal(GlobalObject* object, __ IncrementCounter(&Counters::named_load_global_inline, 1, 
r1, r3); - // Check that the map of the global has not changed. + // Get the receiver from the stack. __ ldr(r1, MemOperand(sp, 0 * kPointerSize)); - __ ldr(r3, FieldMemOperand(r1, HeapObject::kMapOffset)); - __ cmp(r3, Operand(Handle<Map>(object->map()))); - __ b(ne, &miss); + + // If the object is the holder then we know that it's a global + // object which can only happen for contextual calls. In this case, + // the receiver cannot be a smi. + if (object != holder) { + __ tst(r1, Operand(kSmiTagMask)); + __ b(eq, &miss); + } + + // Check that the map of the global has not changed. + CheckPrototypes(object, r1, holder, r3, r0, name, &miss); // Get the value from the cell. __ mov(r3, Operand(Handle<JSGlobalPropertyCell>(cell))); @@ -1073,7 +1140,7 @@ Object* KeyedLoadStubCompiler::CompileLoadField(String* name, __ cmp(r2, Operand(Handle<String>(name))); __ b(ne, &miss); - GenerateLoadField(masm(), receiver, holder, r0, r3, r1, index, &miss); + GenerateLoadField(receiver, holder, r0, r3, r1, index, name, &miss); __ bind(&miss); GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC); @@ -1098,8 +1165,7 @@ Object* KeyedLoadStubCompiler::CompileLoadCallback(String* name, __ cmp(r2, Operand(Handle<String>(name))); __ b(ne, &miss); - GenerateLoadCallback(masm(), receiver, holder, r0, r2, r3, - r1, callback, &miss); + GenerateLoadCallback(receiver, holder, r0, r2, r3, r1, callback, name, &miss); __ bind(&miss); GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC); @@ -1125,7 +1191,7 @@ Object* KeyedLoadStubCompiler::CompileLoadConstant(String* name, __ cmp(r2, Operand(Handle<String>(name))); __ b(ne, &miss); - GenerateLoadConstant(masm(), receiver, holder, r0, r3, r1, value, &miss); + GenerateLoadConstant(receiver, holder, r0, r3, r1, value, name, &miss); __ bind(&miss); GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC); @@ -1151,14 +1217,14 @@ Object* KeyedLoadStubCompiler::CompileLoadInterceptor(JSObject* receiver, __ cmp(r2, Operand(Handle<String>(name))); __ b(ne, &miss); - 
GenerateLoadInterceptor(masm(), - receiver, + GenerateLoadInterceptor(receiver, holder, Smi::FromInt(JSObject::kLookupInHolder), r0, r2, r3, r1, + name, &miss); __ bind(&miss); GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC); diff --git a/V8Binding/v8/src/bootstrapper.cc b/V8Binding/v8/src/bootstrapper.cc index 8ef4956..ad5396e 100644 --- a/V8Binding/v8/src/bootstrapper.cc +++ b/V8Binding/v8/src/bootstrapper.cc @@ -134,7 +134,7 @@ void Bootstrapper::TearDown() { } -// Pending fixups are code positions that have refer to builtin code +// Pending fixups are code positions that refer to builtin code // objects that were not available at the time the code was generated. // The pending list is processed whenever an environment has been // created. @@ -216,7 +216,6 @@ bool PendingFixups::Process(Handle<JSBuiltinsObject> builtins) { *reinterpret_cast<Object**>(pc) = f->code(); } } else { - ASSERT(is_pc_relative); Assembler::set_target_address_at(pc, f->code()->instruction_start()); } @@ -1374,43 +1373,35 @@ void Genesis::TransferNamedProperties(Handle<JSObject> from, if (from->HasFastProperties()) { Handle<DescriptorArray> descs = Handle<DescriptorArray>(from->map()->instance_descriptors()); - int offset = 0; - while (true) { - // Iterating through the descriptors is not gc safe so we have to - // store the value in a handle and create a new stream for each entry. - DescriptorReader stream(*descs, offset); - if (stream.eos()) break; - // We have to read out the next offset before we do anything that may - // cause a gc, since the DescriptorReader is not gc safe. 
- offset = stream.next_position(); - PropertyDetails details = stream.GetDetails(); + for (int i = 0; i < descs->number_of_descriptors(); i++) { + PropertyDetails details = PropertyDetails(descs->GetDetails(i)); switch (details.type()) { case FIELD: { HandleScope inner; - Handle<String> key = Handle<String>(stream.GetKey()); - int index = stream.GetFieldIndex(); + Handle<String> key = Handle<String>(descs->GetKey(i)); + int index = descs->GetFieldIndex(i); Handle<Object> value = Handle<Object>(from->FastPropertyAt(index)); SetProperty(to, key, value, details.attributes()); break; } case CONSTANT_FUNCTION: { HandleScope inner; - Handle<String> key = Handle<String>(stream.GetKey()); + Handle<String> key = Handle<String>(descs->GetKey(i)); Handle<JSFunction> fun = - Handle<JSFunction>(stream.GetConstantFunction()); + Handle<JSFunction>(descs->GetConstantFunction(i)); SetProperty(to, key, fun, details.attributes()); break; } case CALLBACKS: { LookupResult result; - to->LocalLookup(stream.GetKey(), &result); + to->LocalLookup(descs->GetKey(i), &result); // If the property is already there we skip it if (result.IsValid()) continue; HandleScope inner; Handle<DescriptorArray> inst_descs = Handle<DescriptorArray>(to->map()->instance_descriptors()); - Handle<String> key = Handle<String>(stream.GetKey()); - Handle<Object> entry = Handle<Object>(stream.GetCallbacksObject()); + Handle<String> key = Handle<String>(descs->GetKey(i)); + Handle<Object> entry = Handle<Object>(descs->GetCallbacksObject(i)); inst_descs = Factory::CopyAppendProxyDescriptor(inst_descs, key, entry, @@ -1556,7 +1547,7 @@ Genesis::Genesis(Handle<Object> global_object, // will always do unlinking. previous_ = current_; current_ = this; - result_ = NULL; + result_ = Handle<Context>::null(); // If V8 isn't running and cannot be initialized, just return. 
if (!V8::IsRunning() && !V8::Initialize(NULL)) return; diff --git a/V8Binding/v8/src/code-stubs.cc b/V8Binding/v8/src/code-stubs.cc index 37bc707..9c24c60 100644 --- a/V8Binding/v8/src/code-stubs.cc +++ b/V8Binding/v8/src/code-stubs.cc @@ -85,7 +85,7 @@ Handle<Code> CodeStub::GetCode() { Handle<NumberDictionary>(Heap::code_stubs()), key, code); - Heap::set_code_stubs(*dict); + Heap::public_set_code_stubs(*dict); index = Heap::code_stubs()->FindEntry(key); } ASSERT(index != NumberDictionary::kNotFound); diff --git a/V8Binding/v8/src/compilation-cache.cc b/V8Binding/v8/src/compilation-cache.cc index 535b843..4066bc7 100644 --- a/V8Binding/v8/src/compilation-cache.cc +++ b/V8Binding/v8/src/compilation-cache.cc @@ -288,6 +288,7 @@ void CompilationCacheScript::Put(Handle<String> source, HandleScope scope; ASSERT(boilerplate->IsBoilerplate()); Handle<CompilationCacheTable> table = GetTable(0); + // TODO(X64): -fstrict-aliasing causes a problem with table. Fix it. CALL_HEAP_FUNCTION_VOID(table->Put(*source, *boilerplate)); } diff --git a/V8Binding/v8/src/d8-posix.cc b/V8Binding/v8/src/d8-posix.cc index 3a091f9..fe130ce 100644 --- a/V8Binding/v8/src/d8-posix.cc +++ b/V8Binding/v8/src/d8-posix.cc @@ -370,7 +370,11 @@ static Handle<Value> GetStdout(int child_fd, // whether it exited normally. In the common case this doesn't matter because // we don't get here before the child has closed stdout and most programs don't // do that before they exit. -#if defined(WNOWAIT) && !defined(ANDROID) +// +// We're disabling usage of waitid in Mac OS X because it doens't work for us: +// a parent process hangs on waiting while a child process is already a zombie. +// See http://code.google.com/p/v8/issues/detail?id=401. 
+#if defined(WNOWAIT) && !defined(ANDROID) && !defined(__APPLE__) #define HAS_WAITID 1 #endif diff --git a/V8Binding/v8/src/date-delay.js b/V8Binding/v8/src/date-delay.js index 3414cb9..6adde46 100644 --- a/V8Binding/v8/src/date-delay.js +++ b/V8Binding/v8/src/date-delay.js @@ -427,6 +427,19 @@ function TimeClip(time) { } +// The Date cache is used to limit the cost of parsing the same Date +// strings over and over again. +var Date_cache = { + // Cached time value. + time: $NaN, + // Cached year when interpreting the time as a local time. Only + // valid when the time matches cached time. + year: $NaN, + // String input for which the cached time is valid. + string: null +}; + + %SetCode($Date, function(year, month, date, hours, minutes, seconds, ms) { if (!%_IsConstructCall()) { // ECMA 262 - 15.9.2 @@ -442,6 +455,20 @@ function TimeClip(time) { } else if (argc == 1) { if (IS_NUMBER(year)) { value = TimeClip(year); + + } else if (IS_STRING(year)) { + // Probe the Date cache. If we already have a time value for the + // given time, we re-use that instead of parsing the string again. + var cache = Date_cache; + if (cache.string === year) { + value = cache.time; + } else { + value = DateParse(year); + cache.time = value; + cache.year = YearFromTime(LocalTimeNoCheck(value)); + cache.string = year; + } + } else { // According to ECMA 262, no hint should be given for this // conversion. However, ToPrimitive defaults to STRING_HINT for @@ -537,8 +564,9 @@ function GetUTCHoursFrom(aDate) { function GetFullYearFrom(aDate) { var t = DATE_VALUE(aDate); if (NUMBER_IS_NAN(t)) return t; - // Ignore the DST offset for year computations. - return YearFromTime(t + local_time_offset); + var cache = Date_cache; + if (cache.time === t) return cache.year; + return YearFromTime(LocalTimeNoCheck(t)); } @@ -634,7 +662,7 @@ function DatePrintString(time) { // ------------------------------------------------------------------- -// Reused output buffer. +// Reused output buffer. 
Used when parsing date strings. var parse_buffer = $Array(7); // ECMA 262 - 15.9.4.2 diff --git a/V8Binding/v8/src/debug-delay.js b/V8Binding/v8/src/debug-delay.js index 857c554..4f60851 100644 --- a/V8Binding/v8/src/debug-delay.js +++ b/V8Binding/v8/src/debug-delay.js @@ -223,7 +223,8 @@ function IsBreakPointTriggered(break_id, break_point) { // Object representing a script break point. The script is referenced by its // script name or script id and the break point is represented as line and // column. -function ScriptBreakPoint(type, script_id_or_name, opt_line, opt_column) { +function ScriptBreakPoint(type, script_id_or_name, opt_line, opt_column, + opt_groupId) { this.type_ = type; if (type == Debug.ScriptBreakPointType.ScriptId) { this.script_id_ = script_id_or_name; @@ -232,6 +233,7 @@ function ScriptBreakPoint(type, script_id_or_name, opt_line, opt_column) { } this.line_ = opt_line || 0; this.column_ = opt_column; + this.groupId_ = opt_groupId; this.hit_count_ = 0; this.active_ = true; this.condition_ = null; @@ -244,6 +246,11 @@ ScriptBreakPoint.prototype.number = function() { }; +ScriptBreakPoint.prototype.groupId = function() { + return this.groupId_; +}; + + ScriptBreakPoint.prototype.type = function() { return this.type_; }; @@ -611,10 +618,12 @@ Debug.findScriptBreakPoint = function(break_point_number, remove) { // Sets a breakpoint in a script identified through id or name at the // specified source line and column within that line. Debug.setScriptBreakPoint = function(type, script_id_or_name, - opt_line, opt_column, opt_condition) { + opt_line, opt_column, opt_condition, + opt_groupId) { // Create script break point object. var script_break_point = - new ScriptBreakPoint(type, script_id_or_name, opt_line, opt_column); + new ScriptBreakPoint(type, script_id_or_name, opt_line, opt_column, + opt_groupId); // Assign number to the new script break point and add it. 
script_break_point.number_ = next_break_point_number++; @@ -636,19 +645,19 @@ Debug.setScriptBreakPoint = function(type, script_id_or_name, Debug.setScriptBreakPointById = function(script_id, opt_line, opt_column, - opt_condition) { + opt_condition, opt_groupId) { return this.setScriptBreakPoint(Debug.ScriptBreakPointType.ScriptId, script_id, opt_line, opt_column, - opt_condition) + opt_condition, opt_groupId); } Debug.setScriptBreakPointByName = function(script_name, opt_line, opt_column, - opt_condition) { + opt_condition, opt_groupId) { return this.setScriptBreakPoint(Debug.ScriptBreakPointType.ScriptName, script_name, opt_line, opt_column, - opt_condition) + opt_condition, opt_groupId); } @@ -1210,6 +1219,8 @@ DebugCommandProcessor.prototype.processDebugJSONRequest = function(json_request) this.changeBreakPointRequest_(request, response); } else if (request.command == 'clearbreakpoint') { this.clearBreakPointRequest_(request, response); + } else if (request.command == 'clearbreakpointgroup') { + this.clearBreakPointGroupRequest_(request, response); } else if (request.command == 'backtrace') { this.backtraceRequest_(request, response); } else if (request.command == 'frame') { @@ -1325,6 +1336,7 @@ DebugCommandProcessor.prototype.setBreakPointRequest_ = true : request.arguments.enabled; var condition = request.arguments.condition; var ignoreCount = request.arguments.ignoreCount; + var groupId = request.arguments.groupId; // Check for legal arguments. if (!type || IS_UNDEFINED(target)) { @@ -1378,10 +1390,11 @@ DebugCommandProcessor.prototype.setBreakPointRequest_ = } else if (type == 'script') { // set script break point. break_point_number = - Debug.setScriptBreakPointByName(target, line, column, condition); + Debug.setScriptBreakPointByName(target, line, column, condition, + groupId); } else { // type == 'scriptId. 
break_point_number = - Debug.setScriptBreakPointById(target, line, column, condition); + Debug.setScriptBreakPointById(target, line, column, condition, groupId); } // Set additional break point properties. @@ -1454,6 +1467,40 @@ DebugCommandProcessor.prototype.changeBreakPointRequest_ = function(request, res } +DebugCommandProcessor.prototype.clearBreakPointGroupRequest_ = function(request, response) { + // Check for legal request. + if (!request.arguments) { + response.failed('Missing arguments'); + return; + } + + // Pull out arguments. + var group_id = request.arguments.groupId; + + // Check for legal arguments. + if (!group_id) { + response.failed('Missing argument "groupId"'); + return; + } + + var cleared_break_points = []; + var new_script_break_points = []; + for (var i = 0; i < script_break_points.length; i++) { + var next_break_point = script_break_points[i]; + if (next_break_point.groupId() == group_id) { + cleared_break_points.push(next_break_point.number()); + next_break_point.clear(); + } else { + new_script_break_points.push(next_break_point); + } + } + script_break_points = new_script_break_points; + + // Add the cleared break point numbers to the response. + response.body = { breakpoints: cleared_break_points }; +} + + DebugCommandProcessor.prototype.clearBreakPointRequest_ = function(request, response) { // Check for legal request. if (!request.arguments) { diff --git a/V8Binding/v8/src/debug.cc b/V8Binding/v8/src/debug.cc index e37bfb7..52be930 100644 --- a/V8Binding/v8/src/debug.cc +++ b/V8Binding/v8/src/debug.cc @@ -1260,6 +1260,7 @@ void Debug::SetBreak(StackFrame::Id break_frame_id, int break_id) { // Handle stepping into a function. void Debug::HandleStepIn(Handle<JSFunction> function, + Handle<Object> holder, Address fp, bool is_constructor) { // If the frame pointer is not supplied by the caller find it. 
@@ -1285,21 +1286,12 @@ void Debug::HandleStepIn(Handle<JSFunction> function, Builtins::builtin(Builtins::FunctionCall)) { // Handle function.apply and function.call separately to flood the // function to be called and not the code for Builtins::FunctionApply or - // Builtins::FunctionCall. At the point of the call IC to call either - // Builtins::FunctionApply or Builtins::FunctionCall the expression - // stack has the following content: - // symbol "apply" or "call" - // function apply or call was called on - // receiver for apply or call (first parameter to apply or call) - // ... further arguments to apply or call. - JavaScriptFrameIterator it; - ASSERT(it.frame()->fp() == fp); - ASSERT(it.frame()->GetExpression(1)->IsJSFunction()); - if (it.frame()->GetExpression(1)->IsJSFunction()) { - Handle<JSFunction> - actual_function(JSFunction::cast(it.frame()->GetExpression(1))); - Handle<SharedFunctionInfo> actual_shared(actual_function->shared()); - Debug::FloodWithOneShot(actual_shared); + // Builtins::FunctionCall. The receiver of call/apply is the target + // function. 
+ if (!holder.is_null() && holder->IsJSFunction()) { + Handle<SharedFunctionInfo> shared_info( + JSFunction::cast(*holder)->shared()); + Debug::FloodWithOneShot(shared_info); } } else { Debug::FloodWithOneShot(Handle<SharedFunctionInfo>(function->shared())); diff --git a/V8Binding/v8/src/debug.h b/V8Binding/v8/src/debug.h index a1abced..970dbbe 100644 --- a/V8Binding/v8/src/debug.h +++ b/V8Binding/v8/src/debug.h @@ -270,6 +270,7 @@ class Debug { static bool StepInActive() { return thread_local_.step_into_fp_ != 0; } static void HandleStepIn(Handle<JSFunction> function, + Handle<Object> holder, Address fp, bool is_constructor); static Address step_in_fp() { return thread_local_.step_into_fp_; } @@ -363,6 +364,10 @@ class Debug { static const int kIa32CallInstructionLength = 5; static const int kIa32JSReturnSequenceLength = 6; + // The x64 JS return sequence is padded with int3 to make it large + // enough to hold a call instruction when the debugger patches it. + static const int kX64JSReturnSequenceLength = 13; + // Code generator routines. static void GenerateLoadICDebugBreak(MacroAssembler* masm); static void GenerateStoreICDebugBreak(MacroAssembler* masm); diff --git a/V8Binding/v8/src/factory.cc b/V8Binding/v8/src/factory.cc index 4d7a957..1045a4c 100644 --- a/V8Binding/v8/src/factory.cc +++ b/V8Binding/v8/src/factory.cc @@ -570,12 +570,10 @@ Handle<DescriptorArray> Factory::CopyAppendCallbackDescriptors( int descriptor_count = 0; // Copy the descriptors from the array. - DescriptorWriter w(*result); - for (DescriptorReader r(*array); !r.eos(); r.advance()) { - if (!r.IsNullDescriptor()) { - w.WriteFrom(&r); + for (int i = 0; i < array->number_of_descriptors(); i++) { + if (array->GetType(i) != NULL_DESCRIPTOR) { + result->CopyFrom(descriptor_count++, *array, i); } - descriptor_count++; } // Number of duplicates detected. 
@@ -594,7 +592,7 @@ Handle<DescriptorArray> Factory::CopyAppendCallbackDescriptors( if (result->LinearSearch(*key, descriptor_count) == DescriptorArray::kNotFound) { CallbacksDescriptor desc(*key, *entry, entry->property_attributes()); - w.Write(&desc); + result->Set(descriptor_count, &desc); descriptor_count++; } else { duplicates++; @@ -604,13 +602,11 @@ Handle<DescriptorArray> Factory::CopyAppendCallbackDescriptors( // If duplicates were detected, allocate a result of the right size // and transfer the elements. if (duplicates > 0) { + int number_of_descriptors = result->number_of_descriptors() - duplicates; Handle<DescriptorArray> new_result = - NewDescriptorArray(result->number_of_descriptors() - duplicates); - DescriptorWriter w(*new_result); - DescriptorReader r(*result); - while (!w.eos()) { - w.WriteFrom(&r); - r.advance(); + NewDescriptorArray(number_of_descriptors); + for (int i = 0; i < number_of_descriptors; i++) { + new_result->CopyFrom(i, *result, i); } result = new_result; } diff --git a/V8Binding/v8/src/factory.h b/V8Binding/v8/src/factory.h index 90fb29c..0afdd76 100644 --- a/V8Binding/v8/src/factory.h +++ b/V8Binding/v8/src/factory.h @@ -28,6 +28,7 @@ #ifndef V8_FACTORY_H_ #define V8_FACTORY_H_ +#include "globals.h" #include "heap.h" #include "zone-inl.h" @@ -299,13 +300,19 @@ class Factory : public AllStatic { Handle<JSObject> instance, bool* pending_exception); -#define ROOT_ACCESSOR(type, name) \ - static Handle<type> name() { return Handle<type>(&Heap::name##_); } +#define ROOT_ACCESSOR(type, name, camel_name) \ + static inline Handle<type> name() { \ + return Handle<type>(bit_cast<type**, Object**>( \ + &Heap::roots_[Heap::k##camel_name##RootIndex])); \ + } ROOT_LIST(ROOT_ACCESSOR) #undef ROOT_ACCESSOR_ACCESSOR #define SYMBOL_ACCESSOR(name, str) \ - static Handle<String> name() { return Handle<String>(&Heap::name##_); } + static inline Handle<String> name() { \ + return Handle<String>(bit_cast<String**, Object**>( \ + 
&Heap::roots_[Heap::k##name##RootIndex])); \ + } SYMBOL_LIST(SYMBOL_ACCESSOR) #undef SYMBOL_ACCESSOR diff --git a/V8Binding/v8/src/flag-definitions.h b/V8Binding/v8/src/flag-definitions.h index 814b2c4..4e7829b 100644 --- a/V8Binding/v8/src/flag-definitions.h +++ b/V8Binding/v8/src/flag-definitions.h @@ -158,6 +158,8 @@ DEFINE_bool(gc_global, false, "always perform global GCs") DEFINE_int(gc_interval, -1, "garbage collect after <n> allocations") DEFINE_bool(trace_gc, false, "print one trace line following each garbage collection") +DEFINE_bool(trace_gc_verbose, false, + "print more details following each garbage collection") DEFINE_bool(collect_maps, true, "garbage collect maps from which no objects can be reached") @@ -208,8 +210,6 @@ DEFINE_bool(preemption, false, // Regexp DEFINE_bool(trace_regexps, false, "trace regexp execution") -DEFINE_bool(regexp_native, true, - "use native code regexp implementation (IA32 only)") DEFINE_bool(regexp_optimization, true, "generate optimized regexp code") // Testing flags test/cctest/test-{flags,api,serialization}.cc diff --git a/V8Binding/v8/src/globals.h b/V8Binding/v8/src/globals.h index 8088331..44bd527 100644 --- a/V8Binding/v8/src/globals.h +++ b/V8Binding/v8/src/globals.h @@ -257,14 +257,16 @@ typedef bool (*WeakSlotCallback)(Object** pointer); // NOTE: SpaceIterator depends on AllocationSpace enumeration values being // consecutive. enum AllocationSpace { - NEW_SPACE, // Semispaces collected with copying collector. - OLD_POINTER_SPACE, // Must be first of the paged spaces - see PagedSpaces. - OLD_DATA_SPACE, // May not have pointers to new space. - CODE_SPACE, // Also one of the old spaces. Marked executable. - MAP_SPACE, // Only map objects. - LO_SPACE, // Large objects. + NEW_SPACE, // Semispaces collected with copying collector. + OLD_POINTER_SPACE, // May contain pointers to new space. + OLD_DATA_SPACE, // Must not have pointers to new space. + CODE_SPACE, // No pointers to new space, marked executable. 
+ MAP_SPACE, // Only and all map objects. + CELL_SPACE, // Only and all cell objects. + LO_SPACE, // Promoted large objects. + FIRST_SPACE = NEW_SPACE, - LAST_SPACE = LO_SPACE // <= 5 (see kSpaceBits and kLOSpacePointer) + LAST_SPACE = LO_SPACE }; const int kSpaceTagSize = 3; const int kSpaceTagMask = (1 << kSpaceTagSize) - 1; diff --git a/V8Binding/v8/src/handles.cc b/V8Binding/v8/src/handles.cc index 44ca602..510ea95 100644 --- a/V8Binding/v8/src/handles.cc +++ b/V8Binding/v8/src/handles.cc @@ -289,10 +289,11 @@ Handle<Object> GetHiddenProperties(Handle<JSObject> obj, // hidden symbols hash code is zero (and no other string has hash // code zero) it will always occupy the first entry if present. DescriptorArray* descriptors = obj->map()->instance_descriptors(); - DescriptorReader r(descriptors, 0); // Explicitly position reader at zero. - if (!r.eos() && (r.GetKey() == *key) && r.IsProperty()) { - ASSERT(r.type() == FIELD); - return Handle<Object>(obj->FastPropertyAt(r.GetFieldIndex())); + if ((descriptors->number_of_descriptors() > 0) && + (descriptors->GetKey(0) == *key) && + descriptors->IsProperty(0)) { + ASSERT(descriptors->GetType(0) == FIELD); + return Handle<Object>(obj->FastPropertyAt(descriptors->GetFieldIndex(0))); } } @@ -372,10 +373,10 @@ static void ClearWrapperCache(Persistent<v8::Value> handle, void*) { Handle<JSValue> GetScriptWrapper(Handle<Script> script) { - Handle<Object> cache(reinterpret_cast<Object**>(script->wrapper()->proxy())); - if (!cache.is_null()) { + if (script->wrapper()->proxy() != NULL) { // Return the script wrapper directly from the cache. - return Handle<JSValue>(JSValue::cast(*cache)); + return Handle<JSValue>( + reinterpret_cast<JSValue**>(script->wrapper()->proxy())); } // Construct a new script wrapper. 
@@ -588,12 +589,13 @@ Handle<FixedArray> GetEnumPropertyKeys(Handle<JSObject> object) { int num_enum = object->NumberOfEnumProperties(); Handle<FixedArray> storage = Factory::NewFixedArray(num_enum); Handle<FixedArray> sort_array = Factory::NewFixedArray(num_enum); - for (DescriptorReader r(object->map()->instance_descriptors()); - !r.eos(); - r.advance()) { - if (r.IsProperty() && !r.IsDontEnum()) { - (*storage)->set(index, r.GetKey()); - (*sort_array)->set(index, Smi::FromInt(r.GetDetails().index())); + Handle<DescriptorArray> descs = + Handle<DescriptorArray>(object->map()->instance_descriptors()); + for (int i = 0; i < descs->number_of_descriptors(); i++) { + if (descs->IsProperty(i) && !descs->IsDontEnum(i)) { + (*storage)->set(index, descs->GetKey(i)); + PropertyDetails details(descs->GetDetails(i)); + (*sort_array)->set(index, Smi::FromInt(details.index())); index++; } } diff --git a/V8Binding/v8/src/handles.h b/V8Binding/v8/src/handles.h index af638b8..a86dc96 100644 --- a/V8Binding/v8/src/handles.h +++ b/V8Binding/v8/src/handles.h @@ -42,7 +42,7 @@ namespace internal { template<class T> class Handle { public: - INLINE(Handle(T** location)) { location_ = location; } + INLINE(Handle(T** location)) { location_ = location; } INLINE(explicit Handle(T* obj)); INLINE(Handle()) : location_(NULL) {} @@ -59,7 +59,7 @@ class Handle { location_ = reinterpret_cast<T**>(handle.location()); } - INLINE(T* operator ->() const) { return operator*(); } + INLINE(T* operator ->() const) { return operator*(); } // Check if this handle refers to the exact same object as the other handle. 
bool is_identical_to(const Handle<T> other) const { diff --git a/V8Binding/v8/src/heap-inl.h b/V8Binding/v8/src/heap-inl.h index 810d3d4..36c6f4b 100644 --- a/V8Binding/v8/src/heap-inl.h +++ b/V8Binding/v8/src/heap-inl.h @@ -82,6 +82,8 @@ Object* Heap::AllocateRaw(int size_in_bytes, result = code_space_->AllocateRaw(size_in_bytes); } else if (LO_SPACE == space) { result = lo_space_->AllocateRaw(size_in_bytes); + } else if (CELL_SPACE == space) { + result = cell_space_->AllocateRaw(size_in_bytes); } else { ASSERT(MAP_SPACE == space); result = map_space_->AllocateRaw(size_in_bytes); @@ -107,12 +109,23 @@ Object* Heap::NumberFromUint32(uint32_t value) { } -Object* Heap::AllocateRawMap(int size_in_bytes) { +Object* Heap::AllocateRawMap() { #ifdef DEBUG Counters::objs_since_last_full.Increment(); Counters::objs_since_last_young.Increment(); #endif - Object* result = map_space_->AllocateRaw(size_in_bytes); + Object* result = map_space_->AllocateRaw(Map::kSize); + if (result->IsFailure()) old_gen_exhausted_ = true; + return result; +} + + +Object* Heap::AllocateRawCell() { +#ifdef DEBUG + Counters::objs_since_last_full.Increment(); + Counters::objs_since_last_young.Increment(); +#endif + Object* result = cell_space_->AllocateRaw(JSGlobalPropertyCell::kSize); if (result->IsFailure()) old_gen_exhausted_ = true; return result; } @@ -216,7 +229,7 @@ void Heap::ScavengeObject(HeapObject** p, HeapObject* object) { void Heap::SetLastScriptId(Object* last_script_id) { - last_script_id_ = last_script_id; + roots_[kLastScriptIdRootIndex] = last_script_id; } diff --git a/V8Binding/v8/src/heap.cc b/V8Binding/v8/src/heap.cc index 749013a..adfac3f 100644 --- a/V8Binding/v8/src/heap.cc +++ b/V8Binding/v8/src/heap.cc @@ -43,27 +43,17 @@ namespace v8 { namespace internal { -#define ROOT_ALLOCATION(type, name) type* Heap::name##_; - ROOT_LIST(ROOT_ALLOCATION) -#undef ROOT_ALLOCATION - - -#define STRUCT_ALLOCATION(NAME, Name, name) Map* Heap::name##_map_; - STRUCT_LIST(STRUCT_ALLOCATION) 
-#undef STRUCT_ALLOCATION - - -#define SYMBOL_ALLOCATION(name, string) String* Heap::name##_; - SYMBOL_LIST(SYMBOL_ALLOCATION) -#undef SYMBOL_ALLOCATION String* Heap::hidden_symbol_; +Object* Heap::roots_[Heap::kRootListLength]; + NewSpace Heap::new_space_; OldSpace* Heap::old_pointer_space_ = NULL; OldSpace* Heap::old_data_space_ = NULL; OldSpace* Heap::code_space_ = NULL; MapSpace* Heap::map_space_ = NULL; +CellSpace* Heap::cell_space_ = NULL; LargeObjectSpace* Heap::lo_space_ = NULL; static const int kMinimumPromotionLimit = 2*MB; @@ -121,7 +111,8 @@ int Heap::Capacity() { old_pointer_space_->Capacity() + old_data_space_->Capacity() + code_space_->Capacity() + - map_space_->Capacity(); + map_space_->Capacity() + + cell_space_->Capacity(); } @@ -132,7 +123,8 @@ int Heap::Available() { old_pointer_space_->Available() + old_data_space_->Available() + code_space_->Available() + - map_space_->Available(); + map_space_->Available() + + cell_space_->Available(); } @@ -141,6 +133,7 @@ bool Heap::HasBeenSetup() { old_data_space_ != NULL && code_space_ != NULL && map_space_ != NULL && + cell_space_ != NULL && lo_space_ != NULL; } @@ -214,6 +207,27 @@ void Heap::ReportStatisticsBeforeGC() { } +#if defined(ENABLE_LOGGING_AND_PROFILING) +void Heap::PrintShortHeapStatistics() { + if (!FLAG_trace_gc_verbose) return; + PrintF("Memory allocator, used: %8d, available: %8d\n", + MemoryAllocator::Size(), MemoryAllocator::Available()); + PrintF("New space, used: %8d, available: %8d\n", + Heap::new_space_.Size(), new_space_.Available()); + PrintF("Old pointers, used: %8d, available: %8d\n", + old_pointer_space_->Size(), old_pointer_space_->Available()); + PrintF("Old data space, used: %8d, available: %8d\n", + old_data_space_->Size(), old_data_space_->Available()); + PrintF("Code space, used: %8d, available: %8d\n", + code_space_->Size(), code_space_->Available()); + PrintF("Map space, used: %8d, available: %8d\n", + map_space_->Size(), map_space_->Available()); + PrintF("Large 
object space, used: %8d, avaialble: %8d\n", + map_space_->Size(), map_space_->Available()); +} +#endif + + // TODO(1238405): Combine the infrastructure for --heap-stats and // --log-gc to avoid the complicated preprocessor and flag testing. void Heap::ReportStatisticsAfterGC() { @@ -284,9 +298,8 @@ void Heap::GarbageCollectionEpilogue() { Counters::alive_after_last_gc.Set(SizeOfObjects()); - SymbolTable* symbol_table = SymbolTable::cast(Heap::symbol_table_); - Counters::symbol_table_capacity.Set(symbol_table->Capacity()); - Counters::number_of_symbols.Set(symbol_table->NumberOfElements()); + Counters::symbol_table_capacity.Set(symbol_table()->Capacity()); + Counters::number_of_symbols.Set(symbol_table()->NumberOfElements()); #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING) ReportStatisticsAfterGC(); #endif @@ -371,6 +384,8 @@ bool Heap::CollectGarbage(int requested_size, AllocationSpace space) { return code_space_->Available() >= requested_size; case MAP_SPACE: return map_space_->Available() >= requested_size; + case CELL_SPACE: + return cell_space_->Available() >= requested_size; case LO_SPACE: return lo_space_->Available() >= requested_size; } @@ -405,8 +420,7 @@ class SymbolTableVerifier : public ObjectVisitor { static void VerifySymbolTable() { #ifdef DEBUG SymbolTableVerifier verifier; - SymbolTable* symbol_table = SymbolTable::cast(Heap::symbol_table()); - symbol_table->IterateElements(&verifier); + Heap::symbol_table()->IterateElements(&verifier); #endif // DEBUG } @@ -608,6 +622,7 @@ static void VerifyNonPointerSpacePointers() { } #endif + void Heap::Scavenge() { #ifdef DEBUG if (FLAG_enable_slow_asserts) VerifyNonPointerSpacePointers(); @@ -666,7 +681,7 @@ void Heap::Scavenge() { // Copy objects reachable from weak pointers. GlobalHandles::IterateWeakRoots(&scavenge_visitor); -#if V8_HOST_ARCH_64_BIT +#ifdef V8_HOST_ARCH_64_BIT // TODO(X64): Make this go away again. We currently disable RSets for // 64-bit-mode. 
HeapObjectIterator old_pointer_iterator(old_pointer_space_); @@ -686,13 +701,25 @@ void Heap::Scavenge() { heap_object->Iterate(&scavenge_visitor); } } -#else // V8_HOST_ARCH_64_BIT +#else // !defined(V8_HOST_ARCH_64_BIT) // Copy objects reachable from the old generation. By definition, // there are no intergenerational pointers in code or data spaces. IterateRSet(old_pointer_space_, &ScavengePointer); IterateRSet(map_space_, &ScavengePointer); lo_space_->IterateRSet(&ScavengePointer); -#endif // V8_HOST_ARCH_64_BIT +#endif + + // Copy objects reachable from cells by scavenging cell values directly. + HeapObjectIterator cell_iterator(cell_space_); + while (cell_iterator.has_next()) { + HeapObject* cell = cell_iterator.next(); + if (cell->IsJSGlobalPropertyCell()) { + Address value_address = + reinterpret_cast<Address>(cell) + + (JSGlobalPropertyCell::kValueOffset - kHeapObjectTag); + scavenge_visitor.VisitPointer(reinterpret_cast<Object**>(value_address)); + } + } do { ASSERT(new_space_front <= new_space_.top()); @@ -832,8 +859,8 @@ int Heap::UpdateRSet(HeapObject* obj) { void Heap::RebuildRSets() { - // By definition, we do not care about remembered set bits in code or data - // spaces. + // By definition, we do not care about remembered set bits in code, + // data, or cell spaces. map_space_->ClearRSet(); RebuildRSets(map_space_); @@ -1008,11 +1035,11 @@ void Heap::ScavengePointer(HeapObject** p) { Object* Heap::AllocatePartialMap(InstanceType instance_type, int instance_size) { - Object* result = AllocateRawMap(Map::kSize); + Object* result = AllocateRawMap(); if (result->IsFailure()) return result; // Map::cast cannot be used due to uninitialized map field. 
- reinterpret_cast<Map*>(result)->set_map(meta_map()); + reinterpret_cast<Map*>(result)->set_map(raw_unchecked_meta_map()); reinterpret_cast<Map*>(result)->set_instance_type(instance_type); reinterpret_cast<Map*>(result)->set_instance_size(instance_size); reinterpret_cast<Map*>(result)->set_inobject_properties(0); @@ -1022,7 +1049,7 @@ Object* Heap::AllocatePartialMap(InstanceType instance_type, Object* Heap::AllocateMap(InstanceType instance_type, int instance_size) { - Object* result = AllocateRawMap(Map::kSize); + Object* result = AllocateRawMap(); if (result->IsFailure()) return result; Map* map = reinterpret_cast<Map*>(result); @@ -1041,41 +1068,59 @@ Object* Heap::AllocateMap(InstanceType instance_type, int instance_size) { } +const Heap::StringTypeTable Heap::string_type_table[] = { +#define STRING_TYPE_ELEMENT(type, size, name, camel_name) \ + {type, size, k##camel_name##MapRootIndex}, + STRING_TYPE_LIST(STRING_TYPE_ELEMENT) +#undef STRING_TYPE_ELEMENT +}; + + +const Heap::ConstantSymbolTable Heap::constant_symbol_table[] = { +#define CONSTANT_SYMBOL_ELEMENT(name, contents) \ + {contents, k##name##RootIndex}, + SYMBOL_LIST(CONSTANT_SYMBOL_ELEMENT) +#undef CONSTANT_SYMBOL_ELEMENT +}; + + +const Heap::StructTable Heap::struct_table[] = { +#define STRUCT_TABLE_ELEMENT(NAME, Name, name) \ + { NAME##_TYPE, Name::kSize, k##Name##MapRootIndex }, + STRUCT_LIST(STRUCT_TABLE_ELEMENT) +#undef STRUCT_TABLE_ELEMENT +}; + + bool Heap::CreateInitialMaps() { Object* obj = AllocatePartialMap(MAP_TYPE, Map::kSize); if (obj->IsFailure()) return false; - // Map::cast cannot be used due to uninitialized map field. 
- meta_map_ = reinterpret_cast<Map*>(obj); - meta_map()->set_map(meta_map()); + Map* new_meta_map = reinterpret_cast<Map*>(obj); + set_meta_map(new_meta_map); + new_meta_map->set_map(new_meta_map); obj = AllocatePartialMap(FIXED_ARRAY_TYPE, FixedArray::kHeaderSize); if (obj->IsFailure()) return false; - fixed_array_map_ = Map::cast(obj); + set_fixed_array_map(Map::cast(obj)); obj = AllocatePartialMap(ODDBALL_TYPE, Oddball::kSize); if (obj->IsFailure()) return false; - oddball_map_ = Map::cast(obj); - - obj = AllocatePartialMap(JS_GLOBAL_PROPERTY_CELL_TYPE, - JSGlobalPropertyCell::kSize); - if (obj->IsFailure()) return false; - global_property_cell_map_ = Map::cast(obj); + set_oddball_map(Map::cast(obj)); // Allocate the empty array obj = AllocateEmptyFixedArray(); if (obj->IsFailure()) return false; - empty_fixed_array_ = FixedArray::cast(obj); + set_empty_fixed_array(FixedArray::cast(obj)); obj = Allocate(oddball_map(), OLD_DATA_SPACE); if (obj->IsFailure()) return false; - null_value_ = obj; + set_null_value(obj); - // Allocate the empty descriptor array. AllocateMap can now be used. + // Allocate the empty descriptor array. obj = AllocateEmptyFixedArray(); if (obj->IsFailure()) return false; - // There is a check against empty_descriptor_array() in cast(). - empty_descriptor_array_ = reinterpret_cast<DescriptorArray*>(obj); + set_empty_descriptor_array(DescriptorArray::cast(obj)); // Fix the instance_descriptors for the existing maps. meta_map()->set_instance_descriptors(empty_descriptor_array()); @@ -1087,113 +1132,112 @@ bool Heap::CreateInitialMaps() { oddball_map()->set_instance_descriptors(empty_descriptor_array()); oddball_map()->set_code_cache(empty_fixed_array()); - global_property_cell_map()->set_instance_descriptors( - empty_descriptor_array()); - global_property_cell_map()->set_code_cache(empty_fixed_array()); - // Fix prototype object for existing maps. 
meta_map()->set_prototype(null_value()); meta_map()->set_constructor(null_value()); fixed_array_map()->set_prototype(null_value()); fixed_array_map()->set_constructor(null_value()); + oddball_map()->set_prototype(null_value()); oddball_map()->set_constructor(null_value()); - global_property_cell_map()->set_prototype(null_value()); - global_property_cell_map()->set_constructor(null_value()); - obj = AllocateMap(HEAP_NUMBER_TYPE, HeapNumber::kSize); if (obj->IsFailure()) return false; - heap_number_map_ = Map::cast(obj); + set_heap_number_map(Map::cast(obj)); obj = AllocateMap(PROXY_TYPE, Proxy::kSize); if (obj->IsFailure()) return false; - proxy_map_ = Map::cast(obj); + set_proxy_map(Map::cast(obj)); -#define ALLOCATE_STRING_MAP(type, size, name) \ - obj = AllocateMap(type, size); \ - if (obj->IsFailure()) return false; \ - name##_map_ = Map::cast(obj); - STRING_TYPE_LIST(ALLOCATE_STRING_MAP); -#undef ALLOCATE_STRING_MAP + for (unsigned i = 0; i < ARRAY_SIZE(string_type_table); i++) { + const StringTypeTable& entry = string_type_table[i]; + obj = AllocateMap(entry.type, entry.size); + if (obj->IsFailure()) return false; + roots_[entry.index] = Map::cast(obj); + } obj = AllocateMap(SHORT_STRING_TYPE, SeqTwoByteString::kAlignedSize); if (obj->IsFailure()) return false; - undetectable_short_string_map_ = Map::cast(obj); - undetectable_short_string_map_->set_is_undetectable(); + set_undetectable_short_string_map(Map::cast(obj)); + Map::cast(obj)->set_is_undetectable(); obj = AllocateMap(MEDIUM_STRING_TYPE, SeqTwoByteString::kAlignedSize); if (obj->IsFailure()) return false; - undetectable_medium_string_map_ = Map::cast(obj); - undetectable_medium_string_map_->set_is_undetectable(); + set_undetectable_medium_string_map(Map::cast(obj)); + Map::cast(obj)->set_is_undetectable(); obj = AllocateMap(LONG_STRING_TYPE, SeqTwoByteString::kAlignedSize); if (obj->IsFailure()) return false; - undetectable_long_string_map_ = Map::cast(obj); - 
undetectable_long_string_map_->set_is_undetectable(); + set_undetectable_long_string_map(Map::cast(obj)); + Map::cast(obj)->set_is_undetectable(); obj = AllocateMap(SHORT_ASCII_STRING_TYPE, SeqAsciiString::kAlignedSize); if (obj->IsFailure()) return false; - undetectable_short_ascii_string_map_ = Map::cast(obj); - undetectable_short_ascii_string_map_->set_is_undetectable(); + set_undetectable_short_ascii_string_map(Map::cast(obj)); + Map::cast(obj)->set_is_undetectable(); obj = AllocateMap(MEDIUM_ASCII_STRING_TYPE, SeqAsciiString::kAlignedSize); if (obj->IsFailure()) return false; - undetectable_medium_ascii_string_map_ = Map::cast(obj); - undetectable_medium_ascii_string_map_->set_is_undetectable(); + set_undetectable_medium_ascii_string_map(Map::cast(obj)); + Map::cast(obj)->set_is_undetectable(); obj = AllocateMap(LONG_ASCII_STRING_TYPE, SeqAsciiString::kAlignedSize); if (obj->IsFailure()) return false; - undetectable_long_ascii_string_map_ = Map::cast(obj); - undetectable_long_ascii_string_map_->set_is_undetectable(); + set_undetectable_long_ascii_string_map(Map::cast(obj)); + Map::cast(obj)->set_is_undetectable(); obj = AllocateMap(BYTE_ARRAY_TYPE, Array::kAlignedSize); if (obj->IsFailure()) return false; - byte_array_map_ = Map::cast(obj); + set_byte_array_map(Map::cast(obj)); obj = AllocateMap(CODE_TYPE, Code::kHeaderSize); if (obj->IsFailure()) return false; - code_map_ = Map::cast(obj); + set_code_map(Map::cast(obj)); + + obj = AllocateMap(JS_GLOBAL_PROPERTY_CELL_TYPE, + JSGlobalPropertyCell::kSize); + if (obj->IsFailure()) return false; + set_global_property_cell_map(Map::cast(obj)); obj = AllocateMap(FILLER_TYPE, kPointerSize); if (obj->IsFailure()) return false; - one_word_filler_map_ = Map::cast(obj); + set_one_pointer_filler_map(Map::cast(obj)); obj = AllocateMap(FILLER_TYPE, 2 * kPointerSize); if (obj->IsFailure()) return false; - two_word_filler_map_ = Map::cast(obj); + set_two_pointer_filler_map(Map::cast(obj)); -#define ALLOCATE_STRUCT_MAP(NAME, 
Name, name) \ - obj = AllocateMap(NAME##_TYPE, Name::kSize); \ - if (obj->IsFailure()) return false; \ - name##_map_ = Map::cast(obj); - STRUCT_LIST(ALLOCATE_STRUCT_MAP) -#undef ALLOCATE_STRUCT_MAP + for (unsigned i = 0; i < ARRAY_SIZE(struct_table); i++) { + const StructTable& entry = struct_table[i]; + obj = AllocateMap(entry.type, entry.size); + if (obj->IsFailure()) return false; + roots_[entry.index] = Map::cast(obj); + } obj = AllocateMap(FIXED_ARRAY_TYPE, HeapObject::kHeaderSize); if (obj->IsFailure()) return false; - hash_table_map_ = Map::cast(obj); + set_hash_table_map(Map::cast(obj)); obj = AllocateMap(FIXED_ARRAY_TYPE, HeapObject::kHeaderSize); if (obj->IsFailure()) return false; - context_map_ = Map::cast(obj); + set_context_map(Map::cast(obj)); obj = AllocateMap(FIXED_ARRAY_TYPE, HeapObject::kHeaderSize); if (obj->IsFailure()) return false; - catch_context_map_ = Map::cast(obj); + set_catch_context_map(Map::cast(obj)); obj = AllocateMap(FIXED_ARRAY_TYPE, HeapObject::kHeaderSize); if (obj->IsFailure()) return false; - global_context_map_ = Map::cast(obj); + set_global_context_map(Map::cast(obj)); obj = AllocateMap(JS_FUNCTION_TYPE, JSFunction::kSize); if (obj->IsFailure()) return false; - boilerplate_function_map_ = Map::cast(obj); + set_boilerplate_function_map(Map::cast(obj)); obj = AllocateMap(SHARED_FUNCTION_INFO_TYPE, SharedFunctionInfo::kSize); if (obj->IsFailure()) return false; - shared_function_info_map_ = Map::cast(obj); + set_shared_function_info_map(Map::cast(obj)); ASSERT(!Heap::InNewSpace(Heap::empty_fixed_array())); return true; @@ -1230,9 +1274,7 @@ Object* Heap::AllocateHeapNumber(double value) { Object* Heap::AllocateJSGlobalPropertyCell(Object* value) { - Object* result = AllocateRaw(JSGlobalPropertyCell::kSize, - OLD_POINTER_SPACE, - OLD_POINTER_SPACE); + Object* result = AllocateRawCell(); if (result->IsFailure()) return result; HeapObject::cast(result)->set_map(global_property_cell_map()); 
JSGlobalPropertyCell::cast(result)->set_value(value); @@ -1254,15 +1296,15 @@ bool Heap::CreateApiObjects() { obj = AllocateMap(JS_OBJECT_TYPE, JSObject::kHeaderSize); if (obj->IsFailure()) return false; - neander_map_ = Map::cast(obj); + set_neander_map(Map::cast(obj)); - obj = Heap::AllocateJSObjectFromMap(neander_map_); + obj = Heap::AllocateJSObjectFromMap(neander_map()); if (obj->IsFailure()) return false; Object* elements = AllocateFixedArray(2); if (elements->IsFailure()) return false; FixedArray::cast(elements)->set(0, Smi::FromInt(0)); JSObject::cast(obj)->set_elements(FixedArray::cast(elements)); - message_listeners_ = JSObject::cast(obj); + set_message_listeners(JSObject::cast(obj)); return true; } @@ -1270,25 +1312,25 @@ bool Heap::CreateApiObjects() { void Heap::CreateCEntryStub() { CEntryStub stub; - c_entry_code_ = *stub.GetCode(); + set_c_entry_code(*stub.GetCode()); } void Heap::CreateCEntryDebugBreakStub() { CEntryDebugBreakStub stub; - c_entry_debug_break_code_ = *stub.GetCode(); + set_c_entry_debug_break_code(*stub.GetCode()); } void Heap::CreateJSEntryStub() { JSEntryStub stub; - js_entry_code_ = *stub.GetCode(); + set_js_entry_code(*stub.GetCode()); } void Heap::CreateJSConstructEntryStub() { JSConstructEntryStub stub; - js_construct_entry_code_ = *stub.GetCode(); + set_js_construct_entry_code(*stub.GetCode()); } @@ -1319,34 +1361,35 @@ bool Heap::CreateInitialObjects() { // The -0 value must be set before NumberFromDouble works. 
obj = AllocateHeapNumber(-0.0, TENURED); if (obj->IsFailure()) return false; - minus_zero_value_ = obj; - ASSERT(signbit(minus_zero_value_->Number()) != 0); + set_minus_zero_value(obj); + ASSERT(signbit(minus_zero_value()->Number()) != 0); obj = AllocateHeapNumber(OS::nan_value(), TENURED); if (obj->IsFailure()) return false; - nan_value_ = obj; + set_nan_value(obj); obj = Allocate(oddball_map(), OLD_DATA_SPACE); if (obj->IsFailure()) return false; - undefined_value_ = obj; + set_undefined_value(obj); ASSERT(!InNewSpace(undefined_value())); // Allocate initial symbol table. obj = SymbolTable::Allocate(kInitialSymbolTableSize); if (obj->IsFailure()) return false; - symbol_table_ = obj; + // Don't use set_symbol_table() due to asserts. + roots_[kSymbolTableRootIndex] = obj; // Assign the print strings for oddballs after creating symboltable. Object* symbol = LookupAsciiSymbol("undefined"); if (symbol->IsFailure()) return false; - Oddball::cast(undefined_value_)->set_to_string(String::cast(symbol)); - Oddball::cast(undefined_value_)->set_to_number(nan_value_); + Oddball::cast(undefined_value())->set_to_string(String::cast(symbol)); + Oddball::cast(undefined_value())->set_to_number(nan_value()); // Assign the print strings for oddballs after creating symboltable. 
symbol = LookupAsciiSymbol("null"); if (symbol->IsFailure()) return false; - Oddball::cast(null_value_)->set_to_string(String::cast(symbol)); - Oddball::cast(null_value_)->set_to_number(Smi::FromInt(0)); + Oddball::cast(null_value())->set_to_string(String::cast(symbol)); + Oddball::cast(null_value())->set_to_number(Smi::FromInt(0)); // Allocate the null_value obj = Oddball::cast(null_value())->Initialize("null", Smi::FromInt(0)); @@ -1354,32 +1397,31 @@ bool Heap::CreateInitialObjects() { obj = CreateOddball(oddball_map(), "true", Smi::FromInt(1)); if (obj->IsFailure()) return false; - true_value_ = obj; + set_true_value(obj); obj = CreateOddball(oddball_map(), "false", Smi::FromInt(0)); if (obj->IsFailure()) return false; - false_value_ = obj; + set_false_value(obj); obj = CreateOddball(oddball_map(), "hole", Smi::FromInt(-1)); if (obj->IsFailure()) return false; - the_hole_value_ = obj; + set_the_hole_value(obj); // Allocate the empty string. obj = AllocateRawAsciiString(0, TENURED); if (obj->IsFailure()) return false; - empty_string_ = String::cast(obj); + set_empty_string(String::cast(obj)); -#define SYMBOL_INITIALIZE(name, string) \ - obj = LookupAsciiSymbol(string); \ - if (obj->IsFailure()) return false; \ - (name##_) = String::cast(obj); - SYMBOL_LIST(SYMBOL_INITIALIZE) -#undef SYMBOL_INITIALIZE + for (unsigned i = 0; i < ARRAY_SIZE(constant_symbol_table); i++) { + obj = LookupAsciiSymbol(constant_symbol_table[i].contents); + if (obj->IsFailure()) return false; + roots_[constant_symbol_table[i].index] = String::cast(obj); + } // Allocate the hidden symbol which is used to identify the hidden properties // in JSObjects. The hash code has a special value so that it will not match // the empty string when searching for the property. It cannot be part of the - // SYMBOL_LIST because it needs to be allocated manually with the special + // loop above because it needs to be allocated manually with the special // hash code in place. 
The hash code for the hidden_symbol is zero to ensure // that it will always be at the first entry in property descriptors. obj = AllocateSymbol(CStrVector(""), 0, String::kHashComputedMask); @@ -1389,37 +1431,37 @@ bool Heap::CreateInitialObjects() { // Allocate the proxy for __proto__. obj = AllocateProxy((Address) &Accessors::ObjectPrototype); if (obj->IsFailure()) return false; - prototype_accessors_ = Proxy::cast(obj); + set_prototype_accessors(Proxy::cast(obj)); // Allocate the code_stubs dictionary. obj = NumberDictionary::Allocate(4); if (obj->IsFailure()) return false; - code_stubs_ = NumberDictionary::cast(obj); + set_code_stubs(NumberDictionary::cast(obj)); // Allocate the non_monomorphic_cache used in stub-cache.cc obj = NumberDictionary::Allocate(4); if (obj->IsFailure()) return false; - non_monomorphic_cache_ = NumberDictionary::cast(obj); + set_non_monomorphic_cache(NumberDictionary::cast(obj)); CreateFixedStubs(); // Allocate the number->string conversion cache obj = AllocateFixedArray(kNumberStringCacheSize * 2); if (obj->IsFailure()) return false; - number_string_cache_ = FixedArray::cast(obj); + set_number_string_cache(FixedArray::cast(obj)); // Allocate cache for single character strings. obj = AllocateFixedArray(String::kMaxAsciiCharCode+1); if (obj->IsFailure()) return false; - single_character_string_cache_ = FixedArray::cast(obj); + set_single_character_string_cache(FixedArray::cast(obj)); // Allocate cache for external strings pointing to native source code. obj = AllocateFixedArray(Natives::GetBuiltinsCount()); if (obj->IsFailure()) return false; - natives_source_cache_ = FixedArray::cast(obj); + set_natives_source_cache(FixedArray::cast(obj)); // Handling of script id generation is in Factory::NewScript. - last_script_id_ = undefined_value(); + set_last_script_id(undefined_value()); // Initialize keyed lookup cache. 
KeyedLookupCache::Clear(); @@ -1457,13 +1499,13 @@ Object* Heap::GetNumberStringCache(Object* number) { } else { hash = double_get_hash(number->Number()); } - Object* key = number_string_cache_->get(hash * 2); + Object* key = number_string_cache()->get(hash * 2); if (key == number) { - return String::cast(number_string_cache_->get(hash * 2 + 1)); + return String::cast(number_string_cache()->get(hash * 2 + 1)); } else if (key->IsHeapNumber() && number->IsHeapNumber() && key->Number() == number->Number()) { - return String::cast(number_string_cache_->get(hash * 2 + 1)); + return String::cast(number_string_cache()->get(hash * 2 + 1)); } return undefined_value(); } @@ -1473,12 +1515,12 @@ void Heap::SetNumberStringCache(Object* number, String* string) { int hash; if (number->IsSmi()) { hash = smi_get_hash(Smi::cast(number)); - number_string_cache_->set(hash * 2, number, SKIP_WRITE_BARRIER); + number_string_cache()->set(hash * 2, number, SKIP_WRITE_BARRIER); } else { hash = double_get_hash(number->Number()); - number_string_cache_->set(hash * 2, number); + number_string_cache()->set(hash * 2, number); } - number_string_cache_->set(hash * 2 + 1, string); + number_string_cache()->set(hash * 2 + 1, string); } @@ -1491,19 +1533,19 @@ Object* Heap::SmiOrNumberFromDouble(double value, static const DoubleRepresentation plus_zero(0.0); static const DoubleRepresentation minus_zero(-0.0); static const DoubleRepresentation nan(OS::nan_value()); - ASSERT(minus_zero_value_ != NULL); + ASSERT(minus_zero_value() != NULL); ASSERT(sizeof(plus_zero.value) == sizeof(plus_zero.bits)); DoubleRepresentation rep(value); if (rep.bits == plus_zero.bits) return Smi::FromInt(0); // not uncommon if (rep.bits == minus_zero.bits) { return new_object ? AllocateHeapNumber(-0.0, pretenure) - : minus_zero_value_; + : minus_zero_value(); } if (rep.bits == nan.bits) { return new_object ? 
AllocateHeapNumber(OS::nan_value(), pretenure) - : nan_value_; + : nan_value(); } // Try to represent the value as a tagged small integer. @@ -1809,7 +1851,7 @@ void Heap::CreateFillerObjectAt(Address addr, int size) { if (size == 0) return; HeapObject* filler = HeapObject::FromAddress(addr); if (size == kPointerSize) { - filler->set_map(Heap::one_word_filler_map()); + filler->set_map(Heap::one_pointer_filler_map()); } else { filler->set_map(Heap::byte_array_map()); ByteArray::cast(filler)->set_length(ByteArray::LengthFor(size)); @@ -2685,6 +2727,8 @@ void Heap::ReportHeapStatistics(const char* title) { code_space_->ReportStatistics(); PrintF("Map space : "); map_space_->ReportStatistics(); + PrintF("Cell space : "); + cell_space_->ReportStatistics(); PrintF("Large object space : "); lo_space_->ReportStatistics(); PrintF(">>>>>> ========================================= >>>>>>\n"); @@ -2705,6 +2749,7 @@ bool Heap::Contains(Address addr) { old_data_space_->Contains(addr) || code_space_->Contains(addr) || map_space_->Contains(addr) || + cell_space_->Contains(addr) || lo_space_->SlowContains(addr)); } @@ -2729,6 +2774,8 @@ bool Heap::InSpace(Address addr, AllocationSpace space) { return code_space_->Contains(addr); case MAP_SPACE: return map_space_->Contains(addr); + case CELL_SPACE: + return cell_space_->Contains(addr); case LO_SPACE: return lo_space_->SlowContains(addr); } @@ -2742,22 +2789,31 @@ void Heap::Verify() { ASSERT(HasBeenSetup()); VerifyPointersVisitor visitor; - Heap::IterateRoots(&visitor); + IterateRoots(&visitor); - AllSpaces spaces; - while (Space* space = spaces.next()) { - space->Verify(); - } + new_space_.Verify(); + + VerifyPointersAndRSetVisitor rset_visitor; + old_pointer_space_->Verify(&rset_visitor); + map_space_->Verify(&rset_visitor); + + VerifyPointersVisitor no_rset_visitor; + old_data_space_->Verify(&no_rset_visitor); + code_space_->Verify(&no_rset_visitor); + cell_space_->Verify(&no_rset_visitor); + + lo_space_->Verify(); } #endif // 
DEBUG Object* Heap::LookupSymbol(Vector<const char> string) { Object* symbol = NULL; - Object* new_table = - SymbolTable::cast(symbol_table_)->LookupSymbol(string, &symbol); + Object* new_table = symbol_table()->LookupSymbol(string, &symbol); if (new_table->IsFailure()) return new_table; - symbol_table_ = new_table; + // Can't use set_symbol_table because SymbolTable::cast knows that + // SymbolTable is a singleton and checks for identity. + roots_[kSymbolTableRootIndex] = new_table; ASSERT(symbol != NULL); return symbol; } @@ -2766,10 +2822,11 @@ Object* Heap::LookupSymbol(Vector<const char> string) { Object* Heap::LookupSymbol(String* string) { if (string->IsSymbol()) return string; Object* symbol = NULL; - Object* new_table = - SymbolTable::cast(symbol_table_)->LookupString(string, &symbol); + Object* new_table = symbol_table()->LookupString(string, &symbol); if (new_table->IsFailure()) return new_table; - symbol_table_ = new_table; + // Can't use set_symbol_table because SymbolTable::cast knows that + // SymbolTable is a singleton and checks for identity. 
+ roots_[kSymbolTableRootIndex] = new_table; ASSERT(symbol != NULL); return symbol; } @@ -2780,8 +2837,7 @@ bool Heap::LookupSymbolIfExists(String* string, String** symbol) { *symbol = string; return true; } - SymbolTable* table = SymbolTable::cast(symbol_table_); - return table->LookupSymbolIfExists(string, symbol); + return symbol_table()->LookupSymbolIfExists(string, symbol); } @@ -2868,28 +2924,15 @@ void Heap::IterateRSet(PagedSpace* space, ObjectSlotCallback copy_object_func) { void Heap::IterateRoots(ObjectVisitor* v) { IterateStrongRoots(v); - v->VisitPointer(reinterpret_cast<Object**>(&symbol_table_)); + v->VisitPointer(reinterpret_cast<Object**>(&roots_[kSymbolTableRootIndex])); SYNCHRONIZE_TAG("symbol_table"); } void Heap::IterateStrongRoots(ObjectVisitor* v) { -#define ROOT_ITERATE(type, name) \ - v->VisitPointer(bit_cast<Object**, type**>(&name##_)); - STRONG_ROOT_LIST(ROOT_ITERATE); -#undef ROOT_ITERATE + v->VisitPointers(&roots_[0], &roots_[kStrongRootListLength]); SYNCHRONIZE_TAG("strong_root_list"); -#define STRUCT_MAP_ITERATE(NAME, Name, name) \ - v->VisitPointer(bit_cast<Object**, Map**>(&name##_map_)); - STRUCT_LIST(STRUCT_MAP_ITERATE); -#undef STRUCT_MAP_ITERATE - SYNCHRONIZE_TAG("struct_map"); - -#define SYMBOL_ITERATE(name, string) \ - v->VisitPointer(bit_cast<Object**, String**>(&name##_)); - SYMBOL_LIST(SYMBOL_ITERATE) -#undef SYMBOL_ITERATE v->VisitPointer(bit_cast<Object**, String**>(&hidden_symbol_)); SYNCHRONIZE_TAG("symbol"); @@ -2964,6 +3007,7 @@ int Heap::PromotedSpaceSize() { + old_data_space_->Size() + code_space_->Size() + map_space_->Size() + + cell_space_->Size() + lo_space_->Size(); } @@ -3041,6 +3085,13 @@ bool Heap::Setup(bool create_heap_objects) { // enough to hold at least a page will cause it to allocate. if (!map_space_->Setup(NULL, 0)) return false; + // Initialize global property cell space. 
+ cell_space_ = new CellSpace(old_generation_size_, CELL_SPACE); + if (cell_space_ == NULL) return false; + // Setting up a paged space without giving it a virtual memory range big + // enough to hold at least a page will cause it to allocate. + if (!cell_space_->Setup(NULL, 0)) return false; + // The large object code space may contain code or data. We set the memory // to be non-executable here for safety, but this means we need to enable it // explicitly when allocating large code objects. @@ -3093,6 +3144,12 @@ void Heap::TearDown() { map_space_ = NULL; } + if (cell_space_ != NULL) { + cell_space_->TearDown(); + delete cell_space_; + cell_space_ = NULL; + } + if (lo_space_ != NULL) { lo_space_->TearDown(); delete lo_space_; @@ -3104,11 +3161,9 @@ void Heap::TearDown() { void Heap::Shrink() { - // Try to shrink map, old, and code spaces. - map_space_->Shrink(); - old_pointer_space_->Shrink(); - old_data_space_->Shrink(); - code_space_->Shrink(); + // Try to shrink all paged spaces. + PagedSpaces spaces; + while (PagedSpace* space = spaces.next()) space->Shrink(); } @@ -3116,24 +3171,16 @@ void Heap::Shrink() { void Heap::Protect() { if (HasBeenSetup()) { - new_space_.Protect(); - map_space_->Protect(); - old_pointer_space_->Protect(); - old_data_space_->Protect(); - code_space_->Protect(); - lo_space_->Protect(); + AllSpaces spaces; + while (Space* space = spaces.next()) space->Protect(); } } void Heap::Unprotect() { if (HasBeenSetup()) { - new_space_.Unprotect(); - map_space_->Unprotect(); - old_pointer_space_->Unprotect(); - old_data_space_->Unprotect(); - code_space_->Unprotect(); - lo_space_->Unprotect(); + AllSpaces spaces; + while (Space* space = spaces.next()) space->Unprotect(); } } @@ -3171,6 +3218,8 @@ Space* AllSpaces::next() { return Heap::code_space(); case MAP_SPACE: return Heap::map_space(); + case CELL_SPACE: + return Heap::cell_space(); case LO_SPACE: return Heap::lo_space(); default: @@ -3189,6 +3238,8 @@ PagedSpace* PagedSpaces::next() { 
return Heap::code_space(); case MAP_SPACE: return Heap::map_space(); + case CELL_SPACE: + return Heap::cell_space(); default: return NULL; } @@ -3262,6 +3313,9 @@ ObjectIterator* SpaceIterator::CreateIterator() { case MAP_SPACE: iterator_ = new HeapObjectIterator(Heap::map_space()); break; + case CELL_SPACE: + iterator_ = new HeapObjectIterator(Heap::cell_space()); + break; case LO_SPACE: iterator_ = new LargeObjectIterator(Heap::lo_space()); break; @@ -3366,8 +3420,8 @@ void HeapProfiler::WriteSample() { // Lump all the string types together. int string_number = 0; int string_bytes = 0; -#define INCREMENT_SIZE(type, size, name) \ - string_number += info[type].number(); \ +#define INCREMENT_SIZE(type, size, name, camel_name) \ + string_number += info[type].number(); \ string_bytes += info[type].bytes(); STRING_TYPE_LIST(INCREMENT_SIZE) #undef INCREMENT_SIZE @@ -3587,6 +3641,10 @@ GCTracer::~GCTracer() { CollectorString(), start_size_, SizeOfHeapObjects(), static_cast<int>(OS::TimeCurrentMillis() - start_time_)); + +#if defined(ENABLE_LOGGING_AND_PROFILING) + Heap::PrintShortHeapStatistics(); +#endif } diff --git a/V8Binding/v8/src/heap.h b/V8Binding/v8/src/heap.h index 9f61ce2..55a66bc 100644 --- a/V8Binding/v8/src/heap.h +++ b/V8Binding/v8/src/heap.h @@ -34,105 +34,107 @@ namespace v8 { namespace internal { // Defines all the roots in Heap. 
-#define STRONG_ROOT_LIST(V) \ - V(Map, meta_map) \ - V(Map, heap_number_map) \ - V(Map, short_string_map) \ - V(Map, medium_string_map) \ - V(Map, long_string_map) \ - V(Map, short_ascii_string_map) \ - V(Map, medium_ascii_string_map) \ - V(Map, long_ascii_string_map) \ - V(Map, short_symbol_map) \ - V(Map, medium_symbol_map) \ - V(Map, long_symbol_map) \ - V(Map, short_ascii_symbol_map) \ - V(Map, medium_ascii_symbol_map) \ - V(Map, long_ascii_symbol_map) \ - V(Map, short_cons_symbol_map) \ - V(Map, medium_cons_symbol_map) \ - V(Map, long_cons_symbol_map) \ - V(Map, short_cons_ascii_symbol_map) \ - V(Map, medium_cons_ascii_symbol_map) \ - V(Map, long_cons_ascii_symbol_map) \ - V(Map, short_sliced_symbol_map) \ - V(Map, medium_sliced_symbol_map) \ - V(Map, long_sliced_symbol_map) \ - V(Map, short_sliced_ascii_symbol_map) \ - V(Map, medium_sliced_ascii_symbol_map) \ - V(Map, long_sliced_ascii_symbol_map) \ - V(Map, short_external_symbol_map) \ - V(Map, medium_external_symbol_map) \ - V(Map, long_external_symbol_map) \ - V(Map, short_external_ascii_symbol_map) \ - V(Map, medium_external_ascii_symbol_map) \ - V(Map, long_external_ascii_symbol_map) \ - V(Map, short_cons_string_map) \ - V(Map, medium_cons_string_map) \ - V(Map, long_cons_string_map) \ - V(Map, short_cons_ascii_string_map) \ - V(Map, medium_cons_ascii_string_map) \ - V(Map, long_cons_ascii_string_map) \ - V(Map, short_sliced_string_map) \ - V(Map, medium_sliced_string_map) \ - V(Map, long_sliced_string_map) \ - V(Map, short_sliced_ascii_string_map) \ - V(Map, medium_sliced_ascii_string_map) \ - V(Map, long_sliced_ascii_string_map) \ - V(Map, short_external_string_map) \ - V(Map, medium_external_string_map) \ - V(Map, long_external_string_map) \ - V(Map, short_external_ascii_string_map) \ - V(Map, medium_external_ascii_string_map) \ - V(Map, long_external_ascii_string_map) \ - V(Map, undetectable_short_string_map) \ - V(Map, undetectable_medium_string_map) \ - V(Map, undetectable_long_string_map) \ - 
V(Map, undetectable_short_ascii_string_map) \ - V(Map, undetectable_medium_ascii_string_map) \ - V(Map, undetectable_long_ascii_string_map) \ - V(Map, byte_array_map) \ - V(Map, fixed_array_map) \ - V(Map, hash_table_map) \ - V(Map, context_map) \ - V(Map, catch_context_map) \ - V(Map, global_context_map) \ - V(Map, code_map) \ - V(Map, oddball_map) \ - V(Map, global_property_cell_map) \ - V(Map, boilerplate_function_map) \ - V(Map, shared_function_info_map) \ - V(Map, proxy_map) \ - V(Map, one_word_filler_map) \ - V(Map, two_word_filler_map) \ - V(Object, nan_value) \ - V(Object, undefined_value) \ - V(Object, minus_zero_value) \ - V(Object, null_value) \ - V(Object, true_value) \ - V(Object, false_value) \ - V(String, empty_string) \ - V(FixedArray, empty_fixed_array) \ - V(DescriptorArray, empty_descriptor_array) \ - V(Object, the_hole_value) \ - V(Map, neander_map) \ - V(JSObject, message_listeners) \ - V(Proxy, prototype_accessors) \ - V(NumberDictionary, code_stubs) \ - V(NumberDictionary, non_monomorphic_cache) \ - V(Code, js_entry_code) \ - V(Code, js_construct_entry_code) \ - V(Code, c_entry_code) \ - V(Code, c_entry_debug_break_code) \ - V(FixedArray, number_string_cache) \ - V(FixedArray, single_character_string_cache) \ - V(FixedArray, natives_source_cache) \ - V(Object, last_script_id) +#define STRONG_ROOT_LIST(V) \ + V(Map, meta_map, MetaMap) \ + V(Map, heap_number_map, HeapNumberMap) \ + V(Map, short_string_map, ShortStringMap) \ + V(Map, medium_string_map, MediumStringMap) \ + V(Map, long_string_map, LongStringMap) \ + V(Map, short_ascii_string_map, ShortAsciiStringMap) \ + V(Map, medium_ascii_string_map, MediumAsciiStringMap) \ + V(Map, long_ascii_string_map, LongAsciiStringMap) \ + V(Map, short_symbol_map, ShortSymbolMap) \ + V(Map, medium_symbol_map, MediumSymbolMap) \ + V(Map, long_symbol_map, LongSymbolMap) \ + V(Map, short_ascii_symbol_map, ShortAsciiSymbolMap) \ + V(Map, medium_ascii_symbol_map, MediumAsciiSymbolMap) \ + V(Map, 
long_ascii_symbol_map, LongAsciiSymbolMap) \ + V(Map, short_cons_symbol_map, ShortConsSymbolMap) \ + V(Map, medium_cons_symbol_map, MediumConsSymbolMap) \ + V(Map, long_cons_symbol_map, LongConsSymbolMap) \ + V(Map, short_cons_ascii_symbol_map, ShortConsAsciiSymbolMap) \ + V(Map, medium_cons_ascii_symbol_map, MediumConsAsciiSymbolMap) \ + V(Map, long_cons_ascii_symbol_map, LongConsAsciiSymbolMap) \ + V(Map, short_sliced_symbol_map, ShortSlicedSymbolMap) \ + V(Map, medium_sliced_symbol_map, MediumSlicedSymbolMap) \ + V(Map, long_sliced_symbol_map, LongSlicedSymbolMap) \ + V(Map, short_sliced_ascii_symbol_map, ShortSlicedAsciiSymbolMap) \ + V(Map, medium_sliced_ascii_symbol_map, MediumSlicedAsciiSymbolMap) \ + V(Map, long_sliced_ascii_symbol_map, LongSlicedAsciiSymbolMap) \ + V(Map, short_external_symbol_map, ShortExternalSymbolMap) \ + V(Map, medium_external_symbol_map, MediumExternalSymbolMap) \ + V(Map, long_external_symbol_map, LongExternalSymbolMap) \ + V(Map, short_external_ascii_symbol_map, ShortExternalAsciiSymbolMap) \ + V(Map, medium_external_ascii_symbol_map, MediumExternalAsciiSymbolMap) \ + V(Map, long_external_ascii_symbol_map, LongExternalAsciiSymbolMap) \ + V(Map, short_cons_string_map, ShortConsStringMap) \ + V(Map, medium_cons_string_map, MediumConsStringMap) \ + V(Map, long_cons_string_map, LongConsStringMap) \ + V(Map, short_cons_ascii_string_map, ShortConsAsciiStringMap) \ + V(Map, medium_cons_ascii_string_map, MediumConsAsciiStringMap) \ + V(Map, long_cons_ascii_string_map, LongConsAsciiStringMap) \ + V(Map, short_sliced_string_map, ShortSlicedStringMap) \ + V(Map, medium_sliced_string_map, MediumSlicedStringMap) \ + V(Map, long_sliced_string_map, LongSlicedStringMap) \ + V(Map, short_sliced_ascii_string_map, ShortSlicedAsciiStringMap) \ + V(Map, medium_sliced_ascii_string_map, MediumSlicedAsciiStringMap) \ + V(Map, long_sliced_ascii_string_map, LongSlicedAsciiStringMap) \ + V(Map, short_external_string_map, ShortExternalStringMap) \ + V(Map, 
medium_external_string_map, MediumExternalStringMap) \ + V(Map, long_external_string_map, LongExternalStringMap) \ + V(Map, short_external_ascii_string_map, ShortExternalAsciiStringMap) \ + V(Map, medium_external_ascii_string_map, MediumExternalAsciiStringMap) \ + V(Map, long_external_ascii_string_map, LongExternalAsciiStringMap) \ + V(Map, undetectable_short_string_map, UndetectableShortStringMap) \ + V(Map, undetectable_medium_string_map, UndetectableMediumStringMap) \ + V(Map, undetectable_long_string_map, UndetectableLongStringMap) \ + V(Map, undetectable_short_ascii_string_map, UndetectableShortAsciiStringMap) \ + V(Map, \ + undetectable_medium_ascii_string_map, \ + UndetectableMediumAsciiStringMap) \ + V(Map, undetectable_long_ascii_string_map, UndetectableLongAsciiStringMap) \ + V(Map, byte_array_map, ByteArrayMap) \ + V(Map, fixed_array_map, FixedArrayMap) \ + V(Map, hash_table_map, HashTableMap) \ + V(Map, context_map, ContextMap) \ + V(Map, catch_context_map, CatchContextMap) \ + V(Map, global_context_map, GlobalContextMap) \ + V(Map, code_map, CodeMap) \ + V(Map, oddball_map, OddballMap) \ + V(Map, global_property_cell_map, GlobalPropertyCellMap) \ + V(Map, boilerplate_function_map, BoilerplateFunctionMap) \ + V(Map, shared_function_info_map, SharedFunctionInfoMap) \ + V(Map, proxy_map, ProxyMap) \ + V(Map, one_pointer_filler_map, OnePointerFillerMap) \ + V(Map, two_pointer_filler_map, TwoPointerFillerMap) \ + V(Object, nan_value, NanValue) \ + V(Object, undefined_value, UndefinedValue) \ + V(Object, minus_zero_value, MinusZeroValue) \ + V(Object, null_value, NullValue) \ + V(Object, true_value, TrueValue) \ + V(Object, false_value, FalseValue) \ + V(String, empty_string, EmptyString) \ + V(FixedArray, empty_fixed_array, EmptyFixedArray) \ + V(DescriptorArray, empty_descriptor_array, EmptyDescriptorArray) \ + V(Object, the_hole_value, TheHoleValue) \ + V(Map, neander_map, NeanderMap) \ + V(JSObject, message_listeners, MessageListeners) \ + V(Proxy, 
prototype_accessors, PrototypeAccessors) \ + V(NumberDictionary, code_stubs, CodeStubs) \ + V(NumberDictionary, non_monomorphic_cache, NonMonomorphicCache) \ + V(Code, js_entry_code, JsEntryCode) \ + V(Code, js_construct_entry_code, JsConstructEntryCode) \ + V(Code, c_entry_code, CEntryCode) \ + V(Code, c_entry_debug_break_code, CEntryDebugBreakCode) \ + V(FixedArray, number_string_cache, NumberStringCache) \ + V(FixedArray, single_character_string_cache, SingleCharacterStringCache) \ + V(FixedArray, natives_source_cache, NativesSourceCache) \ + V(Object, last_script_id, LastScriptId) #define ROOT_LIST(V) \ STRONG_ROOT_LIST(V) \ - V(Object, symbol_table) + V(SymbolTable, symbol_table, SymbolTable) #define SYMBOL_LIST(V) \ V(Array_symbol, "Array") \ @@ -261,6 +263,7 @@ class Heap : public AllStatic { static OldSpace* old_data_space() { return old_data_space_; } static OldSpace* code_space() { return code_space_; } static MapSpace* map_space() { return map_space_; } + static CellSpace* cell_space() { return cell_space_; } static LargeObjectSpace* lo_space() { return lo_space_; } static bool always_allocate() { return always_allocate_scope_depth_ != 0; } @@ -636,18 +639,29 @@ class Heap : public AllStatic { global_gc_epilogue_callback_ = callback; } - // Heap roots -#define ROOT_ACCESSOR(type, name) static type* name() { return name##_; } + // Heap root getters. We have versions with and without type::cast() here. + // You can't use type::cast during GC because the assert fails. 
+#define ROOT_ACCESSOR(type, name, camel_name) \ + static inline type* name() { \ + return type::cast(roots_[k##camel_name##RootIndex]); \ + } \ + static inline type* raw_unchecked_##name() { \ + return reinterpret_cast<type*>(roots_[k##camel_name##RootIndex]); \ + } ROOT_LIST(ROOT_ACCESSOR) #undef ROOT_ACCESSOR // Utility type maps -#define STRUCT_MAP_ACCESSOR(NAME, Name, name) \ - static Map* name##_map() { return name##_map_; } +#define STRUCT_MAP_ACCESSOR(NAME, Name, name) \ + static inline Map* name##_map() { \ + return Map::cast(roots_[k##Name##MapRootIndex]); \ + } STRUCT_LIST(STRUCT_MAP_ACCESSOR) #undef STRUCT_MAP_ACCESSOR -#define SYMBOL_ACCESSOR(name, str) static String* name() { return name##_; } +#define SYMBOL_ACCESSOR(name, str) static inline String* name() { \ + return String::cast(roots_[k##name##RootIndex]); \ + } SYMBOL_LIST(SYMBOL_ACCESSOR) #undef SYMBOL_ACCESSOR @@ -692,11 +706,13 @@ class Heap : public AllStatic { static inline AllocationSpace TargetSpaceId(InstanceType type); // Sets the stub_cache_ (only used when expanding the dictionary). - static void set_code_stubs(NumberDictionary* value) { code_stubs_ = value; } + static void public_set_code_stubs(NumberDictionary* value) { + roots_[kCodeStubsRootIndex] = value; + } // Sets the non_monomorphic_cache_ (only used when expanding the dictionary). - static void set_non_monomorphic_cache(NumberDictionary* value) { - non_monomorphic_cache_ = value; + static void public_set_non_monomorphic_cache(NumberDictionary* value) { + roots_[kNonMonomorphicCacheRootIndex] = value; } // Update the next script id. @@ -717,6 +733,11 @@ class Heap : public AllStatic { static void ZapFromSpace(); #endif +#if defined(ENABLE_LOGGING_AND_PROFILING) + // Print short heap statistics. + static void PrintShortHeapStatistics(); +#endif + // Makes a new symbol object // Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation // failed. 
@@ -837,6 +858,7 @@ class Heap : public AllStatic { static OldSpace* old_data_space_; static OldSpace* code_space_; static MapSpace* map_space_; + static CellSpace* cell_space_; static LargeObjectSpace* lo_space_; static HeapState gc_state_; @@ -849,6 +871,13 @@ class Heap : public AllStatic { static int mc_count_; // how many mark-compact collections happened static int gc_count_; // how many gc happened +#define ROOT_ACCESSOR(type, name, camel_name) \ + static inline void set_##name(type* value) { \ + roots_[k##camel_name##RootIndex] = value; \ + } + ROOT_LIST(ROOT_ACCESSOR) +#undef ROOT_ACCESSOR + #ifdef DEBUG static bool allocation_allowed_; @@ -883,20 +912,49 @@ class Heap : public AllStatic { // last GC. static int old_gen_exhausted_; - // Declare all the roots -#define ROOT_DECLARATION(type, name) static type* name##_; - ROOT_LIST(ROOT_DECLARATION) -#undef ROOT_DECLARATION + // Declare all the root indices. + enum RootListIndex { +#define ROOT_INDEX_DECLARATION(type, name, camel_name) k##camel_name##RootIndex, + STRONG_ROOT_LIST(ROOT_INDEX_DECLARATION) +#undef ROOT_INDEX_DECLARATION // Utility type maps -#define DECLARE_STRUCT_MAP(NAME, Name, name) static Map* name##_map_; +#define DECLARE_STRUCT_MAP(NAME, Name, name) k##Name##MapRootIndex, STRUCT_LIST(DECLARE_STRUCT_MAP) #undef DECLARE_STRUCT_MAP -#define SYMBOL_DECLARATION(name, str) static String* name##_; - SYMBOL_LIST(SYMBOL_DECLARATION) +#define SYMBOL_INDEX_DECLARATION(name, str) k##name##RootIndex, + SYMBOL_LIST(SYMBOL_INDEX_DECLARATION) #undef SYMBOL_DECLARATION + kSymbolTableRootIndex, + kStrongRootListLength = kSymbolTableRootIndex, + kRootListLength + }; + + static Object* roots_[kRootListLength]; + + struct StringTypeTable { + InstanceType type; + int size; + RootListIndex index; + }; + + struct ConstantSymbolTable { + const char* contents; + RootListIndex index; + }; + + struct StructTable { + InstanceType type; + int size; + RootListIndex index; + }; + + static const StringTypeTable 
string_type_table[]; + static const ConstantSymbolTable constant_symbol_table[]; + static const StructTable struct_table[]; + // The special hidden symbol which is an empty string, but does not match // any string when looked up in properties. static String* hidden_symbol_; @@ -924,7 +982,10 @@ class Heap : public AllStatic { // to Heap::AllocateRaw(size_in_bytes, MAP_SPACE), except that (a) it doesn't // have to test the allocation space argument and (b) can reduce code size // (since both AllocateRaw and AllocateRawMap are inlined). - static inline Object* AllocateRawMap(int size_in_bytes); + static inline Object* AllocateRawMap(); + + // Allocate an uninitialized object in the global property cell space. + static inline Object* AllocateRawCell(); // Initializes a JSObject based on its map. static void InitializeJSObjectFromMap(JSObject* obj, diff --git a/V8Binding/v8/src/ia32/assembler-ia32.cc b/V8Binding/v8/src/ia32/assembler-ia32.cc index f3cb854..02bde2a 100644 --- a/V8Binding/v8/src/ia32/assembler-ia32.cc +++ b/V8Binding/v8/src/ia32/assembler-ia32.cc @@ -114,8 +114,10 @@ void CpuFeatures::Probe() { CodeDesc desc; assm.GetCode(&desc); - Object* code = - Heap::CreateCode(desc, NULL, Code::ComputeFlags(Code::STUB), NULL); + Object* code = Heap::CreateCode(desc, + NULL, + Code::ComputeFlags(Code::STUB), + Handle<Code>::null()); if (!code->IsCode()) return; LOG(CodeCreateEvent(Logger::BUILTIN_TAG, Code::cast(code), "CpuFeatures::Probe")); diff --git a/V8Binding/v8/src/ia32/codegen-ia32.cc b/V8Binding/v8/src/ia32/codegen-ia32.cc index 3b2eaa0..6d1dc2d 100644 --- a/V8Binding/v8/src/ia32/codegen-ia32.cc +++ b/V8Binding/v8/src/ia32/codegen-ia32.cc @@ -7591,6 +7591,16 @@ void CEntryStub::GenerateCore(MacroAssembler* masm, __ dec(Operand::StaticVariable(scope_depth)); } + // Make sure we're not trying to return 'the hole' from the runtime + // call as this may lead to crashes in the IC code later. 
+ if (FLAG_debug_code) { + Label okay; + __ cmp(eax, Factory::the_hole_value()); + __ j(not_equal, &okay); + __ int3(); + __ bind(&okay); + } + // Check for failure result. Label failure_returned; ASSERT(((kFailureTag + 1) & kFailureTagMask) == 0); diff --git a/V8Binding/v8/src/ia32/ic-ia32.cc b/V8Binding/v8/src/ia32/ic-ia32.cc index 97de4da..90e0fd1 100644 --- a/V8Binding/v8/src/ia32/ic-ia32.cc +++ b/V8Binding/v8/src/ia32/ic-ia32.cc @@ -75,6 +75,12 @@ static void GenerateDictionaryLoad(MacroAssembler* masm, Label* miss_label, __ cmp(r0, JS_GLOBAL_PROXY_TYPE); __ j(equal, miss_label, not_taken); + // Possible work-around for http://crbug.com/16276. + __ cmp(r0, JS_GLOBAL_OBJECT_TYPE); + __ j(equal, miss_label, not_taken); + __ cmp(r0, JS_BUILTINS_OBJECT_TYPE); + __ j(equal, miss_label, not_taken); + // Check that the properties array is a dictionary. __ mov(r0, FieldOperand(r1, JSObject::kPropertiesOffset)); __ cmp(FieldOperand(r0, HeapObject::kMapOffset), diff --git a/V8Binding/v8/src/ia32/stub-cache-ia32.cc b/V8Binding/v8/src/ia32/stub-cache-ia32.cc index ce4981d..0a887d5 100644 --- a/V8Binding/v8/src/ia32/stub-cache-ia32.cc +++ b/V8Binding/v8/src/ia32/stub-cache-ia32.cc @@ -273,114 +273,6 @@ void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm, } -void StubCompiler::GenerateLoadField(MacroAssembler* masm, - JSObject* object, - JSObject* holder, - Register receiver, - Register scratch1, - Register scratch2, - int index, - Label* miss_label) { - // Check that the receiver isn't a smi. - __ test(receiver, Immediate(kSmiTagMask)); - __ j(zero, miss_label, not_taken); - - // Check that the maps haven't changed. - Register reg = - masm->CheckMaps(object, receiver, holder, scratch1, scratch2, miss_label); - - // Get the value from the properties. 
- GenerateFastPropertyLoad(masm, eax, reg, holder, index); - __ ret(0); -} - - -void StubCompiler::GenerateLoadCallback(MacroAssembler* masm, - JSObject* object, - JSObject* holder, - Register receiver, - Register name, - Register scratch1, - Register scratch2, - AccessorInfo* callback, - Label* miss_label) { - // Check that the receiver isn't a smi. - __ test(receiver, Immediate(kSmiTagMask)); - __ j(zero, miss_label, not_taken); - - // Check that the maps haven't changed. - Register reg = - masm->CheckMaps(object, receiver, holder, scratch1, scratch2, miss_label); - - // Push the arguments on the JS stack of the caller. - __ pop(scratch2); // remove return address - __ push(receiver); // receiver - __ push(Immediate(Handle<AccessorInfo>(callback))); // callback data - __ push(name); // name - __ push(reg); // holder - __ push(scratch2); // restore return address - - // Do tail-call to the runtime system. - ExternalReference load_callback_property = - ExternalReference(IC_Utility(IC::kLoadCallbackProperty)); - __ TailCallRuntime(load_callback_property, 4); -} - - -void StubCompiler::GenerateLoadConstant(MacroAssembler* masm, - JSObject* object, - JSObject* holder, - Register receiver, - Register scratch1, - Register scratch2, - Object* value, - Label* miss_label) { - // Check that the receiver isn't a smi. - __ test(receiver, Immediate(kSmiTagMask)); - __ j(zero, miss_label, not_taken); - - // Check that the maps haven't changed. - Register reg = - masm->CheckMaps(object, receiver, holder, scratch1, scratch2, miss_label); - - // Return the constant value. - __ mov(eax, Handle<Object>(value)); - __ ret(0); -} - - -void StubCompiler::GenerateLoadInterceptor(MacroAssembler* masm, - JSObject* object, - JSObject* holder, - Smi* lookup_hint, - Register receiver, - Register name, - Register scratch1, - Register scratch2, - Label* miss_label) { - // Check that the receiver isn't a smi. 
- __ test(receiver, Immediate(kSmiTagMask)); - __ j(zero, miss_label, not_taken); - - // Check that the maps haven't changed. - Register reg = - masm->CheckMaps(object, receiver, holder, scratch1, scratch2, miss_label); - - // Push the arguments on the JS stack of the caller. - __ pop(scratch2); // remove return address - __ push(receiver); // receiver - __ push(reg); // holder - __ push(name); // name - // TODO(367): Maybe don't push lookup_hint for LOOKUP_IN_HOLDER and/or - // LOOKUP_IN_PROTOTYPE, but use a special version of lookup method? - __ push(Immediate(lookup_hint)); - __ push(scratch2); // restore return address - - // Do tail-call to the runtime system. - ExternalReference load_ic_property = - ExternalReference(IC_Utility(IC::kLoadInterceptorProperty)); - __ TailCallRuntime(load_ic_property, 4); -} void StubCompiler::GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind) { @@ -474,10 +366,159 @@ void StubCompiler::GenerateStoreField(MacroAssembler* masm, #undef __ - #define __ ACCESS_MASM(masm()) +Register StubCompiler::CheckPrototypes(JSObject* object, + Register object_reg, + JSObject* holder, + Register holder_reg, + Register scratch, + String* name, + Label* miss) { + // Check that the maps haven't changed. + Register result = + masm()->CheckMaps(object, object_reg, holder, holder_reg, scratch, miss); + + // If we've skipped any global objects, it's not enough to verify + // that their maps haven't changed. 
+ while (object != holder) { + if (object->IsGlobalObject()) { + GlobalObject* global = GlobalObject::cast(object); + Object* probe = global->EnsurePropertyCell(name); + if (probe->IsFailure()) { + set_failure(Failure::cast(probe)); + return result; + } + JSGlobalPropertyCell* cell = JSGlobalPropertyCell::cast(probe); + ASSERT(cell->value()->IsTheHole()); + __ mov(scratch, Immediate(Handle<Object>(cell))); + __ cmp(FieldOperand(scratch, JSGlobalPropertyCell::kValueOffset), + Immediate(Factory::the_hole_value())); + __ j(not_equal, miss, not_taken); + } + object = JSObject::cast(object->GetPrototype()); + } + + // Return the register containin the holder. + return result; +} + + +void StubCompiler::GenerateLoadField(JSObject* object, + JSObject* holder, + Register receiver, + Register scratch1, + Register scratch2, + int index, + String* name, + Label* miss) { + // Check that the receiver isn't a smi. + __ test(receiver, Immediate(kSmiTagMask)); + __ j(zero, miss, not_taken); + + // Check the prototype chain. + Register reg = + CheckPrototypes(object, receiver, holder, + scratch1, scratch2, name, miss); + + // Get the value from the properties. + GenerateFastPropertyLoad(masm(), eax, reg, holder, index); + __ ret(0); +} + + +void StubCompiler::GenerateLoadCallback(JSObject* object, + JSObject* holder, + Register receiver, + Register name_reg, + Register scratch1, + Register scratch2, + AccessorInfo* callback, + String* name, + Label* miss) { + // Check that the receiver isn't a smi. + __ test(receiver, Immediate(kSmiTagMask)); + __ j(zero, miss, not_taken); + + // Check that the maps haven't changed. + Register reg = + CheckPrototypes(object, receiver, holder, + scratch1, scratch2, name, miss); + + // Push the arguments on the JS stack of the caller. 
+ __ pop(scratch2); // remove return address + __ push(receiver); // receiver + __ push(Immediate(Handle<AccessorInfo>(callback))); // callback data + __ push(name_reg); // name + __ push(reg); // holder + __ push(scratch2); // restore return address + + // Do tail-call to the runtime system. + ExternalReference load_callback_property = + ExternalReference(IC_Utility(IC::kLoadCallbackProperty)); + __ TailCallRuntime(load_callback_property, 4); +} + + +void StubCompiler::GenerateLoadConstant(JSObject* object, + JSObject* holder, + Register receiver, + Register scratch1, + Register scratch2, + Object* value, + String* name, + Label* miss) { + // Check that the receiver isn't a smi. + __ test(receiver, Immediate(kSmiTagMask)); + __ j(zero, miss, not_taken); + + // Check that the maps haven't changed. + Register reg = + CheckPrototypes(object, receiver, holder, + scratch1, scratch2, name, miss); + + // Return the constant value. + __ mov(eax, Handle<Object>(value)); + __ ret(0); +} + + +void StubCompiler::GenerateLoadInterceptor(JSObject* object, + JSObject* holder, + Smi* lookup_hint, + Register receiver, + Register name_reg, + Register scratch1, + Register scratch2, + String* name, + Label* miss) { + // Check that the receiver isn't a smi. + __ test(receiver, Immediate(kSmiTagMask)); + __ j(zero, miss, not_taken); + + // Check that the maps haven't changed. + Register reg = + CheckPrototypes(object, receiver, holder, + scratch1, scratch2, name, miss); + + // Push the arguments on the JS stack of the caller. + __ pop(scratch2); // remove return address + __ push(receiver); // receiver + __ push(reg); // holder + __ push(name_reg); // name + // TODO(367): Maybe don't push lookup_hint for LOOKUP_IN_HOLDER and/or + // LOOKUP_IN_PROTOTYPE, but use a special version of lookup method? + __ push(Immediate(lookup_hint)); + __ push(scratch2); // restore return address + + // Do tail-call to the runtime system. 
+ ExternalReference load_ic_property = + ExternalReference(IC_Utility(IC::kLoadInterceptorProperty)); + __ TailCallRuntime(load_ic_property, 4); +} + + // TODO(1241006): Avoid having lazy compile stubs specialized by the // number of arguments. It is not needed anymore. Object* StubCompiler::CompileLazyCompile(Code::Flags flags) { @@ -520,7 +561,8 @@ Object* CallStubCompiler::CompileCallField(Object* object, // Do the right check and compute the holder register. Register reg = - masm()->CheckMaps(JSObject::cast(object), edx, holder, ebx, ecx, &miss); + CheckPrototypes(JSObject::cast(object), edx, holder, + ebx, ecx, name, &miss); GenerateFastPropertyLoad(masm(), edi, reg, holder, index); @@ -553,6 +595,7 @@ Object* CallStubCompiler::CompileCallField(Object* object, Object* CallStubCompiler::CompileCallConstant(Object* object, JSObject* holder, JSFunction* function, + String* name, CheckType check) { // ----------- S t a t e ------------- // ----------------------------------- @@ -575,7 +618,8 @@ Object* CallStubCompiler::CompileCallConstant(Object* object, switch (check) { case RECEIVER_MAP_CHECK: // Check that the maps haven't changed. - __ CheckMaps(JSObject::cast(object), edx, holder, ebx, ecx, &miss); + CheckPrototypes(JSObject::cast(object), edx, holder, + ebx, ecx, name, &miss); // Patch the receiver on the stack with the global proxy if // necessary. 
@@ -595,8 +639,8 @@ Object* CallStubCompiler::CompileCallConstant(Object* object, GenerateLoadGlobalFunctionPrototype(masm(), Context::STRING_FUNCTION_INDEX, ecx); - __ CheckMaps(JSObject::cast(object->GetPrototype()), - ecx, holder, ebx, edx, &miss); + CheckPrototypes(JSObject::cast(object->GetPrototype()), ecx, holder, + ebx, edx, name, &miss); break; case NUMBER_CHECK: { @@ -611,8 +655,8 @@ Object* CallStubCompiler::CompileCallConstant(Object* object, GenerateLoadGlobalFunctionPrototype(masm(), Context::NUMBER_FUNCTION_INDEX, ecx); - __ CheckMaps(JSObject::cast(object->GetPrototype()), - ecx, holder, ebx, edx, &miss); + CheckPrototypes(JSObject::cast(object->GetPrototype()), ecx, holder, + ebx, edx, name, &miss); break; } @@ -628,13 +672,14 @@ Object* CallStubCompiler::CompileCallConstant(Object* object, GenerateLoadGlobalFunctionPrototype(masm(), Context::BOOLEAN_FUNCTION_INDEX, ecx); - __ CheckMaps(JSObject::cast(object->GetPrototype()), - ecx, holder, ebx, edx, &miss); + CheckPrototypes(JSObject::cast(object->GetPrototype()), ecx, holder, + ebx, edx, name, &miss); break; } case JSARRAY_HAS_FAST_ELEMENTS_CHECK: - __ CheckMaps(JSObject::cast(object), edx, holder, ebx, ecx, &miss); + CheckPrototypes(JSObject::cast(object), edx, holder, + ebx, ecx, name, &miss); // Make sure object->elements()->map() != Heap::dictionary_array_map() // Get the elements array of the object. __ mov(ebx, FieldOperand(edx, JSObject::kElementsOffset)); @@ -692,7 +737,8 @@ Object* CallStubCompiler::CompileCallInterceptor(Object* object, // Check that maps have not changed and compute the holder register. Register reg = - masm()->CheckMaps(JSObject::cast(object), edx, holder, ebx, ecx, &miss); + CheckPrototypes(JSObject::cast(object), edx, holder, + ebx, ecx, name, &miss); // Enter an internal frame. 
__ EnterInternalFrame(); @@ -745,7 +791,8 @@ Object* CallStubCompiler::CompileCallInterceptor(Object* object, } -Object* CallStubCompiler::CompileCallGlobal(GlobalObject* object, +Object* CallStubCompiler::CompileCallGlobal(JSObject* object, + GlobalObject* holder, JSGlobalPropertyCell* cell, JSFunction* function, String* name) { @@ -758,11 +805,19 @@ Object* CallStubCompiler::CompileCallGlobal(GlobalObject* object, // Get the number of arguments. const int argc = arguments().immediate(); - // Check that the map of the global has not changed. + // Get the receiver from the stack. __ mov(edx, Operand(esp, (argc + 1) * kPointerSize)); - __ cmp(FieldOperand(edx, HeapObject::kMapOffset), - Immediate(Handle<Map>(object->map()))); - __ j(not_equal, &miss, not_taken); + + // If the object is the holder then we know that it's a global + // object which can only happen for contextual calls. In this case, + // the receiver cannot be a smi. + if (object != holder) { + __ test(edx, Immediate(kSmiTagMask)); + __ j(zero, &miss, not_taken); + } + + // Check that the maps haven't changed. + CheckPrototypes(object, edx, holder, ebx, ecx, name, &miss); // Get the value from the cell. __ mov(edi, Immediate(Handle<JSGlobalPropertyCell>(cell))); @@ -773,8 +828,10 @@ Object* CallStubCompiler::CompileCallGlobal(GlobalObject* object, __ j(not_equal, &miss, not_taken); // Patch the receiver on the stack with the global proxy. - __ mov(edx, FieldOperand(edx, GlobalObject::kGlobalReceiverOffset)); - __ mov(Operand(esp, (argc + 1) * kPointerSize), edx); + if (object->IsGlobalObject()) { + __ mov(edx, FieldOperand(edx, GlobalObject::kGlobalReceiverOffset)); + __ mov(Operand(esp, (argc + 1) * kPointerSize), edx); + } // Setup the context (function already in edi). 
__ mov(esi, FieldOperand(edi, JSFunction::kContextOffset)); @@ -964,11 +1021,6 @@ Object* StoreStubCompiler::CompileStoreGlobal(GlobalObject* object, __ mov(ecx, Immediate(Handle<JSGlobalPropertyCell>(cell))); __ mov(FieldOperand(ecx, JSGlobalPropertyCell::kValueOffset), eax); - // RecordWrite clobbers the value register. Pass the value being stored in - // edx. - __ mov(edx, eax); - __ RecordWrite(ecx, JSGlobalPropertyCell::kValueOffset, edx, ebx); - // Return the value (register eax). __ ret(0); @@ -1027,6 +1079,7 @@ Object* KeyedStoreStubCompiler::CompileStoreField(JSObject* object, } + Object* LoadStubCompiler::CompileLoadField(JSObject* object, JSObject* holder, int index, @@ -1039,7 +1092,7 @@ Object* LoadStubCompiler::CompileLoadField(JSObject* object, Label miss; __ mov(eax, (Operand(esp, kPointerSize))); - GenerateLoadField(masm(), object, holder, eax, ebx, edx, index, &miss); + GenerateLoadField(object, holder, eax, ebx, edx, index, name, &miss); __ bind(&miss); GenerateLoadMiss(masm(), Code::LOAD_IC); @@ -1060,8 +1113,8 @@ Object* LoadStubCompiler::CompileLoadCallback(JSObject* object, Label miss; __ mov(eax, (Operand(esp, kPointerSize))); - GenerateLoadCallback(masm(), object, holder, eax, ecx, ebx, - edx, callback, &miss); + GenerateLoadCallback(object, holder, eax, ecx, ebx, edx, + callback, name, &miss); __ bind(&miss); GenerateLoadMiss(masm(), Code::LOAD_IC); @@ -1082,7 +1135,7 @@ Object* LoadStubCompiler::CompileLoadConstant(JSObject* object, Label miss; __ mov(eax, (Operand(esp, kPointerSize))); - GenerateLoadConstant(masm(), object, holder, eax, ebx, edx, value, &miss); + GenerateLoadConstant(object, holder, eax, ebx, edx, value, name, &miss); __ bind(&miss); GenerateLoadMiss(masm(), Code::LOAD_IC); @@ -1104,14 +1157,14 @@ Object* LoadStubCompiler::CompileLoadInterceptor(JSObject* receiver, __ mov(eax, (Operand(esp, kPointerSize))); // TODO(368): Compile in the whole chain: all the interceptors in // prototypes and ultimate answer. 
- GenerateLoadInterceptor(masm(), - receiver, + GenerateLoadInterceptor(receiver, holder, holder->InterceptorPropertyLookupHint(name), eax, ecx, edx, ebx, + name, &miss); __ bind(&miss); @@ -1122,7 +1175,8 @@ Object* LoadStubCompiler::CompileLoadInterceptor(JSObject* receiver, } -Object* LoadStubCompiler::CompileLoadGlobal(GlobalObject* object, +Object* LoadStubCompiler::CompileLoadGlobal(JSObject* object, + GlobalObject* holder, JSGlobalPropertyCell* cell, String* name, bool is_dont_delete) { @@ -1135,11 +1189,19 @@ Object* LoadStubCompiler::CompileLoadGlobal(GlobalObject* object, __ IncrementCounter(&Counters::named_load_global_inline, 1); - // Check that the map of the global has not changed. + // Get the receiver from the stack. __ mov(eax, (Operand(esp, kPointerSize))); - __ cmp(FieldOperand(eax, HeapObject::kMapOffset), - Immediate(Handle<Map>(object->map()))); - __ j(not_equal, &miss, not_taken); + + // If the object is the holder then we know that it's a global + // object which can only happen for contextual loads. In this case, + // the receiver cannot be a smi. + if (object != holder) { + __ test(eax, Immediate(kSmiTagMask)); + __ j(zero, &miss, not_taken); + } + + // Check that the maps haven't changed. + CheckPrototypes(object, eax, holder, ebx, edx, name, &miss); // Get the value from the cell. 
__ mov(eax, Immediate(Handle<JSGlobalPropertyCell>(cell))); @@ -1149,6 +1211,9 @@ Object* LoadStubCompiler::CompileLoadGlobal(GlobalObject* object, if (!is_dont_delete) { __ cmp(eax, Factory::the_hole_value()); __ j(equal, &miss, not_taken); + } else if (FLAG_debug_code) { + __ cmp(eax, Factory::the_hole_value()); + __ Check(not_equal, "DontDelete cells can't contain the hole"); } __ ret(0); @@ -1182,7 +1247,8 @@ Object* KeyedLoadStubCompiler::CompileLoadField(String* name, __ cmp(Operand(eax), Immediate(Handle<String>(name))); __ j(not_equal, &miss, not_taken); - GenerateLoadField(masm(), receiver, holder, ecx, ebx, edx, index, &miss); + GenerateLoadField(receiver, holder, ecx, ebx, edx, index, name, &miss); + __ bind(&miss); __ DecrementCounter(&Counters::keyed_load_field, 1); GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC); @@ -1211,8 +1277,8 @@ Object* KeyedLoadStubCompiler::CompileLoadCallback(String* name, __ cmp(Operand(eax), Immediate(Handle<String>(name))); __ j(not_equal, &miss, not_taken); - GenerateLoadCallback(masm(), receiver, holder, ecx, eax, ebx, edx, - callback, &miss); + GenerateLoadCallback(receiver, holder, ecx, eax, ebx, edx, + callback, name, &miss); __ bind(&miss); __ DecrementCounter(&Counters::keyed_load_callback, 1); GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC); @@ -1241,7 +1307,8 @@ Object* KeyedLoadStubCompiler::CompileLoadConstant(String* name, __ cmp(Operand(eax), Immediate(Handle<String>(name))); __ j(not_equal, &miss, not_taken); - GenerateLoadConstant(masm(), receiver, holder, ecx, ebx, edx, value, &miss); + GenerateLoadConstant(receiver, holder, ecx, ebx, edx, + value, name, &miss); __ bind(&miss); __ DecrementCounter(&Counters::keyed_load_constant_function, 1); GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC); @@ -1269,14 +1336,14 @@ Object* KeyedLoadStubCompiler::CompileLoadInterceptor(JSObject* receiver, __ cmp(Operand(eax), Immediate(Handle<String>(name))); __ j(not_equal, &miss, not_taken); - GenerateLoadInterceptor(masm(), - 
receiver, + GenerateLoadInterceptor(receiver, holder, Smi::FromInt(JSObject::kLookupInHolder), ecx, eax, edx, ebx, + name, &miss); __ bind(&miss); __ DecrementCounter(&Counters::keyed_load_interceptor, 1); diff --git a/V8Binding/v8/src/ic.cc b/V8Binding/v8/src/ic.cc index dfdf722..7e82295 100644 --- a/V8Binding/v8/src/ic.cc +++ b/V8Binding/v8/src/ic.cc @@ -327,13 +327,11 @@ Object* CallIC::LoadFunction(State state, return TypeError("non_object_property_call", object, name); } - Object* result = Heap::the_hole_value(); - // Check if the name is trivially convertible to an index and get // the element if so. uint32_t index; if (name->AsArrayIndex(&index)) { - result = object->GetElement(index); + Object* result = object->GetElement(index); if (result->IsJSFunction()) return result; // Try to find a suitable function delegate for the object at hand. @@ -363,7 +361,7 @@ Object* CallIC::LoadFunction(State state, // Get the property. PropertyAttributes attr; - result = object->GetProperty(*object, &lookup, *name, &attr); + Object* result = object->GetProperty(*object, &lookup, *name, &attr); if (result->IsFailure()) return result; if (lookup.type() == INTERCEPTOR) { // If the object does not have the requested property, check which @@ -397,7 +395,7 @@ Object* CallIC::LoadFunction(State state, // cause GC. HandleScope scope; Handle<JSFunction> function(JSFunction::cast(result)); - Debug::HandleStepIn(function, fp(), false); + Debug::HandleStepIn(function, object, fp(), false); return *function; } #endif @@ -452,24 +450,26 @@ void CallIC::UpdateCaches(LookupResult* lookup, } case NORMAL: { if (!object->IsJSObject()) return; - if (object->IsGlobalObject()) { - // The stub generated for the global object picks the value directly - // from the property cell. So the property must be directly on the - // global object. 
- Handle<GlobalObject> global = Handle<GlobalObject>::cast(object); - if (lookup->holder() != *global) return; + Handle<JSObject> receiver = Handle<JSObject>::cast(object); + + if (lookup->holder()->IsGlobalObject()) { + GlobalObject* global = GlobalObject::cast(lookup->holder()); JSGlobalPropertyCell* cell = JSGlobalPropertyCell::cast(global->GetPropertyCell(lookup)); if (!cell->value()->IsJSFunction()) return; JSFunction* function = JSFunction::cast(cell->value()); - code = StubCache::ComputeCallGlobal(argc, in_loop, *name, *global, - cell, function); + code = StubCache::ComputeCallGlobal(argc, + in_loop, + *name, + *receiver, + global, + cell, + function); } else { // There is only one shared stub for calling normalized // properties. It does not traverse the prototype chain, so the // property must be found in the receiver for the stub to be // applicable. - Handle<JSObject> receiver = Handle<JSObject>::cast(object); if (lookup->holder() != *receiver) return; code = StubCache::ComputeCallNormal(argc, in_loop, *name, *receiver); } @@ -657,16 +657,15 @@ void LoadIC::UpdateCaches(LookupResult* lookup, break; } case NORMAL: { - if (object->IsGlobalObject()) { - // The stub generated for the global object picks the value directly - // from the property cell. So the property must be directly on the - // global object. - Handle<GlobalObject> global = Handle<GlobalObject>::cast(object); - if (lookup->holder() != *global) return; + if (lookup->holder()->IsGlobalObject()) { + GlobalObject* global = GlobalObject::cast(lookup->holder()); JSGlobalPropertyCell* cell = JSGlobalPropertyCell::cast(global->GetPropertyCell(lookup)); - code = StubCache::ComputeLoadGlobal(*name, *global, - cell, lookup->IsDontDelete()); + code = StubCache::ComputeLoadGlobal(*name, + *receiver, + global, + cell, + lookup->IsDontDelete()); } else { // There is only one shared stub for loading normalized // properties. 
It does not traverse the prototype chain, so the diff --git a/V8Binding/v8/src/jsregexp.cc b/V8Binding/v8/src/jsregexp.cc index 879f671..852d431 100644 --- a/V8Binding/v8/src/jsregexp.cc +++ b/V8Binding/v8/src/jsregexp.cc @@ -263,7 +263,6 @@ Handle<Object> RegExpImpl::AtomExec(Handle<JSRegExp> re, // Irregexp implementation. - // Ensures that the regexp object contains a compiled version of the // source for either ASCII or non-ASCII strings. // If the compiled version doesn't already exist, it is compiled @@ -271,25 +270,26 @@ Handle<Object> RegExpImpl::AtomExec(Handle<JSRegExp> re, // If compilation fails, an exception is thrown and this function // returns false. bool RegExpImpl::EnsureCompiledIrregexp(Handle<JSRegExp> re, bool is_ascii) { - int index; - if (is_ascii) { - index = JSRegExp::kIrregexpASCIICodeIndex; - } else { - index = JSRegExp::kIrregexpUC16CodeIndex; - } - Object* entry = re->DataAt(index); - if (!entry->IsTheHole()) { - // A value has already been compiled. - if (entry->IsJSObject()) { - // If it's a JS value, it's an error. - Top::Throw(entry); - return false; - } - return true; - } +#ifdef V8_NATIVE_REGEXP + if (re->DataAt(JSRegExp::code_index(is_ascii))->IsCode()) return true; +#else // ! V8_NATIVE_REGEXP (RegExp interpreter code) + if (re->DataAt(JSRegExp::code_index(is_ascii))->IsByteArray()) return true; +#endif + return CompileIrregexp(re, is_ascii); +} + +bool RegExpImpl::CompileIrregexp(Handle<JSRegExp> re, bool is_ascii) { // Compile the RegExp. CompilationZoneScope zone_scope(DELETE_ON_EXIT); + Object* entry = re->DataAt(JSRegExp::code_index(is_ascii)); + if (entry->IsJSObject()) { + // If it's a JSObject, a previous compilation failed and threw this object. + // Re-throw the object without trying again. 
+ Top::Throw(entry); + return false; + } + ASSERT(entry->IsTheHole()); JSRegExp::Flags flags = re->GetFlags(); @@ -302,7 +302,7 @@ bool RegExpImpl::EnsureCompiledIrregexp(Handle<JSRegExp> re, bool is_ascii) { FlatStringReader reader(pattern); if (!ParseRegExp(&reader, flags.is_multiline(), &compile_data)) { // Throw an exception if we fail to parse the pattern. - // THIS SHOULD NOT HAPPEN. We already parsed it successfully once. + // THIS SHOULD NOT HAPPEN. We already pre-parsed it successfully once. ThrowRegExpException(re, pattern, compile_data.error, @@ -325,17 +325,15 @@ bool RegExpImpl::EnsureCompiledIrregexp(Handle<JSRegExp> re, bool is_ascii) { Handle<Object> regexp_err = Factory::NewSyntaxError("malformed_regexp", array); Top::Throw(*regexp_err); - re->SetDataAt(index, *regexp_err); + re->SetDataAt(JSRegExp::code_index(is_ascii), *regexp_err); return false; } - NoHandleAllocation no_handles; - - FixedArray* data = FixedArray::cast(re->data()); - data->set(index, result.code); - int register_max = IrregexpMaxRegisterCount(data); + Handle<FixedArray> data = Handle<FixedArray>(FixedArray::cast(re->data())); + data->set(JSRegExp::code_index(is_ascii), result.code); + int register_max = IrregexpMaxRegisterCount(*data); if (result.num_registers > register_max) { - SetIrregexpMaxRegisterCount(data, result.num_registers); + SetIrregexpMaxRegisterCount(*data, result.num_registers); } return true; @@ -364,24 +362,12 @@ int RegExpImpl::IrregexpNumberOfRegisters(FixedArray* re) { ByteArray* RegExpImpl::IrregexpByteCode(FixedArray* re, bool is_ascii) { - int index; - if (is_ascii) { - index = JSRegExp::kIrregexpASCIICodeIndex; - } else { - index = JSRegExp::kIrregexpUC16CodeIndex; - } - return ByteArray::cast(re->get(index)); + return ByteArray::cast(re->get(JSRegExp::code_index(is_ascii))); } Code* RegExpImpl::IrregexpNativeCode(FixedArray* re, bool is_ascii) { - int index; - if (is_ascii) { - index = JSRegExp::kIrregexpASCIICodeIndex; - } else { - index = 
JSRegExp::kIrregexpUC16CodeIndex; - } - return Code::cast(re->get(index)); + return Code::cast(re->get(JSRegExp::code_index(is_ascii))); } @@ -408,6 +394,7 @@ Handle<Object> RegExpImpl::IrregexpExec(Handle<JSRegExp> jsregexp, int number_of_capture_registers = (IrregexpNumberOfCaptures(FixedArray::cast(jsregexp->data())) + 1) * 2; +#ifndef V8_NATIVE_REGEXP #ifdef DEBUG if (FLAG_trace_regexp_bytecodes) { String* pattern = jsregexp->Pattern(); @@ -415,6 +402,7 @@ Handle<Object> RegExpImpl::IrregexpExec(Handle<JSRegExp> jsregexp, PrintF("\n\nSubject string: '%s'\n\n", *(subject->ToCString())); } #endif +#endif if (!subject->IsFlat()) { FlattenString(subject); @@ -422,88 +410,83 @@ Handle<Object> RegExpImpl::IrregexpExec(Handle<JSRegExp> jsregexp, last_match_info->EnsureSize(number_of_capture_registers + kLastMatchOverhead); - bool rc; - // We have to initialize this with something to make gcc happy but we can't - // initialize it with its real value until after the GC-causing things are - // over. - FixedArray* array = NULL; + Handle<FixedArray> array; // Dispatch to the correct RegExp implementation. - Handle<String> original_subject = subject; Handle<FixedArray> regexp(FixedArray::cast(jsregexp->data())); - if (UseNativeRegexp()) { +#ifdef V8_NATIVE_REGEXP #if V8_TARGET_ARCH_IA32 - OffsetsVector captures(number_of_capture_registers); - int* captures_vector = captures.vector(); - RegExpMacroAssemblerIA32::Result res; - do { - bool is_ascii = subject->IsAsciiRepresentation(); - if (!EnsureCompiledIrregexp(jsregexp, is_ascii)) { - return Handle<Object>::null(); - } - Handle<Code> code(RegExpImpl::IrregexpNativeCode(*regexp, is_ascii)); - res = RegExpMacroAssemblerIA32::Match(code, - subject, - captures_vector, - captures.length(), - previous_index); - // If result is RETRY, the string have changed representation, and we - // must restart from scratch. 
- } while (res == RegExpMacroAssemblerIA32::RETRY); - if (res == RegExpMacroAssemblerIA32::EXCEPTION) { - ASSERT(Top::has_pending_exception()); - return Handle<Object>::null(); - } - ASSERT(res == RegExpMacroAssemblerIA32::SUCCESS - || res == RegExpMacroAssemblerIA32::FAILURE); - - rc = (res == RegExpMacroAssemblerIA32::SUCCESS); - if (!rc) return Factory::null_value(); - - array = last_match_info->elements(); - ASSERT(array->length() >= number_of_capture_registers + kLastMatchOverhead); - // The captures come in (start, end+1) pairs. - for (int i = 0; i < number_of_capture_registers; i += 2) { - SetCapture(array, i, captures_vector[i]); - SetCapture(array, i + 1, captures_vector[i + 1]); - } -#else // !V8_TARGET_ARCH_IA32 - UNREACHABLE(); -#endif - } else { + OffsetsVector captures(number_of_capture_registers); + int* captures_vector = captures.vector(); + RegExpMacroAssemblerIA32::Result res; + do { bool is_ascii = subject->IsAsciiRepresentation(); if (!EnsureCompiledIrregexp(jsregexp, is_ascii)) { return Handle<Object>::null(); } - // Now that we have done EnsureCompiledIrregexp we can get the number of - // registers. - int number_of_registers = - IrregexpNumberOfRegisters(FixedArray::cast(jsregexp->data())); - OffsetsVector registers(number_of_registers); - int* register_vector = registers.vector(); - for (int i = number_of_capture_registers - 1; i >= 0; i--) { - register_vector[i] = -1; - } - Handle<ByteArray> byte_codes(IrregexpByteCode(*regexp, is_ascii)); - - rc = IrregexpInterpreter::Match(byte_codes, - subject, - register_vector, - previous_index); - if (!rc) return Factory::null_value(); - - array = last_match_info->elements(); - ASSERT(array->length() >= number_of_capture_registers + kLastMatchOverhead); - // The captures come in (start, end+1) pairs. 
- for (int i = 0; i < number_of_capture_registers; i += 2) { - SetCapture(array, i, register_vector[i]); - SetCapture(array, i + 1, register_vector[i + 1]); - } + Handle<Code> code(RegExpImpl::IrregexpNativeCode(*regexp, is_ascii)); + res = RegExpMacroAssemblerIA32::Match(code, + subject, + captures_vector, + captures.length(), + previous_index); + // If result is RETRY, the string have changed representation, and we + // must restart from scratch. + } while (res == RegExpMacroAssemblerIA32::RETRY); + if (res == RegExpMacroAssemblerIA32::EXCEPTION) { + ASSERT(Top::has_pending_exception()); + return Handle<Object>::null(); } + ASSERT(res == RegExpMacroAssemblerIA32::SUCCESS + || res == RegExpMacroAssemblerIA32::FAILURE); - SetLastCaptureCount(array, number_of_capture_registers); - SetLastSubject(array, *original_subject); - SetLastInput(array, *original_subject); + if (res != RegExpMacroAssemblerIA32::SUCCESS) return Factory::null_value(); + + array = Handle<FixedArray>(last_match_info->elements()); + ASSERT(array->length() >= number_of_capture_registers + kLastMatchOverhead); + // The captures come in (start, end+1) pairs. + for (int i = 0; i < number_of_capture_registers; i += 2) { + SetCapture(*array, i, captures_vector[i]); + SetCapture(*array, i + 1, captures_vector[i + 1]); + } +#else // !V8_TARGET_ARCH_IA32 + UNREACHABLE(); +#endif // V8_TARGET_ARCH_IA32 +#else // !V8_NATIVE_REGEXP + bool is_ascii = subject->IsAsciiRepresentation(); + if (!EnsureCompiledIrregexp(jsregexp, is_ascii)) { + return Handle<Object>::null(); + } + // Now that we have done EnsureCompiledIrregexp we can get the number of + // registers. 
+ int number_of_registers = + IrregexpNumberOfRegisters(FixedArray::cast(jsregexp->data())); + OffsetsVector registers(number_of_registers); + int* register_vector = registers.vector(); + for (int i = number_of_capture_registers - 1; i >= 0; i--) { + register_vector[i] = -1; + } + Handle<ByteArray> byte_codes(IrregexpByteCode(*regexp, is_ascii)); + + if (!IrregexpInterpreter::Match(byte_codes, + subject, + register_vector, + previous_index)) { + return Factory::null_value(); + } + + array = Handle<FixedArray>(last_match_info->elements()); + ASSERT(array->length() >= number_of_capture_registers + kLastMatchOverhead); + // The captures come in (start, end+1) pairs. + for (int i = 0; i < number_of_capture_registers; i += 2) { + SetCapture(*array, i, register_vector[i]); + SetCapture(*array, i + 1, register_vector[i + 1]); + } +#endif // V8_NATIVE_REGEXP + + SetLastCaptureCount(*array, number_of_capture_registers); + SetLastSubject(*array, *subject); + SetLastInput(*array, *subject); return last_match_info; } @@ -4474,35 +4457,38 @@ RegExpEngine::CompilationResult RegExpEngine::Compile(RegExpCompileData* data, NodeInfo info = *node->info(); - if (RegExpImpl::UseNativeRegexp()) { +#ifdef V8_NATIVE_REGEXP #ifdef V8_TARGET_ARCH_ARM - UNREACHABLE(); + // ARM native regexp not implemented yet. + UNREACHABLE(); #endif #ifdef V8_TARGET_ARCH_X64 - UNREACHABLE(); + // X64 native regexp not implemented yet. 
+ UNREACHABLE(); #endif #ifdef V8_TARGET_ARCH_IA32 - RegExpMacroAssemblerIA32::Mode mode; - if (is_ascii) { - mode = RegExpMacroAssemblerIA32::ASCII; - } else { - mode = RegExpMacroAssemblerIA32::UC16; - } - RegExpMacroAssemblerIA32 macro_assembler(mode, - (data->capture_count + 1) * 2); - return compiler.Assemble(¯o_assembler, - node, - data->capture_count, - pattern); -#endif + RegExpMacroAssemblerIA32::Mode mode; + if (is_ascii) { + mode = RegExpMacroAssemblerIA32::ASCII; + } else { + mode = RegExpMacroAssemblerIA32::UC16; } + RegExpMacroAssemblerIA32 macro_assembler(mode, + (data->capture_count + 1) * 2); + return compiler.Assemble(¯o_assembler, + node, + data->capture_count, + pattern); +#endif +#else // ! V8_NATIVE_REGEXP + // Interpreted regexp. EmbeddedVector<byte, 1024> codes; RegExpMacroAssemblerIrregexp macro_assembler(codes); return compiler.Assemble(¯o_assembler, node, data->capture_count, pattern); +#endif // V8_NATIVE_REGEXP } - }} // namespace v8::internal diff --git a/V8Binding/v8/src/jsregexp.h b/V8Binding/v8/src/jsregexp.h index a86f7e6..0e7965c 100644 --- a/V8Binding/v8/src/jsregexp.h +++ b/V8Binding/v8/src/jsregexp.h @@ -37,13 +37,15 @@ class RegExpMacroAssembler; class RegExpImpl { public: - static inline bool UseNativeRegexp() { -#ifdef V8_TARGET_ARCH_IA32 - return FLAG_regexp_native; + // Whether V8 is compiled with native regexp support or not. + static bool UsesNativeRegExp() { +#ifdef V8_NATIVE_REGEXP + return true; #else - return false; + return false; #endif } + // Creates a regular expression literal in the old space. // This function calls the garbage collector if necessary. 
static Handle<Object> CreateRegExpLiteral(Handle<JSFunction> constructor, @@ -148,7 +150,8 @@ class RegExpImpl { static String* last_ascii_string_; static String* two_byte_cached_string_; - static bool EnsureCompiledIrregexp(Handle<JSRegExp> re, bool is_ascii); + static bool CompileIrregexp(Handle<JSRegExp> re, bool is_ascii); + static inline bool EnsureCompiledIrregexp(Handle<JSRegExp> re, bool is_ascii); // Set the subject cache. The previous string buffer is not deleted, so the diff --git a/V8Binding/v8/src/mark-compact.cc b/V8Binding/v8/src/mark-compact.cc index 5e46f2a..6e823b3 100644 --- a/V8Binding/v8/src/mark-compact.cc +++ b/V8Binding/v8/src/mark-compact.cc @@ -56,6 +56,7 @@ int MarkCompactCollector::live_old_data_objects_ = 0; int MarkCompactCollector::live_old_pointer_objects_ = 0; int MarkCompactCollector::live_code_objects_ = 0; int MarkCompactCollector::live_map_objects_ = 0; +int MarkCompactCollector::live_cell_objects_ = 0; int MarkCompactCollector::live_lo_objects_ = 0; #endif @@ -155,6 +156,7 @@ void MarkCompactCollector::Prepare(GCTracer* tracer) { live_old_data_objects_ = 0; live_code_objects_ = 0; live_map_objects_ = 0; + live_cell_objects_ = 0; live_lo_objects_ = 0; #endif } @@ -224,7 +226,9 @@ static inline HeapObject* ShortCircuitConsString(Object** p) { if ((type & kShortcutTypeMask) != kShortcutTypeTag) return object; Object* second = reinterpret_cast<ConsString*>(object)->unchecked_second(); - if (reinterpret_cast<String*>(second) != Heap::empty_string()) return object; + if (second != Heap::raw_unchecked_empty_string()) { + return object; + } // Since we don't have the object's start, it is impossible to update the // remembered set. Therefore, we only replace the string with its left @@ -421,7 +425,7 @@ class SymbolTableCleaner : public ObjectVisitor { } } // Set the entry to null_value (as deleted). 
- *p = Heap::null_value(); + *p = Heap::raw_unchecked_null_value(); pointers_removed_++; } } @@ -475,7 +479,7 @@ void MarkCompactCollector::MarkDescriptorArray( DescriptorArray* descriptors) { if (descriptors->IsMarked()) return; // Empty descriptor array is marked as a root before any maps are marked. - ASSERT(descriptors != Heap::empty_descriptor_array()); + ASSERT(descriptors != Heap::raw_unchecked_empty_descriptor_array()); SetMark(descriptors); FixedArray* contents = reinterpret_cast<FixedArray*>( @@ -590,7 +594,7 @@ void MarkCompactCollector::MarkSymbolTable() { // and if it is a sliced string or a cons string backed by an // external string (even indirectly), then the external string does // not receive a weak reference callback. - SymbolTable* symbol_table = SymbolTable::cast(Heap::symbol_table()); + SymbolTable* symbol_table = Heap::raw_unchecked_symbol_table(); // Mark the symbol table itself. SetMark(symbol_table); // Explicitly mark the prefix. @@ -708,6 +712,10 @@ void MarkCompactCollector::RefillMarkingStack() { ScanOverflowedObjects(&map_it); if (marking_stack.is_full()) return; + HeapObjectIterator cell_it(Heap::cell_space(), &OverflowObjectSize); + ScanOverflowedObjects(&cell_it); + if (marking_stack.is_full()) return; + LargeObjectIterator lo_it(Heap::lo_space(), &OverflowObjectSize); ScanOverflowedObjects(&lo_it); if (marking_stack.is_full()) return; @@ -780,10 +788,9 @@ void MarkCompactCollector::MarkLiveObjects() { ProcessObjectGroups(root_visitor.stack_visitor()); // Prune the symbol table removing all symbols only pointed to by the - // symbol table. Cannot use SymbolTable::cast here because the symbol + // symbol table. Cannot use symbol_table() here because the symbol // table is marked. 
- SymbolTable* symbol_table = - reinterpret_cast<SymbolTable*>(Heap::symbol_table()); + SymbolTable* symbol_table = Heap::raw_unchecked_symbol_table(); SymbolTableCleaner v; symbol_table->IterateElements(&v); symbol_table->ElementsRemoved(v.PointersRemoved()); @@ -808,6 +815,9 @@ void MarkCompactCollector::UpdateLiveObjectCount(HeapObject* obj) { } else if (Heap::map_space()->Contains(obj)) { ASSERT(obj->IsMap()); live_map_objects_++; + } else if (Heap::cell_space()->Contains(obj)) { + ASSERT(obj->IsJSGlobalPropertyCell()); + live_cell_objects_++; } else if (Heap::old_pointer_space()->Contains(obj)) { live_old_pointer_objects_++; } else if (Heap::old_data_space()->Contains(obj)) { @@ -967,27 +977,32 @@ inline Object* MCAllocateFromNewSpace(HeapObject* object, int object_size) { // Allocation functions for the paged spaces call the space's MCAllocateRaw. -inline Object* MCAllocateFromOldPointerSpace(HeapObject* object, +inline Object* MCAllocateFromOldPointerSpace(HeapObject* ignore, int object_size) { return Heap::old_pointer_space()->MCAllocateRaw(object_size); } -inline Object* MCAllocateFromOldDataSpace(HeapObject* object, int object_size) { +inline Object* MCAllocateFromOldDataSpace(HeapObject* ignore, int object_size) { return Heap::old_data_space()->MCAllocateRaw(object_size); } -inline Object* MCAllocateFromCodeSpace(HeapObject* object, int object_size) { +inline Object* MCAllocateFromCodeSpace(HeapObject* ignore, int object_size) { return Heap::code_space()->MCAllocateRaw(object_size); } -inline Object* MCAllocateFromMapSpace(HeapObject* object, int object_size) { +inline Object* MCAllocateFromMapSpace(HeapObject* ignore, int object_size) { return Heap::map_space()->MCAllocateRaw(object_size); } +inline Object* MCAllocateFromCellSpace(HeapObject* ignore, int object_size) { + return Heap::cell_space()->MCAllocateRaw(object_size); +} + + // The forwarding address is encoded at the same offset as the current // to-space object, but in from space. 
inline void EncodeForwardingAddressInNewSpace(HeapObject* old_object, @@ -1142,11 +1157,11 @@ static void SweepSpace(NewSpace* space) { // since their existing map might not be live after the collection. int size = object->Size(); if (size >= ByteArray::kHeaderSize) { - object->set_map(Heap::byte_array_map()); + object->set_map(Heap::raw_unchecked_byte_array_map()); ByteArray::cast(object)->set_length(ByteArray::LengthFor(size)); } else { ASSERT(size == kPointerSize); - object->set_map(Heap::one_word_filler_map()); + object->set_map(Heap::raw_unchecked_one_pointer_filler_map()); } ASSERT(object->Size() == size); } @@ -1196,8 +1211,8 @@ static void SweepSpace(PagedSpace* space, DeallocateFunction dealloc) { // loop. } - // If the last region was not live we need to from free_start to the - // allocation top in the page. + // If the last region was not live we need to deallocate from + // free_start to the allocation top in the page. if (!is_previous_alive) { int free_size = p->AllocationTop() - free_start; if (free_size > 0) { @@ -1241,6 +1256,21 @@ void MarkCompactCollector::DeallocateMapBlock(Address start, } +void MarkCompactCollector::DeallocateCellBlock(Address start, + int size_in_bytes) { + // Free-list elements in cell space are assumed to have a fixed size. + // We break the free block into chunks and add them to the free list + // individually. 
+ int size = Heap::cell_space()->object_size_in_bytes(); + ASSERT(size_in_bytes % size == 0); + Heap::ClearRSetRange(start, size_in_bytes); + Address end = start + size_in_bytes; + for (Address a = start; a < end; a += size) { + Heap::cell_space()->Free(a); + } +} + + void MarkCompactCollector::EncodeForwardingAddresses() { ASSERT(state_ == ENCODE_FORWARDING_ADDRESSES); // Objects in the active semispace of the young generation may be @@ -1261,6 +1291,11 @@ void MarkCompactCollector::EncodeForwardingAddresses() { LogNonLiveCodeObject>( Heap::code_space()); + EncodeForwardingAddressesInPagedSpace<MCAllocateFromCellSpace, + IgnoreNonLiveObject>( + Heap::cell_space()); + + // Compute new space next to last after the old and code spaces have been // compacted. Objects in new space can be promoted to old or code space. EncodeForwardingAddressesInNewSpace(); @@ -1279,6 +1314,7 @@ void MarkCompactCollector::EncodeForwardingAddresses() { Heap::old_data_space()->MCWriteRelocationInfoToPage(); Heap::code_space()->MCWriteRelocationInfoToPage(); Heap::map_space()->MCWriteRelocationInfoToPage(); + Heap::cell_space()->MCWriteRelocationInfoToPage(); } @@ -1293,6 +1329,7 @@ void MarkCompactCollector::SweepSpaces() { SweepSpace(Heap::old_pointer_space(), &DeallocateOldPointerBlock); SweepSpace(Heap::old_data_space(), &DeallocateOldDataBlock); SweepSpace(Heap::code_space(), &DeallocateCodeBlock); + SweepSpace(Heap::cell_space(), &DeallocateCellBlock); SweepSpace(Heap::new_space()); SweepSpace(Heap::map_space(), &DeallocateMapBlock); } @@ -1371,15 +1408,16 @@ class UpdatingVisitor: public ObjectVisitor { ASSERT(!Heap::InFromSpace(obj)); if (Heap::new_space()->Contains(obj)) { - Address f_addr = Heap::new_space()->FromSpaceLow() + - Heap::new_space()->ToSpaceOffsetForAddress(old_addr); - new_addr = Memory::Address_at(f_addr); + Address forwarding_pointer_addr = + Heap::new_space()->FromSpaceLow() + + Heap::new_space()->ToSpaceOffsetForAddress(old_addr); + new_addr = 
Memory::Address_at(forwarding_pointer_addr); #ifdef DEBUG ASSERT(Heap::old_pointer_space()->Contains(new_addr) || Heap::old_data_space()->Contains(new_addr) || - Heap::code_space()->Contains(new_addr) || - Heap::new_space()->FromSpaceContains(new_addr)); + Heap::new_space()->FromSpaceContains(new_addr) || + Heap::lo_space()->Contains(HeapObject::FromAddress(new_addr))); if (Heap::new_space()->FromSpaceContains(new_addr)) { ASSERT(Heap::new_space()->FromSpaceOffsetForAddress(new_addr) <= @@ -1392,32 +1430,19 @@ class UpdatingVisitor: public ObjectVisitor { return; } else { - ASSERT(Heap::old_pointer_space()->Contains(obj) || - Heap::old_data_space()->Contains(obj) || - Heap::code_space()->Contains(obj) || - Heap::map_space()->Contains(obj)); - - new_addr = MarkCompactCollector::GetForwardingAddressInOldSpace(obj); - ASSERT(Heap::old_pointer_space()->Contains(new_addr) || - Heap::old_data_space()->Contains(new_addr) || - Heap::code_space()->Contains(new_addr) || - Heap::map_space()->Contains(new_addr)); - #ifdef DEBUG - if (Heap::old_pointer_space()->Contains(obj)) { - ASSERT(Heap::old_pointer_space()->MCSpaceOffsetForAddress(new_addr) <= - Heap::old_pointer_space()->MCSpaceOffsetForAddress(old_addr)); - } else if (Heap::old_data_space()->Contains(obj)) { - ASSERT(Heap::old_data_space()->MCSpaceOffsetForAddress(new_addr) <= - Heap::old_data_space()->MCSpaceOffsetForAddress(old_addr)); - } else if (Heap::code_space()->Contains(obj)) { - ASSERT(Heap::code_space()->MCSpaceOffsetForAddress(new_addr) <= - Heap::code_space()->MCSpaceOffsetForAddress(old_addr)); - } else { - ASSERT(Heap::map_space()->MCSpaceOffsetForAddress(new_addr) <= - Heap::map_space()->MCSpaceOffsetForAddress(old_addr)); + PagedSpaces spaces; + PagedSpace* original_space = spaces.next(); + while (original_space != NULL) { + if (original_space->Contains(obj)) break; + original_space = spaces.next(); } + ASSERT(original_space != NULL); #endif + new_addr = 
MarkCompactCollector::GetForwardingAddressInOldSpace(obj); + ASSERT(original_space->Contains(new_addr)); + ASSERT(original_space->MCSpaceOffsetForAddress(new_addr) <= + original_space->MCSpaceOffsetForAddress(old_addr)); } *p = HeapObject::FromAddress(new_addr); @@ -1449,6 +1474,8 @@ void MarkCompactCollector::UpdatePointers() { &UpdatePointersInOldObject); int live_codes = IterateLiveObjects(Heap::code_space(), &UpdatePointersInOldObject); + int live_cells = IterateLiveObjects(Heap::cell_space(), + &UpdatePointersInOldObject); int live_news = IterateLiveObjects(Heap::new_space(), &UpdatePointersInNewObject); @@ -1460,15 +1487,14 @@ void MarkCompactCollector::UpdatePointers() { USE(live_pointer_olds); USE(live_data_olds); USE(live_codes); + USE(live_cells); USE(live_news); - -#ifdef DEBUG ASSERT(live_maps == live_map_objects_); ASSERT(live_data_olds == live_old_data_objects_); ASSERT(live_pointer_olds == live_old_pointer_objects_); ASSERT(live_codes == live_code_objects_); + ASSERT(live_cells == live_cell_objects_); ASSERT(live_news == live_young_objects_); -#endif } @@ -1589,30 +1615,31 @@ void MarkCompactCollector::RelocateObjects() { int live_data_olds = IterateLiveObjects(Heap::old_data_space(), &RelocateOldDataObject); int live_codes = IterateLiveObjects(Heap::code_space(), &RelocateCodeObject); + int live_cells = IterateLiveObjects(Heap::cell_space(), &RelocateCellObject); int live_news = IterateLiveObjects(Heap::new_space(), &RelocateNewObject); USE(live_maps); USE(live_data_olds); USE(live_pointer_olds); USE(live_codes); + USE(live_cells); USE(live_news); -#ifdef DEBUG ASSERT(live_maps == live_map_objects_); ASSERT(live_data_olds == live_old_data_objects_); ASSERT(live_pointer_olds == live_old_pointer_objects_); ASSERT(live_codes == live_code_objects_); + ASSERT(live_cells == live_cell_objects_); ASSERT(live_news == live_young_objects_); -#endif // Notify code object in LO to convert IC target to address // This must happen after lo_space_->Compact 
LargeObjectIterator it(Heap::lo_space()); while (it.has_next()) { ConvertCodeICTargetToAddress(it.next()); } - // Flips from and to spaces + // Flip from and to spaces Heap::new_space()->Flip(); - // Sets age_mark to bottom in to space + // Set age_mark to bottom in to space Address mark = Heap::new_space()->bottom(); Heap::new_space()->set_age_mark(mark); @@ -1636,7 +1663,7 @@ int MarkCompactCollector::ConvertCodeICTargetToAddress(HeapObject* obj) { int MarkCompactCollector::RelocateMapObject(HeapObject* obj) { - // decode map pointer (forwarded address) + // Recover map pointer. MapWord encoding = obj->map_word(); Address map_addr = encoding.DecodeMapAddress(Heap::map_space()); ASSERT(Heap::map_space()->Contains(HeapObject::FromAddress(map_addr))); @@ -1644,10 +1671,10 @@ int MarkCompactCollector::RelocateMapObject(HeapObject* obj) { // Get forwarding address before resetting map pointer Address new_addr = GetForwardingAddressInOldSpace(obj); - // recover map pointer + // Reset map pointer. The meta map object may not be copied yet so + // Map::cast does not yet work. obj->set_map(reinterpret_cast<Map*>(HeapObject::FromAddress(map_addr))); - // The meta map object may not be copied yet. Address old_addr = obj->address(); if (new_addr != old_addr) { @@ -1664,23 +1691,23 @@ int MarkCompactCollector::RelocateMapObject(HeapObject* obj) { } -static inline int RelocateOldObject(HeapObject* obj, - OldSpace* space, - Address new_addr, - Address map_addr) { - // recover map pointer - obj->set_map(reinterpret_cast<Map*>(HeapObject::FromAddress(map_addr))); +static inline int RestoreMap(HeapObject* obj, + PagedSpace* space, + Address new_addr, + Address map_addr) { + // This must be a non-map object, and the function relies on the + // assumption that the Map space is compacted before the other paged + // spaces (see RelocateObjects). + + // Reset map pointer. 
+ obj->set_map(Map::cast(HeapObject::FromAddress(map_addr))); - // This is a non-map object, it relies on the assumption that the Map space - // is compacted before the Old space (see RelocateObjects). int obj_size = obj->Size(); ASSERT_OBJECT_SIZE(obj_size); ASSERT(space->MCSpaceOffsetForAddress(new_addr) <= space->MCSpaceOffsetForAddress(obj->address())); - space->MCAdjustRelocationEnd(new_addr, obj_size); - #ifdef DEBUG if (FLAG_gc_verbose) { PrintF("relocate %p -> %p\n", obj->address(), new_addr); @@ -1692,21 +1719,22 @@ static inline int RelocateOldObject(HeapObject* obj, int MarkCompactCollector::RelocateOldNonCodeObject(HeapObject* obj, - OldSpace* space) { - // decode map pointer (forwarded address) + PagedSpace* space) { + // Recover map pointer. MapWord encoding = obj->map_word(); Address map_addr = encoding.DecodeMapAddress(Heap::map_space()); ASSERT(Heap::map_space()->Contains(map_addr)); - // Get forwarding address before resetting map pointer + // Get forwarding address before resetting map pointer. Address new_addr = GetForwardingAddressInOldSpace(obj); - int obj_size = RelocateOldObject(obj, space, new_addr, map_addr); + // Reset the map pointer. + int obj_size = RestoreMap(obj, space, new_addr, map_addr); Address old_addr = obj->address(); if (new_addr != old_addr) { - memmove(new_addr, old_addr, obj_size); // copy contents + memmove(new_addr, old_addr, obj_size); // Copy contents } ASSERT(!HeapObject::FromAddress(new_addr)->IsCode()); @@ -1725,8 +1753,13 @@ int MarkCompactCollector::RelocateOldDataObject(HeapObject* obj) { } +int MarkCompactCollector::RelocateCellObject(HeapObject* obj) { + return RelocateOldNonCodeObject(obj, Heap::cell_space()); +} + + int MarkCompactCollector::RelocateCodeObject(HeapObject* obj) { - // decode map pointer (forwarded address) + // Recover map pointer. 
MapWord encoding = obj->map_word(); Address map_addr = encoding.DecodeMapAddress(Heap::map_space()); ASSERT(Heap::map_space()->Contains(HeapObject::FromAddress(map_addr))); @@ -1734,23 +1767,23 @@ int MarkCompactCollector::RelocateCodeObject(HeapObject* obj) { // Get forwarding address before resetting map pointer Address new_addr = GetForwardingAddressInOldSpace(obj); - int obj_size = RelocateOldObject(obj, Heap::code_space(), new_addr, map_addr); + // Reset the map pointer. + int obj_size = RestoreMap(obj, Heap::code_space(), new_addr, map_addr); - // convert inline cache target to address using old address + // Convert inline cache target to address using old address. if (obj->IsCode()) { - // convert target to address first related to old_address Code::cast(obj)->ConvertICTargetsFromObjectToAddress(); } Address old_addr = obj->address(); if (new_addr != old_addr) { - memmove(new_addr, old_addr, obj_size); // copy contents + memmove(new_addr, old_addr, obj_size); // Copy contents. } HeapObject* copied_to = HeapObject::FromAddress(new_addr); if (copied_to->IsCode()) { - // may also update inline cache target. + // May also update inline cache target. Code::cast(copied_to)->Relocate(new_addr - old_addr); // Notify the logger that compiled code has moved. 
LOG(CodeMoveEvent(old_addr, new_addr)); @@ -1770,15 +1803,15 @@ int MarkCompactCollector::RelocateNewObject(HeapObject* obj) { Address new_addr = Memory::Address_at(Heap::new_space()->FromSpaceLow() + offset); +#ifdef DEBUG if (Heap::new_space()->FromSpaceContains(new_addr)) { ASSERT(Heap::new_space()->FromSpaceOffsetForAddress(new_addr) <= Heap::new_space()->ToSpaceOffsetForAddress(old_addr)); } else { - OldSpace* target_space = Heap::TargetSpace(obj); - ASSERT(target_space == Heap::old_pointer_space() || - target_space == Heap::old_data_space()); - target_space->MCAdjustRelocationEnd(new_addr, obj_size); + ASSERT(Heap::TargetSpace(obj) == Heap::old_pointer_space() || + Heap::TargetSpace(obj) == Heap::old_data_space()); } +#endif // New and old addresses cannot overlap. memcpy(reinterpret_cast<void*>(new_addr), diff --git a/V8Binding/v8/src/mark-compact.h b/V8Binding/v8/src/mark-compact.h index d7ad630..bd9e4a0 100644 --- a/V8Binding/v8/src/mark-compact.h +++ b/V8Binding/v8/src/mark-compact.h @@ -293,6 +293,7 @@ class MarkCompactCollector: public AllStatic { static void DeallocateOldDataBlock(Address start, int size_in_bytes); static void DeallocateCodeBlock(Address start, int size_in_bytes); static void DeallocateMapBlock(Address start, int size_in_bytes); + static void DeallocateCellBlock(Address start, int size_in_bytes); // If we are not compacting the heap, we simply sweep the spaces except // for the large object space, clearing mark bits and adding unmarked @@ -352,8 +353,12 @@ class MarkCompactCollector: public AllStatic { static int RelocateOldPointerObject(HeapObject* obj); static int RelocateOldDataObject(HeapObject* obj); + // Relocate a property cell object. + static int RelocateCellObject(HeapObject* obj); + // Helper function. - static inline int RelocateOldNonCodeObject(HeapObject* obj, OldSpace* space); + static inline int RelocateOldNonCodeObject(HeapObject* obj, + PagedSpace* space); // Relocates an object in the code space. 
static int RelocateCodeObject(HeapObject* obj); @@ -393,6 +398,9 @@ class MarkCompactCollector: public AllStatic { // Number of live objects in Heap::map_space_. static int live_map_objects_; + // Number of live objects in Heap::cell_space_. + static int live_cell_objects_; + // Number of live objects in Heap::lo_space_. static int live_lo_objects_; diff --git a/V8Binding/v8/src/math.js b/V8Binding/v8/src/math.js index d12927e..db75cb2 100644 --- a/V8Binding/v8/src/math.js +++ b/V8Binding/v8/src/math.js @@ -68,10 +68,12 @@ function MathAtan(x) { } // ECMA 262 - 15.8.2.5 -function MathAtan2(x, y) { - if (!IS_NUMBER(x)) x = ToNumber(x); +// The naming of y and x matches the spec, as does the order in which +// ToNumber (valueOf) is called. +function MathAtan2(y, x) { if (!IS_NUMBER(y)) y = ToNumber(y); - return %Math_atan2(x, y); + if (!IS_NUMBER(x)) x = ToNumber(x); + return %Math_atan2(y, x); } // ECMA 262 - 15.8.2.6 @@ -117,11 +119,12 @@ function MathLog(x) { // ECMA 262 - 15.8.2.11 function MathMax(arg1, arg2) { // length == 2 var r = -$Infinity; - for (var i = %_ArgumentsLength() - 1; i >= 0; --i) { + var length = %_ArgumentsLength(); + for (var i = 0; i < length; i++) { var n = ToNumber(%_Arguments(i)); if (NUMBER_IS_NAN(n)) return n; - // Make sure +0 is consider greater than -0. - if (n > r || (n === 0 && r === 0 && (1 / n) > (1 / r))) r = n; + // Make sure +0 is considered greater than -0. + if (n > r || (r === 0 && n === 0 && !%_IsSmi(r))) r = n; } return r; } @@ -129,11 +132,12 @@ function MathMax(arg1, arg2) { // length == 2 // ECMA 262 - 15.8.2.12 function MathMin(arg1, arg2) { // length == 2 var r = $Infinity; - for (var i = %_ArgumentsLength() - 1; i >= 0; --i) { + var length = %_ArgumentsLength(); + for (var i = 0; i < length; i++) { var n = ToNumber(%_Arguments(i)); if (NUMBER_IS_NAN(n)) return n; - // Make sure -0 is consider less than +0. - if (n < r || (n === 0 && r === 0 && (1 / n) < (1 / r))) r = n; + // Make sure -0 is considered less than +0. 
+ if (n < r || (r === 0 && n === 0 && !%_IsSmi(n))) r = n; } return r; } diff --git a/V8Binding/v8/src/messages.js b/V8Binding/v8/src/messages.js index 6157874..870c969 100644 --- a/V8Binding/v8/src/messages.js +++ b/V8Binding/v8/src/messages.js @@ -60,10 +60,8 @@ const kMessages = { unexpected_token_string: "Unexpected string", unexpected_token_identifier: "Unexpected identifier", unexpected_eos: "Unexpected end of input", - expected_label: "Expected label", malformed_regexp: "Invalid regular expression: /%0/: %1", unterminated_regexp: "Invalid regular expression: missing /", - pcre_error: "PCRE function %0, error code %1", regexp_flags: "Cannot supply flags when constructing one RegExp from another", invalid_lhs_in_assignment: "Invalid left-hand side in assignment", invalid_lhs_in_for_in: "Invalid left-hand side in for-in", @@ -74,21 +72,17 @@ const kMessages = { redeclaration: "%0 '%1' has already been declared", no_catch_or_finally: "Missing catch or finally after try", unknown_label: "Undefined label '%0'", - invalid_break: "Invalid break statement", - invalid_continue: "Invalid continue statement", uncaught_exception: "Uncaught %0", stack_trace: "Stack Trace:\n%0", called_non_callable: "%0 is not a function", undefined_method: "Object %1 has no method '%0'", property_not_function: "Property '%0' of object %1 is not a function", - null_or_undefined: "Cannot access property of null or undefined", cannot_convert_to_primitive: "Cannot convert object to primitive value", not_constructor: "%0 is not a constructor", not_defined: "%0 is not defined", non_object_property_load: "Cannot read property '%0' of %1", non_object_property_store: "Cannot set property '%0' of %1", non_object_property_call: "Cannot call method '%0' of %1", - illegal_eval: "Unsupported indirect eval() call", with_expression: "%0 has no properties", illegal_invocation: "Illegal invocation", no_setter_in_callback: "Cannot set property %0 of %1 which has only a getter", @@ -101,13 +95,11 @@ const 
kMessages = { reduce_no_initial: "Reduce of empty array with no initial value", // RangeError invalid_array_length: "Invalid array length", - invalid_array_apply_length: "Function.prototype.apply supports only up to 1024 arguments", stack_overflow: "Maximum call stack size exceeded", apply_overflow: "Function.prototype.apply cannot support %0 arguments", // SyntaxError unable_to_parse: "Parse error", duplicate_regexp_flag: "Duplicate RegExp flag %0", - unrecognized_regexp_flag: "Unrecognized RegExp flag %0", invalid_regexp: "Invalid RegExp pattern /%0/", illegal_break: "Illegal break statement", illegal_continue: "Illegal continue statement", diff --git a/V8Binding/v8/src/objects-debug.cc b/V8Binding/v8/src/objects-debug.cc index 1f199e4..8c57afd 100644 --- a/V8Binding/v8/src/objects-debug.cc +++ b/V8Binding/v8/src/objects-debug.cc @@ -271,29 +271,38 @@ void ByteArray::ByteArrayVerify() { void JSObject::PrintProperties() { if (HasFastProperties()) { - for (DescriptorReader r(map()->instance_descriptors()); - !r.eos(); - r.advance()) { + DescriptorArray* descs = map()->instance_descriptors(); + for (int i = 0; i < descs->number_of_descriptors(); i++) { PrintF(" "); - r.GetKey()->StringPrint(); + descs->GetKey(i)->StringPrint(); PrintF(": "); - if (r.type() == FIELD) { - FastPropertyAt(r.GetFieldIndex())->ShortPrint(); - PrintF(" (field at offset %d)\n", r.GetFieldIndex()); - } else if (r.type() == CONSTANT_FUNCTION) { - r.GetConstantFunction()->ShortPrint(); - PrintF(" (constant function)\n"); - } else if (r.type() == CALLBACKS) { - r.GetCallbacksObject()->ShortPrint(); - PrintF(" (callback)\n"); - } else if (r.type() == MAP_TRANSITION) { - PrintF(" (map transition)\n"); - } else if (r.type() == CONSTANT_TRANSITION) { - PrintF(" (constant transition)\n"); - } else if (r.type() == NULL_DESCRIPTOR) { - PrintF(" (null descriptor)\n"); - } else { - UNREACHABLE(); + switch (descs->GetType(i)) { + case FIELD: { + int index = descs->GetFieldIndex(i); + 
FastPropertyAt(index)->ShortPrint(); + PrintF(" (field at offset %d)\n", index); + break; + } + case CONSTANT_FUNCTION: + descs->GetConstantFunction(i)->ShortPrint(); + PrintF(" (constant function)\n"); + break; + case CALLBACKS: + descs->GetCallbacksObject(i)->ShortPrint(); + PrintF(" (callback)\n"); + break; + case MAP_TRANSITION: + PrintF(" (map transition)\n"); + break; + case CONSTANT_TRANSITION: + PrintF(" (constant transition)\n"); + break; + case NULL_DESCRIPTOR: + PrintF(" (null descriptor)\n"); + break; + default: + UNREACHABLE(); + break; } } } else { @@ -714,7 +723,7 @@ void JSRegExp::JSRegExpVerify() { break; } case JSRegExp::IRREGEXP: { - bool is_native = RegExpImpl::UseNativeRegexp(); + bool is_native = RegExpImpl::UsesNativeRegExp(); FixedArray* arr = FixedArray::cast(data()); Object* ascii_data = arr->get(JSRegExp::kIrregexpASCIICodeIndex); @@ -1064,11 +1073,10 @@ void JSObject::SpillInformation::Print() { void DescriptorArray::PrintDescriptors() { PrintF("Descriptor array %d\n", number_of_descriptors()); - int number = 0; - for (DescriptorReader r(this); !r.eos(); r.advance()) { + for (int i = 0; i < number_of_descriptors(); i++) { + PrintF(" %d: ", i); Descriptor desc; - r.Get(&desc); - PrintF(" %d: ", number++); + Get(i, &desc); desc.Print(); } PrintF("\n"); @@ -1078,14 +1086,14 @@ void DescriptorArray::PrintDescriptors() { bool DescriptorArray::IsSortedNoDuplicates() { String* current_key = NULL; uint32_t current = 0; - for (DescriptorReader r(this); !r.eos(); r.advance()) { - String* key = r.GetKey(); + for (int i = 0; i < number_of_descriptors(); i++) { + String* key = GetKey(i); if (key == current_key) { PrintDescriptors(); return false; } current_key = key; - uint32_t hash = r.GetKey()->Hash(); + uint32_t hash = GetKey(i)->Hash(); if (hash < current) { PrintDescriptors(); return false; diff --git a/V8Binding/v8/src/objects-inl.h b/V8Binding/v8/src/objects-inl.h index ff0f2e5..37c9b8b 100644 --- a/V8Binding/v8/src/objects-inl.h +++ 
b/V8Binding/v8/src/objects-inl.h @@ -481,7 +481,7 @@ bool Object::IsDictionary() { bool Object::IsSymbolTable() { - return IsHashTable() && this == Heap::symbol_table(); + return IsHashTable() && this == Heap::raw_unchecked_symbol_table(); } @@ -768,6 +768,8 @@ int Failure::value() const { Failure* Failure::RetryAfterGC(int requested_bytes) { + // Assert that the space encoding fits in the three bytes allotted for it. + ASSERT((LAST_SPACE & ~kSpaceTagMask) == 0); int requested = requested_bytes >> kObjectAlignmentBits; int value = (requested << kSpaceTagSize) | NEW_SPACE; ASSERT(value >> kSpaceTagSize == requested); @@ -1060,7 +1062,17 @@ ACCESSORS(Oddball, to_string, String, kToStringOffset) ACCESSORS(Oddball, to_number, Object, kToNumberOffset) -ACCESSORS(JSGlobalPropertyCell, value, Object, kValueOffset) +Object* JSGlobalPropertyCell::value() { + return READ_FIELD(this, kValueOffset); +} + + +void JSGlobalPropertyCell::set_value(Object* val, WriteBarrierMode ignored) { + // The write barrier is not used for global property cells. 
+ ASSERT(!val->IsJSGlobalPropertyCell()); + WRITE_FIELD(this, kValueOffset, val); +} + int JSObject::GetHeaderSize() { switch (map()->instance_type()) { @@ -1339,6 +1351,56 @@ Smi* DescriptorArray::GetDetails(int descriptor_number) { } +PropertyType DescriptorArray::GetType(int descriptor_number) { + ASSERT(descriptor_number < number_of_descriptors()); + return PropertyDetails(GetDetails(descriptor_number)).type(); +} + + +int DescriptorArray::GetFieldIndex(int descriptor_number) { + return Descriptor::IndexFromValue(GetValue(descriptor_number)); +} + + +JSFunction* DescriptorArray::GetConstantFunction(int descriptor_number) { + return JSFunction::cast(GetValue(descriptor_number)); +} + + +Object* DescriptorArray::GetCallbacksObject(int descriptor_number) { + ASSERT(GetType(descriptor_number) == CALLBACKS); + return GetValue(descriptor_number); +} + + +AccessorDescriptor* DescriptorArray::GetCallbacks(int descriptor_number) { + ASSERT(GetType(descriptor_number) == CALLBACKS); + Proxy* p = Proxy::cast(GetCallbacksObject(descriptor_number)); + return reinterpret_cast<AccessorDescriptor*>(p->proxy()); +} + + +bool DescriptorArray::IsProperty(int descriptor_number) { + return GetType(descriptor_number) < FIRST_PHANTOM_PROPERTY_TYPE; +} + + +bool DescriptorArray::IsTransition(int descriptor_number) { + PropertyType t = GetType(descriptor_number); + return t == MAP_TRANSITION || t == CONSTANT_TRANSITION; +} + + +bool DescriptorArray::IsNullDescriptor(int descriptor_number) { + return GetType(descriptor_number) == NULL_DESCRIPTOR; +} + + +bool DescriptorArray::IsDontEnum(int descriptor_number) { + return PropertyDetails(GetDetails(descriptor_number)).IsDontEnum(); +} + + void DescriptorArray::Get(int descriptor_number, Descriptor* desc) { desc->Init(GetKey(descriptor_number), GetValue(descriptor_number), @@ -1362,6 +1424,13 @@ void DescriptorArray::Set(int descriptor_number, Descriptor* desc) { } +void DescriptorArray::CopyFrom(int index, DescriptorArray* src, int 
src_index) { + Descriptor desc; + src->Get(src_index, &desc); + Set(index, &desc); +} + + void DescriptorArray::Swap(int first, int second) { fast_swap(this, ToKeyIndex(first), ToKeyIndex(second)); FixedArray* content_array = GetContentArray(); @@ -2642,7 +2711,7 @@ void Dictionary<Shape, Key>::SetEntry(int entry, Object* key, Object* value, PropertyDetails details) { - ASSERT(!key->IsString() || details.index() > 0); + ASSERT(!key->IsString() || details.IsDeleted() || details.index() > 0); int index = HashTable<Shape, Key>::EntryToIndex(entry); WriteBarrierMode mode = FixedArray::GetWriteBarrierMode(); FixedArray::set(index, key, mode); @@ -2655,8 +2724,8 @@ void Map::ClearCodeCache() { // No write barrier is needed since empty_fixed_array is not in new space. // Please note this function is used during marking: // - MarkCompactCollector::MarkUnmarkedObject - ASSERT(!Heap::InNewSpace(Heap::empty_fixed_array())); - WRITE_FIELD(this, kCodeCacheOffset, Heap::empty_fixed_array()); + ASSERT(!Heap::InNewSpace(Heap::raw_unchecked_empty_fixed_array())); + WRITE_FIELD(this, kCodeCacheOffset, Heap::raw_unchecked_empty_fixed_array()); } diff --git a/V8Binding/v8/src/objects.cc b/V8Binding/v8/src/objects.cc index ee0ac2d..a9004c9 100644 --- a/V8Binding/v8/src/objects.cc +++ b/V8Binding/v8/src/objects.cc @@ -436,8 +436,7 @@ Object* JSObject::SetNormalizedProperty(String* name, store_value = Heap::AllocateJSGlobalPropertyCell(value); if (store_value->IsFailure()) return store_value; } - Object* dict = - property_dictionary()->Add(name, store_value, details); + Object* dict = property_dictionary()->Add(name, store_value, details); if (dict->IsFailure()) return dict; set_properties(StringDictionary::cast(dict)); return value; @@ -467,8 +466,15 @@ Object* JSObject::DeleteNormalizedProperty(String* name, DeleteMode mode) { // If we have a global object set the cell to the hole. 
if (IsGlobalObject()) { PropertyDetails details = dictionary->DetailsAt(entry); - if (details.IsDontDelete() && mode != FORCE_DELETION) { - return Heap::false_value(); + if (details.IsDontDelete()) { + if (mode != FORCE_DELETION) return Heap::false_value(); + // When forced to delete global properties, we have to make a + // map change to invalidate any ICs that think they can load + // from the DontDelete cell without checking if it contains + // the hole value. + Object* new_map = map()->CopyDropDescriptors(); + if (new_map->IsFailure()) return new_map; + set_map(Map::cast(new_map)); } JSGlobalPropertyCell* cell = JSGlobalPropertyCell::cast(dictionary->ValueAt(entry)); @@ -1626,6 +1632,7 @@ Object* JSObject::SetPropertyWithDefinedSetter(JSFunction* setter, return *value_handle; } + void JSObject::LookupCallbackSetterInPrototypes(String* name, LookupResult* result) { for (Object* pt = GetPrototype(); @@ -1653,7 +1660,7 @@ Object* JSObject::LookupCallbackSetterInPrototypes(uint32_t index) { if (JSObject::cast(pt)->HasFastElements()) continue; NumberDictionary* dictionary = JSObject::cast(pt)->element_dictionary(); int entry = dictionary->FindEntry(index); - if (entry != StringDictionary::kNotFound) { + if (entry != NumberDictionary::kNotFound) { Object* element = dictionary->ValueAt(entry); PropertyDetails details = dictionary->DetailsAt(entry); if (details.type() == CALLBACKS) { @@ -1705,8 +1712,6 @@ void JSObject::LocalLookupRealNamedProperty(String* name, } else { int entry = property_dictionary()->FindEntry(name); if (entry != StringDictionary::kNotFound) { - // Make sure to disallow caching for uninitialized constants - // found in the dictionary-mode objects. 
Object* value = property_dictionary()->ValueAt(entry); if (IsGlobalObject()) { PropertyDetails d = property_dictionary()->DetailsAt(entry); @@ -1717,14 +1722,14 @@ void JSObject::LocalLookupRealNamedProperty(String* name, value = JSGlobalPropertyCell::cast(value)->value(); ASSERT(result->IsLoaded()); } - if (value->IsTheHole()) { - result->DisallowCaching(); - } + // Make sure to disallow caching for uninitialized constants + // found in the dictionary-mode objects. + if (value->IsTheHole()) result->DisallowCaching(); result->DictionaryResult(this, entry); return; } // Slow case object skipped during lookup. Do not use inline caching. - result->DisallowCaching(); + if (!IsGlobalObject()) result->DisallowCaching(); } result->NotFound(); } @@ -1865,7 +1870,7 @@ Object* JSObject::SetProperty(LookupResult* result, if (value == result->GetConstantFunction()) return value; // Preserve the attributes of this existing property. attributes = result->GetAttributes(); - return ConvertDescriptorToFieldAndMapTransition(name, value, attributes); + return ConvertDescriptorToField(name, value, attributes); case CALLBACKS: return SetPropertyWithCallback(result->GetCallbackObject(), name, @@ -1928,7 +1933,7 @@ Object* JSObject::IgnoreAttributesAndSetLocalProperty( if (!result->IsLoaded()) { return SetLazyProperty(result, name, value, attributes); } - // Check of IsReadOnly removed from here in clone. + // Check of IsReadOnly removed from here in clone. switch (result->type()) { case NORMAL: return SetNormalizedProperty(result, value); @@ -1947,7 +1952,7 @@ Object* JSObject::IgnoreAttributesAndSetLocalProperty( if (value == result->GetConstantFunction()) return value; // Preserve the attributes of this existing property. 
attributes = result->GetAttributes(); - return ConvertDescriptorToFieldAndMapTransition(name, value, attributes); + return ConvertDescriptorToField(name, value, attributes); case CALLBACKS: case INTERCEPTOR: // Override callback in clone @@ -2110,20 +2115,19 @@ Object* JSObject::NormalizeProperties(PropertyNormalizationMode mode) { if (obj->IsFailure()) return obj; StringDictionary* dictionary = StringDictionary::cast(obj); - for (DescriptorReader r(map()->instance_descriptors()); - !r.eos(); - r.advance()) { - PropertyDetails details = r.GetDetails(); + DescriptorArray* descs = map()->instance_descriptors(); + for (int i = 0; i < descs->number_of_descriptors(); i++) { + PropertyDetails details = descs->GetDetails(i); switch (details.type()) { case CONSTANT_FUNCTION: { PropertyDetails d = PropertyDetails(details.attributes(), NORMAL, details.index()); - Object* value = r.GetConstantFunction(); + Object* value = descs->GetConstantFunction(i); if (IsGlobalObject()) { value = Heap::AllocateJSGlobalPropertyCell(value); if (value->IsFailure()) return value; } - Object* result = dictionary->Add(r.GetKey(), value, d); + Object* result = dictionary->Add(descs->GetKey(i), value, d); if (result->IsFailure()) return result; dictionary = StringDictionary::cast(result); break; @@ -2131,12 +2135,12 @@ Object* JSObject::NormalizeProperties(PropertyNormalizationMode mode) { case FIELD: { PropertyDetails d = PropertyDetails(details.attributes(), NORMAL, details.index()); - Object* value = FastPropertyAt(r.GetFieldIndex()); + Object* value = FastPropertyAt(descs->GetFieldIndex(i)); if (IsGlobalObject()) { value = Heap::AllocateJSGlobalPropertyCell(value); if (value->IsFailure()) return value; } - Object* result = dictionary->Add(r.GetKey(), value, d); + Object* result = dictionary->Add(descs->GetKey(i), value, d); if (result->IsFailure()) return result; dictionary = StringDictionary::cast(result); break; @@ -2144,12 +2148,12 @@ Object* 
JSObject::NormalizeProperties(PropertyNormalizationMode mode) { case CALLBACKS: { PropertyDetails d = PropertyDetails(details.attributes(), CALLBACKS, details.index()); - Object* value = r.GetCallbacksObject(); + Object* value = descs->GetCallbacksObject(i); if (IsGlobalObject()) { value = Heap::AllocateJSGlobalPropertyCell(value); if (value->IsFailure()) return value; } - Object* result = dictionary->Add(r.GetKey(), value, d); + Object* result = dictionary->Add(descs->GetKey(i), value, d); if (result->IsFailure()) return result; dictionary = StringDictionary::cast(result); break; @@ -2563,35 +2567,44 @@ bool JSObject::IsSimpleEnum() { int Map::NumberOfDescribedProperties() { int result = 0; - for (DescriptorReader r(instance_descriptors()); !r.eos(); r.advance()) { - if (r.IsProperty()) result++; + DescriptorArray* descs = instance_descriptors(); + for (int i = 0; i < descs->number_of_descriptors(); i++) { + if (descs->IsProperty(i)) result++; } return result; } int Map::PropertyIndexFor(String* name) { - for (DescriptorReader r(instance_descriptors()); !r.eos(); r.advance()) { - if (r.Equals(name) && !r.IsNullDescriptor()) return r.GetFieldIndex(); + DescriptorArray* descs = instance_descriptors(); + for (int i = 0; i < descs->number_of_descriptors(); i++) { + if (name->Equals(descs->GetKey(i)) && !descs->IsNullDescriptor(i)) { + return descs->GetFieldIndex(i); + } } return -1; } int Map::NextFreePropertyIndex() { - int index = -1; - for (DescriptorReader r(instance_descriptors()); !r.eos(); r.advance()) { - if (r.type() == FIELD) { - if (r.GetFieldIndex() > index) index = r.GetFieldIndex(); + int max_index = -1; + DescriptorArray* descs = instance_descriptors(); + for (int i = 0; i < descs->number_of_descriptors(); i++) { + if (descs->GetType(i) == FIELD) { + int current_index = descs->GetFieldIndex(i); + if (current_index > max_index) max_index = current_index; } } - return index+1; + return max_index + 1; } AccessorDescriptor* Map::FindAccessor(String* name) { 
- for (DescriptorReader r(instance_descriptors()); !r.eos(); r.advance()) { - if (r.Equals(name) && r.type() == CALLBACKS) return r.GetCallbacks(); + DescriptorArray* descs = instance_descriptors(); + for (int i = 0; i < descs->number_of_descriptors(); i++) { + if (name->Equals(descs->GetKey(i)) && descs->GetType(i) == CALLBACKS) { + return descs->GetCallbacks(i); + } } return NULL; } @@ -2839,16 +2852,15 @@ Object* JSObject::LookupAccessor(String* name, bool is_getter) { Object* JSObject::SlowReverseLookup(Object* value) { if (HasFastProperties()) { - for (DescriptorReader r(map()->instance_descriptors()); - !r.eos(); - r.advance()) { - if (r.type() == FIELD) { - if (FastPropertyAt(r.GetFieldIndex()) == value) { - return r.GetKey(); + DescriptorArray* descs = map()->instance_descriptors(); + for (int i = 0; i < descs->number_of_descriptors(); i++) { + if (descs->GetType(i) == FIELD) { + if (FastPropertyAt(descs->GetFieldIndex(i)) == value) { + return descs->GetKey(i); } - } else if (r.type() == CONSTANT_FUNCTION) { - if (r.GetConstantFunction() == value) { - return r.GetKey(); + } else if (descs->GetType(i) == CONSTANT_FUNCTION) { + if (descs->GetConstantFunction(i) == value) { + return descs->GetKey(i); } } } @@ -3162,13 +3174,13 @@ Object* DescriptorArray::CopyInsert(Descriptor* descriptor, int transitions = 0; int null_descriptors = 0; if (remove_transitions) { - for (DescriptorReader r(this); !r.eos(); r.advance()) { - if (r.IsTransition()) transitions++; - if (r.IsNullDescriptor()) null_descriptors++; + for (int i = 0; i < number_of_descriptors(); i++) { + if (IsTransition(i)) transitions++; + if (IsNullDescriptor(i)) null_descriptors++; } } else { - for (DescriptorReader r(this); !r.eos(); r.advance()) { - if (r.IsNullDescriptor()) null_descriptors++; + for (int i = 0; i < number_of_descriptors(); i++) { + if (IsNullDescriptor(i)) null_descriptors++; } } int new_size = number_of_descriptors() - transitions - null_descriptors; @@ -3216,32 +3228,31 @@ Object* 
DescriptorArray::CopyInsert(Descriptor* descriptor, // Copy the descriptors, filtering out transitions and null descriptors, // and inserting or replacing a descriptor. - DescriptorWriter w(new_descriptors); - DescriptorReader r(this); uint32_t descriptor_hash = descriptor->GetKey()->Hash(); + int from_index = 0; + int to_index = 0; - for (; !r.eos(); r.advance()) { - if (r.GetKey()->Hash() > descriptor_hash || - r.GetKey() == descriptor->GetKey()) break; - if (r.IsNullDescriptor()) continue; - if (remove_transitions && r.IsTransition()) continue; - w.WriteFrom(&r); - } - w.Write(descriptor); - if (replacing) { - ASSERT(r.GetKey() == descriptor->GetKey()); - r.advance(); - } else { - ASSERT(r.eos() || - r.GetKey()->Hash() > descriptor_hash || - r.IsNullDescriptor()); + for (; from_index < number_of_descriptors(); from_index++) { + String* key = GetKey(from_index); + if (key->Hash() > descriptor_hash || key == descriptor->GetKey()) { + break; + } + if (IsNullDescriptor(from_index)) continue; + if (remove_transitions && IsTransition(from_index)) continue; + new_descriptors->CopyFrom(to_index++, this, from_index); } - for (; !r.eos(); r.advance()) { - if (r.IsNullDescriptor()) continue; - if (remove_transitions && r.IsTransition()) continue; - w.WriteFrom(&r); + + new_descriptors->Set(to_index++, descriptor); + if (replacing) from_index++; + + for (; from_index < number_of_descriptors(); from_index++) { + if (IsNullDescriptor(from_index)) continue; + if (remove_transitions && IsTransition(from_index)) continue; + new_descriptors->CopyFrom(to_index++, this, from_index); } - ASSERT(w.eos()); + + ASSERT(to_index == new_descriptors->number_of_descriptors()); + SLOW_ASSERT(new_descriptors->IsSortedNoDuplicates()); return new_descriptors; } @@ -3254,8 +3265,8 @@ Object* DescriptorArray::RemoveTransitions() { // Compute the size of the map transition entries to be removed. 
int num_removed = 0; - for (DescriptorReader r(this); !r.eos(); r.advance()) { - if (!r.IsProperty()) num_removed++; + for (int i = 0; i < number_of_descriptors(); i++) { + if (!IsProperty(i)) num_removed++; } // Allocate the new descriptor array. @@ -3264,11 +3275,11 @@ Object* DescriptorArray::RemoveTransitions() { DescriptorArray* new_descriptors = DescriptorArray::cast(result); // Copy the content. - DescriptorWriter w(new_descriptors); - for (DescriptorReader r(this); !r.eos(); r.advance()) { - if (r.IsProperty()) w.WriteFrom(&r); + int next_descriptor = 0; + for (int i = 0; i < number_of_descriptors(); i++) { + if (IsProperty(i)) new_descriptors->CopyFrom(next_descriptor++, this, i); } - ASSERT(w.eos()); + ASSERT(next_descriptor == new_descriptors->number_of_descriptors()); return new_descriptors; } @@ -4575,10 +4586,10 @@ void String::PrintOn(FILE* file) { void Map::CreateBackPointers() { DescriptorArray* descriptors = instance_descriptors(); - for (DescriptorReader r(descriptors); !r.eos(); r.advance()) { - if (r.type() == MAP_TRANSITION) { + for (int i = 0; i < descriptors->number_of_descriptors(); i++) { + if (descriptors->GetType(i) == MAP_TRANSITION) { // Get target. - Map* target = Map::cast(r.GetValue()); + Map* target = Map::cast(descriptors->GetValue(i)); #ifdef DEBUG // Verify target. Object* source_prototype = prototype(); @@ -4589,7 +4600,7 @@ void Map::CreateBackPointers() { ASSERT(target_prototype->IsJSObject() || target_prototype->IsNull()); ASSERT(source_prototype->IsMap() || - source_prototype == target_prototype); + source_prototype == target_prototype); #endif // Point target back to source. set_prototype() will not let us set // the prototype to a map, as we do here. @@ -4604,7 +4615,7 @@ void Map::ClearNonLiveTransitions(Object* real_prototype) { // low-level accessors to get and modify their data. 
DescriptorArray* d = reinterpret_cast<DescriptorArray*>( *RawField(this, Map::kInstanceDescriptorsOffset)); - if (d == Heap::empty_descriptor_array()) return; + if (d == Heap::raw_unchecked_empty_descriptor_array()) return; Smi* NullDescriptorDetails = PropertyDetails(NONE, NULL_DESCRIPTOR).AsSmi(); FixedArray* contents = reinterpret_cast<FixedArray*>( @@ -5825,11 +5836,10 @@ Object* JSObject::GetPropertyPostInterceptor(JSObject* receiver, } -bool JSObject::GetPropertyWithInterceptorProper( +Object* JSObject::GetPropertyWithInterceptorProper( JSObject* receiver, String* name, - PropertyAttributes* attributes, - Object** result_object) { + PropertyAttributes* attributes) { HandleScope scope; Handle<InterceptorInfo> interceptor(GetNamedInterceptor()); Handle<JSObject> receiver_handle(receiver); @@ -5850,17 +5860,14 @@ bool JSObject::GetPropertyWithInterceptorProper( VMState state(EXTERNAL); result = getter(v8::Utils::ToLocal(name_handle), info); } - if (Top::has_scheduled_exception()) { - return false; - } - if (!result.IsEmpty()) { + if (!Top::has_scheduled_exception() && !result.IsEmpty()) { *attributes = NONE; - *result_object = *v8::Utils::OpenHandle(*result); - return true; + return *v8::Utils::OpenHandle(*result); } } - return false; + *attributes = ABSENT; + return Heap::undefined_value(); } @@ -5874,12 +5881,13 @@ Object* JSObject::GetInterceptorPropertyWithLookupHint( Handle<JSObject> holder_handle(this); Handle<String> name_handle(name); - Object* result = NULL; - if (GetPropertyWithInterceptorProper(receiver, name, attributes, &result)) { + Object* result = GetPropertyWithInterceptorProper(receiver, + name, + attributes); + if (*attributes != ABSENT) { return result; - } else { - RETURN_IF_SCHEDULED_EXCEPTION(); } + RETURN_IF_SCHEDULED_EXCEPTION(); int property_index = lookup_hint->value(); if (property_index >= 0) { @@ -5924,12 +5932,11 @@ Object* JSObject::GetPropertyWithInterceptor( Handle<JSObject> holder_handle(this); Handle<String> name_handle(name); 
- Object* result = NULL; - if (GetPropertyWithInterceptorProper(receiver, name, attributes, &result)) { + Object* result = GetPropertyWithInterceptorProper(receiver, name, attributes); + if (*attributes != ABSENT) { return result; - } else { - RETURN_IF_SCHEDULED_EXCEPTION(); } + RETURN_IF_SCHEDULED_EXCEPTION(); result = holder_handle->GetPropertyPostInterceptor( *receiver_handle, @@ -6011,13 +6018,11 @@ bool JSObject::HasRealNamedCallbackProperty(String* key) { int JSObject::NumberOfLocalProperties(PropertyAttributes filter) { if (HasFastProperties()) { + DescriptorArray* descs = map()->instance_descriptors(); int result = 0; - for (DescriptorReader r(map()->instance_descriptors()); - !r.eos(); - r.advance()) { - PropertyDetails details = r.GetDetails(); - if (details.IsProperty() && - (details.attributes() & filter) == 0) { + for (int i = 0; i < descs->number_of_descriptors(); i++) { + PropertyDetails details = descs->GetDetails(i); + if (details.IsProperty() && (details.attributes() & filter) == 0) { result++; } } @@ -6150,16 +6155,11 @@ void FixedArray::SortPairs(FixedArray* numbers, uint32_t len) { // purpose of this function is to provide reflection information for the object // mirrors. 
void JSObject::GetLocalPropertyNames(FixedArray* storage, int index) { - ASSERT(storage->length() >= - NumberOfLocalProperties(static_cast<PropertyAttributes>(NONE)) - - index); + ASSERT(storage->length() >= (NumberOfLocalProperties(NONE) - index)); if (HasFastProperties()) { - for (DescriptorReader r(map()->instance_descriptors()); - !r.eos(); - r.advance()) { - if (r.IsProperty()) { - storage->set(index++, r.GetKey()); - } + DescriptorArray* descs = map()->instance_descriptors(); + for (int i = 0; i < descs->number_of_descriptors(); i++) { + if (descs->IsProperty(i)) storage->set(index++, descs->GetKey(i)); } ASSERT(storage->length() >= index); } else { @@ -6834,6 +6834,26 @@ Object* GlobalObject::GetPropertyCell(LookupResult* result) { } +Object* GlobalObject::EnsurePropertyCell(String* name) { + ASSERT(!HasFastProperties()); + int entry = property_dictionary()->FindEntry(name); + if (entry == StringDictionary::kNotFound) { + Object* cell = Heap::AllocateJSGlobalPropertyCell(Heap::the_hole_value()); + if (cell->IsFailure()) return cell; + PropertyDetails details(NONE, NORMAL); + details = details.AsDeleted(); + Object* dictionary = property_dictionary()->Add(name, cell, details); + if (dictionary->IsFailure()) return dictionary; + set_properties(StringDictionary::cast(dictionary)); + return cell; + } else { + Object* value = property_dictionary()->ValueAt(entry); + ASSERT(value->IsJSGlobalPropertyCell()); + return value; + } +} + + Object* SymbolTable::LookupString(String* string, Object** s) { SymbolKey key(string); return LookupKey(&key, s); @@ -7193,7 +7213,7 @@ Object* Dictionary<Shape, Key>::AddEntry(Key key, uint32_t entry = Dictionary<Shape, Key>::FindInsertionEntry(hash); // Insert element at empty or deleted entry - if (details.index() == 0 && Shape::kIsEnumerable) { + if (!details.IsDeleted() && details.index() == 0 && Shape::kIsEnumerable) { // Assign an enumeration index to the property and update // SetNextEnumerationIndex. 
int index = NextEnumerationIndex(); @@ -7266,7 +7286,9 @@ int Dictionary<Shape, Key>::NumberOfElementsFilterAttributes( for (int i = 0; i < capacity; i++) { Object* k = HashTable<Shape, Key>::KeyAt(i); if (HashTable<Shape, Key>::IsKey(k)) { - PropertyAttributes attr = DetailsAt(i).attributes(); + PropertyDetails details = DetailsAt(i); + if (details.IsDeleted()) continue; + PropertyAttributes attr = details.attributes(); if ((attr & filter) == 0) result++; } } @@ -7290,7 +7312,9 @@ void Dictionary<Shape, Key>::CopyKeysTo(FixedArray* storage, for (int i = 0; i < capacity; i++) { Object* k = HashTable<Shape, Key>::KeyAt(i); if (HashTable<Shape, Key>::IsKey(k)) { - PropertyAttributes attr = DetailsAt(i).attributes(); + PropertyDetails details = DetailsAt(i); + if (details.IsDeleted()) continue; + PropertyAttributes attr = details.attributes(); if ((attr & filter) == 0) storage->set(index++, k); } } @@ -7308,13 +7332,12 @@ void StringDictionary::CopyEnumKeysTo(FixedArray* storage, Object* k = KeyAt(i); if (IsKey(k)) { PropertyDetails details = DetailsAt(i); - if (!details.IsDontEnum()) { - storage->set(index, k); - sort_array->set(index, - Smi::FromInt(details.index()), - SKIP_WRITE_BARRIER); - index++; - } + if (details.IsDeleted() || details.IsDontEnum()) continue; + storage->set(index, k); + sort_array->set(index, + Smi::FromInt(details.index()), + SKIP_WRITE_BARRIER); + index++; } } storage->SortPairs(sort_array, sort_array->length()); @@ -7331,6 +7354,8 @@ void Dictionary<Shape, Key>::CopyKeysTo(FixedArray* storage) { for (int i = 0; i < capacity; i++) { Object* k = HashTable<Shape, Key>::KeyAt(i); if (HashTable<Shape, Key>::IsKey(k)) { + PropertyDetails details = DetailsAt(i); + if (details.IsDeleted()) continue; storage->set(index++, k); } } @@ -7403,7 +7428,7 @@ Object* StringDictionary::TransformPropertiesToFastFor( if (fields->IsFailure()) return fields; // Fill in the instance descriptor and the fields. 
- DescriptorWriter w(descriptors); + int next_descriptor = 0; int current_offset = 0; for (int i = 0; i < capacity; i++) { Object* k = KeyAt(i); @@ -7420,7 +7445,7 @@ Object* StringDictionary::TransformPropertiesToFastFor( JSFunction::cast(value), details.attributes(), details.index()); - w.Write(&d); + descriptors->Set(next_descriptor++, &d); } else if (type == NORMAL) { if (current_offset < inobject_props) { obj->InObjectPropertyAtPut(current_offset, @@ -7434,13 +7459,13 @@ Object* StringDictionary::TransformPropertiesToFastFor( current_offset++, details.attributes(), details.index()); - w.Write(&d); + descriptors->Set(next_descriptor++, &d); } else if (type == CALLBACKS) { CallbacksDescriptor d(String::cast(key), value, details.attributes(), details.index()); - w.Write(&d); + descriptors->Set(next_descriptor++, &d); } else { UNREACHABLE(); } diff --git a/V8Binding/v8/src/objects.h b/V8Binding/v8/src/objects.h index ebd0bb4..5c76e4a 100644 --- a/V8Binding/v8/src/objects.h +++ b/V8Binding/v8/src/objects.h @@ -297,97 +297,202 @@ enum PropertyNormalizationMode { V(JS_FUNCTION_TYPE) \ + // Since string types are not consecutive, this macro is used to // iterate over them. 
#define STRING_TYPE_LIST(V) \ - V(SHORT_SYMBOL_TYPE, SeqTwoByteString::kAlignedSize, short_symbol) \ - V(MEDIUM_SYMBOL_TYPE, SeqTwoByteString::kAlignedSize, medium_symbol) \ - V(LONG_SYMBOL_TYPE, SeqTwoByteString::kAlignedSize, long_symbol) \ - V(SHORT_ASCII_SYMBOL_TYPE, SeqAsciiString::kAlignedSize, short_ascii_symbol) \ + V(SHORT_SYMBOL_TYPE, \ + SeqTwoByteString::kAlignedSize, \ + short_symbol, \ + ShortSymbol) \ + V(MEDIUM_SYMBOL_TYPE, \ + SeqTwoByteString::kAlignedSize, \ + medium_symbol, \ + MediumSymbol) \ + V(LONG_SYMBOL_TYPE, \ + SeqTwoByteString::kAlignedSize, \ + long_symbol, \ + LongSymbol) \ + V(SHORT_ASCII_SYMBOL_TYPE, \ + SeqAsciiString::kAlignedSize, \ + short_ascii_symbol, \ + ShortAsciiSymbol) \ V(MEDIUM_ASCII_SYMBOL_TYPE, \ SeqAsciiString::kAlignedSize, \ - medium_ascii_symbol) \ - V(LONG_ASCII_SYMBOL_TYPE, SeqAsciiString::kAlignedSize, long_ascii_symbol) \ - V(SHORT_CONS_SYMBOL_TYPE, ConsString::kSize, short_cons_symbol) \ - V(MEDIUM_CONS_SYMBOL_TYPE, ConsString::kSize, medium_cons_symbol) \ - V(LONG_CONS_SYMBOL_TYPE, ConsString::kSize, long_cons_symbol) \ - V(SHORT_CONS_ASCII_SYMBOL_TYPE, ConsString::kSize, short_cons_ascii_symbol) \ - V(MEDIUM_CONS_ASCII_SYMBOL_TYPE, ConsString::kSize, medium_cons_ascii_symbol)\ - V(LONG_CONS_ASCII_SYMBOL_TYPE, ConsString::kSize, long_cons_ascii_symbol) \ - V(SHORT_SLICED_SYMBOL_TYPE, SlicedString::kSize, short_sliced_symbol) \ - V(MEDIUM_SLICED_SYMBOL_TYPE, SlicedString::kSize, medium_sliced_symbol) \ - V(LONG_SLICED_SYMBOL_TYPE, SlicedString::kSize, long_sliced_symbol) \ + medium_ascii_symbol, \ + MediumAsciiSymbol) \ + V(LONG_ASCII_SYMBOL_TYPE, \ + SeqAsciiString::kAlignedSize, \ + long_ascii_symbol, \ + LongAsciiSymbol) \ + V(SHORT_CONS_SYMBOL_TYPE, \ + ConsString::kSize, \ + short_cons_symbol, \ + ShortConsSymbol) \ + V(MEDIUM_CONS_SYMBOL_TYPE, \ + ConsString::kSize, \ + medium_cons_symbol, \ + MediumConsSymbol) \ + V(LONG_CONS_SYMBOL_TYPE, \ + ConsString::kSize, \ + long_cons_symbol, \ + LongConsSymbol) 
\ + V(SHORT_CONS_ASCII_SYMBOL_TYPE, \ + ConsString::kSize, \ + short_cons_ascii_symbol, \ + ShortConsAsciiSymbol) \ + V(MEDIUM_CONS_ASCII_SYMBOL_TYPE, \ + ConsString::kSize, \ + medium_cons_ascii_symbol, \ + MediumConsAsciiSymbol) \ + V(LONG_CONS_ASCII_SYMBOL_TYPE, \ + ConsString::kSize, \ + long_cons_ascii_symbol, \ + LongConsAsciiSymbol) \ + V(SHORT_SLICED_SYMBOL_TYPE, \ + SlicedString::kSize, \ + short_sliced_symbol, \ + ShortSlicedSymbol) \ + V(MEDIUM_SLICED_SYMBOL_TYPE, \ + SlicedString::kSize, \ + medium_sliced_symbol, \ + MediumSlicedSymbol) \ + V(LONG_SLICED_SYMBOL_TYPE, \ + SlicedString::kSize, \ + long_sliced_symbol, \ + LongSlicedSymbol) \ V(SHORT_SLICED_ASCII_SYMBOL_TYPE, \ SlicedString::kSize, \ - short_sliced_ascii_symbol) \ + short_sliced_ascii_symbol, \ + ShortSlicedAsciiSymbol) \ V(MEDIUM_SLICED_ASCII_SYMBOL_TYPE, \ SlicedString::kSize, \ - medium_sliced_ascii_symbol) \ + medium_sliced_ascii_symbol, \ + MediumSlicedAsciiSymbol) \ V(LONG_SLICED_ASCII_SYMBOL_TYPE, \ SlicedString::kSize, \ - long_sliced_ascii_symbol) \ + long_sliced_ascii_symbol, \ + LongSlicedAsciiSymbol) \ V(SHORT_EXTERNAL_SYMBOL_TYPE, \ ExternalTwoByteString::kSize, \ - short_external_symbol) \ + short_external_symbol, \ + ShortExternalSymbol) \ V(MEDIUM_EXTERNAL_SYMBOL_TYPE, \ ExternalTwoByteString::kSize, \ - medium_external_symbol) \ + medium_external_symbol, \ + MediumExternalSymbol) \ V(LONG_EXTERNAL_SYMBOL_TYPE, \ ExternalTwoByteString::kSize, \ - long_external_symbol) \ + long_external_symbol, \ + LongExternalSymbol) \ V(SHORT_EXTERNAL_ASCII_SYMBOL_TYPE, \ ExternalAsciiString::kSize, \ - short_external_ascii_symbol) \ + short_external_ascii_symbol, \ + ShortExternalAsciiSymbol) \ V(MEDIUM_EXTERNAL_ASCII_SYMBOL_TYPE, \ ExternalAsciiString::kSize, \ - medium_external_ascii_symbol) \ + medium_external_ascii_symbol, \ + MediumExternalAsciiSymbol) \ V(LONG_EXTERNAL_ASCII_SYMBOL_TYPE, \ ExternalAsciiString::kSize, \ - long_external_ascii_symbol) \ - V(SHORT_STRING_TYPE, 
SeqTwoByteString::kAlignedSize, short_string) \ - V(MEDIUM_STRING_TYPE, SeqTwoByteString::kAlignedSize, medium_string) \ - V(LONG_STRING_TYPE, SeqTwoByteString::kAlignedSize, long_string) \ - V(SHORT_ASCII_STRING_TYPE, SeqAsciiString::kAlignedSize, short_ascii_string) \ + long_external_ascii_symbol, \ + LongExternalAsciiSymbol) \ + V(SHORT_STRING_TYPE, \ + SeqTwoByteString::kAlignedSize, \ + short_string, \ + ShortString) \ + V(MEDIUM_STRING_TYPE, \ + SeqTwoByteString::kAlignedSize, \ + medium_string, \ + MediumString) \ + V(LONG_STRING_TYPE, \ + SeqTwoByteString::kAlignedSize, \ + long_string, \ + LongString) \ + V(SHORT_ASCII_STRING_TYPE, \ + SeqAsciiString::kAlignedSize, \ + short_ascii_string, \ + ShortAsciiString) \ V(MEDIUM_ASCII_STRING_TYPE, \ SeqAsciiString::kAlignedSize, \ - medium_ascii_string) \ - V(LONG_ASCII_STRING_TYPE, SeqAsciiString::kAlignedSize, long_ascii_string) \ - V(SHORT_CONS_STRING_TYPE, ConsString::kSize, short_cons_string) \ - V(MEDIUM_CONS_STRING_TYPE, ConsString::kSize, medium_cons_string) \ - V(LONG_CONS_STRING_TYPE, ConsString::kSize, long_cons_string) \ - V(SHORT_CONS_ASCII_STRING_TYPE, ConsString::kSize, short_cons_ascii_string) \ - V(MEDIUM_CONS_ASCII_STRING_TYPE, ConsString::kSize, medium_cons_ascii_string)\ - V(LONG_CONS_ASCII_STRING_TYPE, ConsString::kSize, long_cons_ascii_string) \ - V(SHORT_SLICED_STRING_TYPE, SlicedString::kSize, short_sliced_string) \ - V(MEDIUM_SLICED_STRING_TYPE, SlicedString::kSize, medium_sliced_string) \ - V(LONG_SLICED_STRING_TYPE, SlicedString::kSize, long_sliced_string) \ + medium_ascii_string, \ + MediumAsciiString) \ + V(LONG_ASCII_STRING_TYPE, \ + SeqAsciiString::kAlignedSize, \ + long_ascii_string, \ + LongAsciiString) \ + V(SHORT_CONS_STRING_TYPE, \ + ConsString::kSize, \ + short_cons_string, \ + ShortConsString) \ + V(MEDIUM_CONS_STRING_TYPE, \ + ConsString::kSize, \ + medium_cons_string, \ + MediumConsString) \ + V(LONG_CONS_STRING_TYPE, \ + ConsString::kSize, \ + long_cons_string, \ + 
LongConsString) \ + V(SHORT_CONS_ASCII_STRING_TYPE, \ + ConsString::kSize, \ + short_cons_ascii_string, \ + ShortConsAsciiString) \ + V(MEDIUM_CONS_ASCII_STRING_TYPE, \ + ConsString::kSize, \ + medium_cons_ascii_string, \ + MediumConsAsciiString) \ + V(LONG_CONS_ASCII_STRING_TYPE, \ + ConsString::kSize, \ + long_cons_ascii_string, \ + LongConsAsciiString) \ + V(SHORT_SLICED_STRING_TYPE, \ + SlicedString::kSize, \ + short_sliced_string, \ + ShortSlicedString) \ + V(MEDIUM_SLICED_STRING_TYPE, \ + SlicedString::kSize, \ + medium_sliced_string, \ + MediumSlicedString) \ + V(LONG_SLICED_STRING_TYPE, \ + SlicedString::kSize, \ + long_sliced_string, \ + LongSlicedString) \ V(SHORT_SLICED_ASCII_STRING_TYPE, \ SlicedString::kSize, \ - short_sliced_ascii_string) \ + short_sliced_ascii_string, \ + ShortSlicedAsciiString) \ V(MEDIUM_SLICED_ASCII_STRING_TYPE, \ SlicedString::kSize, \ - medium_sliced_ascii_string) \ + medium_sliced_ascii_string, \ + MediumSlicedAsciiString) \ V(LONG_SLICED_ASCII_STRING_TYPE, \ SlicedString::kSize, \ - long_sliced_ascii_string) \ + long_sliced_ascii_string, \ + LongSlicedAsciiString) \ V(SHORT_EXTERNAL_STRING_TYPE, \ ExternalTwoByteString::kSize, \ - short_external_string) \ + short_external_string, \ + ShortExternalString) \ V(MEDIUM_EXTERNAL_STRING_TYPE, \ ExternalTwoByteString::kSize, \ - medium_external_string) \ + medium_external_string, \ + MediumExternalString) \ V(LONG_EXTERNAL_STRING_TYPE, \ ExternalTwoByteString::kSize, \ - long_external_string) \ + long_external_string, \ + LongExternalString) \ V(SHORT_EXTERNAL_ASCII_STRING_TYPE, \ ExternalAsciiString::kSize, \ - short_external_ascii_string) \ + short_external_ascii_string, \ + ShortExternalAsciiString) \ V(MEDIUM_EXTERNAL_ASCII_STRING_TYPE, \ ExternalAsciiString::kSize, \ - medium_external_ascii_string) \ + medium_external_ascii_string, \ + MediumExternalAsciiString) \ V(LONG_EXTERNAL_ASCII_STRING_TYPE, \ ExternalAsciiString::kSize, \ - long_external_ascii_string) + 
long_external_ascii_string, \ + LongExternalAsciiString) // A struct is a simple object a set of object-valued fields. Including an // object type in this causes the compiler to generate most of the boilerplate @@ -823,12 +928,14 @@ class Smi: public Object { // Failure is used for reporting out of memory situations and // propagating exceptions through the runtime system. Failure objects -// are transient and cannot occur as part of the objects graph. +// are transient and cannot occur as part of the object graph. // // Failures are a single word, encoded as follows: // +-------------------------+---+--+--+ // |rrrrrrrrrrrrrrrrrrrrrrrrr|sss|tt|11| // +-------------------------+---+--+--+ +// 3 7 6 4 32 10 +// 1 // // The low two bits, 0-1, are the failure tag, 11. The next two bits, // 2-3, are a failure type tag 'tt' with possible values: @@ -839,18 +946,13 @@ class Smi: public Object { // // The next three bits, 4-6, are an allocation space tag 'sss'. The // allocation space tag is 000 for all failure types except -// RETRY_AFTER_GC. For RETRY_AFTER_GC, the possible values are -// (the encoding is found in globals.h): -// 000 NEW_SPACE -// 001 OLD_SPACE -// 010 CODE_SPACE -// 011 MAP_SPACE -// 100 LO_SPACE +// RETRY_AFTER_GC. For RETRY_AFTER_GC, the possible values are the +// allocation spaces (the encoding is found in globals.h). // -// The remaining bits is the number of words requested by the -// allocation request that failed, and is zeroed except for -// RETRY_AFTER_GC failures. The 25 bits (on a 32 bit platform) gives -// a representable range of 2^27 bytes (128MB). +// The remaining bits is the size of the allocation request in units +// of the pointer size, and is zeroed except for RETRY_AFTER_GC +// failures. The 25 bits (on a 32 bit platform) gives a representable +// range of 2^27 bytes (128MB). // Failure type tag info. 
const int kFailureTypeTagSize = 2; @@ -980,14 +1082,6 @@ class MapWord BASE_EMBEDDED { inline Address ToEncodedAddress(); - private: - // HeapObject calls the private constructor and directly reads the value. - friend class HeapObject; - - explicit MapWord(uintptr_t value) : value_(value) {} - - uintptr_t value_; - // Bits used by the marking phase of the garbage collector. // // The first word of a heap object is normally a map pointer. The last two @@ -1029,6 +1123,14 @@ class MapWord BASE_EMBEDDED { // 0xFFE00000 static const uint32_t kForwardingOffsetMask = ~(kMapPageIndexMask | kMapPageOffsetMask); + + private: + // HeapObject calls the private constructor and directly reads the value. + friend class HeapObject; + + explicit MapWord(uintptr_t value) : value_(value) {} + + uintptr_t value_; }; @@ -1593,13 +1695,11 @@ class JSObject: public HeapObject { void LookupInDescriptor(String* name, LookupResult* result); - // Attempts to get property with a named interceptor getter. Returns - // |true| and stores result into |result| if succesful, otherwise - // returns |false| - bool GetPropertyWithInterceptorProper(JSObject* receiver, - String* name, - PropertyAttributes* attributes, - Object** result); + // Attempts to get property with a named interceptor getter. + // Sets |attributes| to ABSENT if interceptor didn't return anything + Object* GetPropertyWithInterceptorProper(JSObject* receiver, + String* name, + PropertyAttributes* attributes); DISALLOW_IMPLICIT_CONSTRUCTORS(JSObject); }; @@ -1750,15 +1850,28 @@ class DescriptorArray: public FixedArray { // using the supplied storage for the small "bridge". void SetEnumCache(FixedArray* bridge_storage, FixedArray* new_cache); - // Accessors for fetching instance descriptor at descriptor number.. + // Accessors for fetching instance descriptor at descriptor number. 
inline String* GetKey(int descriptor_number); inline Object* GetValue(int descriptor_number); inline Smi* GetDetails(int descriptor_number); + inline PropertyType GetType(int descriptor_number); + inline int GetFieldIndex(int descriptor_number); + inline JSFunction* GetConstantFunction(int descriptor_number); + inline Object* GetCallbacksObject(int descriptor_number); + inline AccessorDescriptor* GetCallbacks(int descriptor_number); + inline bool IsProperty(int descriptor_number); + inline bool IsTransition(int descriptor_number); + inline bool IsNullDescriptor(int descriptor_number); + inline bool IsDontEnum(int descriptor_number); // Accessor for complete descriptor. inline void Get(int descriptor_number, Descriptor* desc); inline void Set(int descriptor_number, Descriptor* desc); + // Transfer complete descriptor from another descriptor array to + // this one. + inline void CopyFrom(int index, DescriptorArray* src, int src_index); + // Copy the descriptor array, insert a new descriptor and optionally // remove map transitions. If the descriptor is already present, it is // replaced. If a replaced descriptor is a real property (not a transition @@ -3140,6 +3253,9 @@ class GlobalObject: public JSObject { // Retrieve the property cell used to store a property. Object* GetPropertyCell(LookupResult* result); + // Ensure that the global object has a cell for the given property name. + Object* EnsurePropertyCell(String* name); + // Casting. static inline GlobalObject* cast(Object* obj); @@ -3272,6 +3388,13 @@ class JSRegExp: public JSObject { inline Object* DataAt(int index); // Set implementation data after the object has been prepared. 
inline void SetDataAt(int index, Object* value); + static int code_index(bool is_ascii) { + if (is_ascii) { + return kIrregexpASCIICodeIndex; + } else { + return kIrregexpUC16CodeIndex; + } + } static inline JSRegExp* cast(Object* obj); diff --git a/V8Binding/v8/src/parser.cc b/V8Binding/v8/src/parser.cc index e1d9b71..89d6d5b 100644 --- a/V8Binding/v8/src/parser.cc +++ b/V8Binding/v8/src/parser.cc @@ -1576,10 +1576,10 @@ VariableProxy* AstBuildingParser::Declare(Handle<String> name, // to the calling function context. if (top_scope_->is_function_scope()) { // Declare the variable in the function scope. - var = top_scope_->LookupLocal(name); + var = top_scope_->LocalLookup(name); if (var == NULL) { // Declare the name. - var = top_scope_->Declare(name, mode); + var = top_scope_->DeclareLocal(name, mode); } else { // The name was declared before; check for conflicting // re-declarations. If the previous declaration was a const or the @@ -2045,7 +2045,7 @@ Statement* Parser::ParseContinueStatement(bool* ok) { // 'continue' Identifier? 
';' Expect(Token::CONTINUE, CHECK_OK); - Handle<String> label(static_cast<String**>(NULL)); + Handle<String> label = Handle<String>::null(); Token::Value tok = peek(); if (!scanner_.has_line_terminator_before_next() && tok != Token::SEMICOLON && tok != Token::RBRACE && tok != Token::EOS) { @@ -3466,8 +3466,8 @@ FunctionLiteral* Parser::ParseFunctionLiteral(Handle<String> var_name, while (!done) { Handle<String> param_name = ParseIdentifier(CHECK_OK); if (!is_pre_parsing_) { - top_scope_->AddParameter(top_scope_->Declare(param_name, - Variable::VAR)); + top_scope_->AddParameter(top_scope_->DeclareLocal(param_name, + Variable::VAR)); num_parameters++; } done = (peek() == Token::RPAREN); diff --git a/V8Binding/v8/src/platform-posix.cc b/V8Binding/v8/src/platform-posix.cc index 741ba28..6174522 100644 --- a/V8Binding/v8/src/platform-posix.cc +++ b/V8Binding/v8/src/platform-posix.cc @@ -42,16 +42,15 @@ #include <netinet/in.h> #include <netdb.h> -#include "v8.h" - -#include "platform.h" - #if defined(ANDROID) #define LOG_TAG "v8" -#undef LOG -#include <utils/Log.h> +#include <utils/Log.h> // LOG_PRI_VA #endif +#include "v8.h" + +#include "platform.h" + namespace v8 { namespace internal { @@ -132,7 +131,7 @@ void OS::Print(const char* format, ...) { void OS::VPrint(const char* format, va_list args) { #if defined(ANDROID) - LOGV(format, args); + LOG_PRI_VA(ANDROID_LOG_INFO, LOG_TAG, format, args); #else vprintf(format, args); #endif @@ -149,7 +148,7 @@ void OS::PrintError(const char* format, ...) 
{ void OS::VPrintError(const char* format, va_list args) { #if defined(ANDROID) - LOGV(format, args); + LOG_PRI_VA(ANDROID_LOG_ERROR, LOG_TAG, format, args); #else vfprintf(stderr, format, args); #endif diff --git a/V8Binding/v8/src/property.cc b/V8Binding/v8/src/property.cc index 2915c4a..caa7397 100644 --- a/V8Binding/v8/src/property.cc +++ b/V8Binding/v8/src/property.cc @@ -31,20 +31,6 @@ namespace v8 { namespace internal { -void DescriptorWriter::Write(Descriptor* desc) { - ASSERT(desc->key_->IsSymbol()); - descriptors_->Set(pos_, desc); - advance(); -} - - -void DescriptorWriter::WriteFrom(DescriptorReader* reader) { - Descriptor desc; - reader->Get(&desc); - Write(&desc); -} - - #ifdef DEBUG void LookupResult::Print() { if (!IsValid()) { diff --git a/V8Binding/v8/src/property.h b/V8Binding/v8/src/property.h index 69e5640..1869719 100644 --- a/V8Binding/v8/src/property.h +++ b/V8Binding/v8/src/property.h @@ -95,8 +95,6 @@ class Descriptor BASE_EMBEDDED { value_(value), details_(attributes, type, index) { } - friend class DescriptorWriter; - friend class DescriptorReader; friend class DescriptorArray; }; @@ -324,92 +322,6 @@ class LookupResult BASE_EMBEDDED { }; -// The DescriptorStream is an abstraction for iterating over a map's -// instance descriptors. -class DescriptorStream BASE_EMBEDDED { - public: - explicit DescriptorStream(DescriptorArray* descriptors, int pos) { - descriptors_ = descriptors; - pos_ = pos; - limit_ = descriptors_->number_of_descriptors(); - } - - // Tells whether we have reached the end of the steam. - bool eos() { return pos_ >= limit_; } - - int next_position() { return pos_ + 1; } - void advance() { pos_ = next_position(); } - - protected: - DescriptorArray* descriptors_; - int pos_; // Current position. - int limit_; // Limit for position. 
-}; - - -class DescriptorReader: public DescriptorStream { - public: - explicit DescriptorReader(DescriptorArray* descriptors, int pos = 0) - : DescriptorStream(descriptors, pos) {} - - String* GetKey() { return descriptors_->GetKey(pos_); } - Object* GetValue() { return descriptors_->GetValue(pos_); } - PropertyDetails GetDetails() { - return PropertyDetails(descriptors_->GetDetails(pos_)); - } - - int GetFieldIndex() { return Descriptor::IndexFromValue(GetValue()); } - - bool IsDontEnum() { return GetDetails().IsDontEnum(); } - - PropertyType type() { return GetDetails().type(); } - - // Tells whether the type is a transition. - bool IsTransition() { - PropertyType t = type(); - ASSERT(t != INTERCEPTOR); - return t == MAP_TRANSITION || t == CONSTANT_TRANSITION; - } - - bool IsNullDescriptor() { - return type() == NULL_DESCRIPTOR; - } - - bool IsProperty() { - return type() < FIRST_PHANTOM_PROPERTY_TYPE; - } - - JSFunction* GetConstantFunction() { return JSFunction::cast(GetValue()); } - - AccessorDescriptor* GetCallbacks() { - ASSERT(type() == CALLBACKS); - Proxy* p = Proxy::cast(GetCallbacksObject()); - return reinterpret_cast<AccessorDescriptor*>(p->proxy()); - } - - Object* GetCallbacksObject() { - ASSERT(type() == CALLBACKS); - return GetValue(); - } - - bool Equals(String* name) { return name->Equals(GetKey()); } - - void Get(Descriptor* desc) { - descriptors_->Get(pos_, desc); - } -}; - -class DescriptorWriter: public DescriptorStream { - public: - explicit DescriptorWriter(DescriptorArray* descriptors) - : DescriptorStream(descriptors, 0) {} - - // Append a descriptor to this stream. - void Write(Descriptor* desc); - // Read a descriptor from the reader and append it to this stream. 
- void WriteFrom(DescriptorReader* reader); -}; - } } // namespace v8::internal #endif // V8_PROPERTY_H_ diff --git a/V8Binding/v8/src/runtime.cc b/V8Binding/v8/src/runtime.cc index aeda068..350d391 100644 --- a/V8Binding/v8/src/runtime.cc +++ b/V8Binding/v8/src/runtime.cc @@ -1016,16 +1016,16 @@ static Object* Runtime_RegExpExec(Arguments args) { ASSERT(args.length() == 4); CONVERT_ARG_CHECKED(JSRegExp, regexp, 0); CONVERT_ARG_CHECKED(String, subject, 1); - // Due to the way the JS files are constructed this must be less than the + // Due to the way the JS calls are constructed this must be less than the // length of a string, i.e. it is always a Smi. We check anyway for security. - CONVERT_CHECKED(Smi, index, args[2]); + CONVERT_SMI_CHECKED(index, args[2]); CONVERT_ARG_CHECKED(JSArray, last_match_info, 3); RUNTIME_ASSERT(last_match_info->HasFastElements()); - RUNTIME_ASSERT(index->value() >= 0); - RUNTIME_ASSERT(index->value() <= subject->length()); + RUNTIME_ASSERT(index >= 0); + RUNTIME_ASSERT(index <= subject->length()); Handle<Object> result = RegExpImpl::Exec(regexp, subject, - index->value(), + index, last_match_info); if (result.is_null()) return Failure::Exception(); return *result; @@ -2598,15 +2598,13 @@ static Object* Runtime_KeyedGetProperty(Arguments args) { Object* value = receiver->FastPropertyAt(offset); return value->IsTheHole() ? Heap::undefined_value() : value; } - // Lookup cache miss. Perform lookup and update the cache if - // appropriate. + // Lookup cache miss. Perform lookup and update the cache if appropriate. LookupResult result; receiver->LocalLookup(key, &result); if (result.IsProperty() && result.IsLoaded() && result.type() == FIELD) { int offset = result.GetFieldIndex(); KeyedLookupCache::Update(receiver_map, key, offset); - Object* value = receiver->FastPropertyAt(offset); - return value->IsTheHole() ? Heap::undefined_value() : value; + return receiver->FastPropertyAt(offset); } } else { // Attempt dictionary lookup. 
@@ -2615,10 +2613,10 @@ static Object* Runtime_KeyedGetProperty(Arguments args) { if ((entry != StringDictionary::kNotFound) && (dictionary->DetailsAt(entry).type() == NORMAL)) { Object* value = dictionary->ValueAt(entry); - if (receiver->IsGlobalObject()) { - value = JSGlobalPropertyCell::cast(value)->value(); - } - return value; + if (!receiver->IsGlobalObject()) return value; + value = JSGlobalPropertyCell::cast(value)->value(); + if (!value->IsTheHole()) return value; + // If value is the hole do the general lookup. } } } @@ -4155,16 +4153,21 @@ static Object* Runtime_Math_pow(Arguments args) { } CONVERT_DOUBLE_CHECKED(y, args[1]); - if (y == 0.5) { - // It's not uncommon to use Math.pow(x, 0.5) to compute the square - // root of a number. To speed up such computations, we explictly - // check for this case and use the sqrt() function which is faster - // than pow(). - return Heap::AllocateHeapNumber(sqrt(x)); - } else if (y == -0.5) { - // Optimized using Math.pow(x, -0.5) == 1 / Math.pow(x, 0.5). - return Heap::AllocateHeapNumber(1.0 / sqrt(x)); - } else if (y == 0) { + + if (!isinf(x)) { + if (y == 0.5) { + // It's not uncommon to use Math.pow(x, 0.5) to compute the + // square root of a number. To speed up such computations, we + // explictly check for this case and use the sqrt() function + // which is faster than pow(). + return Heap::AllocateHeapNumber(sqrt(x)); + } else if (y == -0.5) { + // Optimized using Math.pow(x, -0.5) == 1 / Math.pow(x, 0.5). + return Heap::AllocateHeapNumber(1.0 / sqrt(x)); + } + } + + if (y == 0) { return Smi::FromInt(1); } else if (isnan(y) || ((x == 1 || x == -1) && isinf(y))) { return Heap::nan_value(); @@ -4313,7 +4316,7 @@ static Object* Runtime_NewObject(Arguments args) { #ifdef ENABLE_DEBUGGER_SUPPORT // Handle stepping into constructors if step into is active. 
if (Debug::StepInActive()) { - Debug::HandleStepIn(function, 0, true); + Debug::HandleStepIn(function, Handle<Object>::null(), 0, true); } #endif diff --git a/V8Binding/v8/src/scopes.cc b/V8Binding/v8/src/scopes.cc index 88b1c66..78ed035 100644 --- a/V8Binding/v8/src/scopes.cc +++ b/V8Binding/v8/src/scopes.cc @@ -71,28 +71,28 @@ static bool Match(void* key1, void* key2) { // Dummy constructor -LocalsMap::LocalsMap(bool gotta_love_static_overloading) : HashMap() {} +VariableMap::VariableMap(bool gotta_love_static_overloading) : HashMap() {} -LocalsMap::LocalsMap() : HashMap(Match, &LocalsMapAllocator, 8) {} -LocalsMap::~LocalsMap() {} +VariableMap::VariableMap() : HashMap(Match, &LocalsMapAllocator, 8) {} +VariableMap::~VariableMap() {} -Variable* LocalsMap::Declare(Scope* scope, - Handle<String> name, - Variable::Mode mode, - bool is_valid_LHS, - Variable::Kind kind) { +Variable* VariableMap::Declare(Scope* scope, + Handle<String> name, + Variable::Mode mode, + bool is_valid_lhs, + Variable::Kind kind) { HashMap::Entry* p = HashMap::Lookup(name.location(), name->Hash(), true); if (p->value == NULL) { // The variable has not been declared yet -> insert it. ASSERT(p->key == name.location()); - p->value = new Variable(scope, name, mode, is_valid_LHS, kind); + p->value = new Variable(scope, name, mode, is_valid_lhs, kind); } return reinterpret_cast<Variable*>(p->value); } -Variable* LocalsMap::Lookup(Handle<String> name) { +Variable* VariableMap::Lookup(Handle<String> name) { HashMap::Entry* p = HashMap::Lookup(name.location(), name->Hash(), false); if (p != NULL) { ASSERT(*reinterpret_cast<String**>(p->key) == *name); @@ -110,7 +110,7 @@ Variable* LocalsMap::Lookup(Handle<String> name) { // Dummy constructor Scope::Scope() : inner_scopes_(0), - locals_(false), + variables_(false), temps_(0), params_(0), dynamics_(NULL), @@ -168,27 +168,26 @@ void Scope::Initialize(bool inside_with) { // instead load them directly from the stack. 
Currently, the only // such parameter is 'this' which is passed on the stack when // invoking scripts - { Variable* var = - locals_.Declare(this, Factory::this_symbol(), Variable::VAR, - false, Variable::THIS); - var->rewrite_ = new Slot(var, Slot::PARAMETER, -1); - receiver_ = new VariableProxy(Factory::this_symbol(), true, false); - receiver_->BindTo(var); - } + Variable* var = + variables_.Declare(this, Factory::this_symbol(), Variable::VAR, + false, Variable::THIS); + var->rewrite_ = new Slot(var, Slot::PARAMETER, -1); + receiver_ = new VariableProxy(Factory::this_symbol(), true, false); + receiver_->BindTo(var); if (is_function_scope()) { // Declare 'arguments' variable which exists in all functions. - // Note that it may never be accessed, in which case it won't - // be allocated during variable allocation. - locals_.Declare(this, Factory::arguments_symbol(), Variable::VAR, - true, Variable::ARGUMENTS); + // Note that it might never be accessed, in which case it won't be + // allocated during variable allocation. + variables_.Declare(this, Factory::arguments_symbol(), Variable::VAR, + true, Variable::ARGUMENTS); } } -Variable* Scope::LookupLocal(Handle<String> name) { - return locals_.Lookup(name); +Variable* Scope::LocalLookup(Handle<String> name) { + return variables_.Lookup(name); } @@ -196,7 +195,7 @@ Variable* Scope::Lookup(Handle<String> name) { for (Scope* scope = this; scope != NULL; scope = scope->outer_scope()) { - Variable* var = scope->LookupLocal(name); + Variable* var = scope->LocalLookup(name); if (var != NULL) return var; } return NULL; @@ -210,18 +209,25 @@ Variable* Scope::DeclareFunctionVar(Handle<String> name) { } -Variable* Scope::Declare(Handle<String> name, Variable::Mode mode) { +Variable* Scope::DeclareLocal(Handle<String> name, Variable::Mode mode) { // DYNAMIC variables are introduces during variable allocation, // INTERNAL variables are allocated explicitly, and TEMPORARY // variables are allocated via NewTemporary(). 
ASSERT(mode == Variable::VAR || mode == Variable::CONST); - return locals_.Declare(this, name, mode, true, Variable::NORMAL); + return variables_.Declare(this, name, mode, true, Variable::NORMAL); +} + + +Variable* Scope::DeclareGlobal(Handle<String> name) { + ASSERT(is_global_scope()); + return variables_.Declare(this, name, Variable::DYNAMIC, true, + Variable::NORMAL); } void Scope::AddParameter(Variable* var) { ASSERT(is_function_scope()); - ASSERT(LookupLocal(var->name()) == var); + ASSERT(LocalLookup(var->name()) == var); params_.Add(var); } @@ -291,7 +297,9 @@ void Scope::CollectUsedVariables(List<Variable*, Allocator>* locals) { locals->Add(var); } } - for (LocalsMap::Entry* p = locals_.Start(); p != NULL; p = locals_.Next(p)) { + for (VariableMap::Entry* p = variables_.Start(); + p != NULL; + p = variables_.Next(p)) { Variable* var = reinterpret_cast<Variable*>(p->value); if (var->var_uses()->is_used()) { locals->Add(var); @@ -410,8 +418,8 @@ static void PrintVar(PrettyPrinter* printer, int indent, Variable* var) { } -static void PrintMap(PrettyPrinter* printer, int indent, LocalsMap* map) { - for (LocalsMap::Entry* p = map->Start(); p != NULL; p = map->Next(p)) { +static void PrintMap(PrettyPrinter* printer, int indent, VariableMap* map) { + for (VariableMap::Entry* p = map->Start(); p != NULL; p = map->Next(p)) { Variable* var = reinterpret_cast<Variable*>(p->value); PrintVar(printer, indent, var); } @@ -478,7 +486,7 @@ void Scope::Print(int n) { } Indent(n1, "// local vars\n"); - PrintMap(&printer, n1, &locals_); + PrintMap(&printer, n1, &variables_); Indent(n1, "// dynamic vars\n"); if (dynamics_ != NULL) { @@ -502,7 +510,7 @@ void Scope::Print(int n) { Variable* Scope::NonLocal(Handle<String> name, Variable::Mode mode) { if (dynamics_ == NULL) dynamics_ = new DynamicScopePart(); - LocalsMap* map = dynamics_->GetMap(mode); + VariableMap* map = dynamics_->GetMap(mode); Variable* var = map->Lookup(name); if (var == NULL) { // Declare a new non-local. 
@@ -530,7 +538,7 @@ Variable* Scope::LookupRecursive(Handle<String> name, bool guess = scope_calls_eval_; // Try to find the variable in this scope. - Variable* var = LookupLocal(name); + Variable* var = LocalLookup(name); if (var != NULL) { // We found a variable. If this is not an inner lookup, we are done. @@ -621,8 +629,7 @@ void Scope::ResolveVariable(Scope* global_scope, scope_calls_eval_ || outer_scope_calls_eval_)) { // We must have a global variable. ASSERT(global_scope != NULL); - var = new Variable(global_scope, proxy->name(), - Variable::DYNAMIC, true, Variable::NORMAL); + var = global_scope->DeclareGlobal(proxy->name()); } else if (scope_inside_with_) { // If we are inside a with statement we give up and look up @@ -706,26 +713,26 @@ bool Scope::PropagateScopeInfo(bool outer_scope_calls_eval, bool Scope::MustAllocate(Variable* var) { - // Give var a read/write use if there is a chance it might be - // accessed via an eval() call, or if it is a global variable. - // This is only possible if the variable has a visible name. + // Give var a read/write use if there is a chance it might be accessed + // via an eval() call. This is only possible if the variable has a + // visible name. if ((var->is_this() || var->name()->length() > 0) && (var->is_accessed_from_inner_scope_ || scope_calls_eval_ || inner_scope_calls_eval_ || - scope_contains_with_ || var->is_global())) { + scope_contains_with_)) { var->var_uses()->RecordAccess(1); } - return var->var_uses()->is_used(); + // Global variables do not need to be allocated. + return !var->is_global() && var->var_uses()->is_used(); } bool Scope::MustAllocateInContext(Variable* var) { // If var is accessed from an inner scope, or if there is a - // possibility that it might be accessed from the current or - // an inner scope (through an eval() call), it must be allocated - // in the context. - // Exceptions: Global variables and temporary variables must - // never be allocated in the (FixedArray part of the) context. 
+ // possibility that it might be accessed from the current or an inner + // scope (through an eval() call), it must be allocated in the + // context. Exception: temporary variables are not allocated in the + // context. return var->mode() != Variable::TEMPORARY && (var->is_accessed_from_inner_scope_ || @@ -755,7 +762,7 @@ void Scope::AllocateHeapSlot(Variable* var) { void Scope::AllocateParameterLocals() { ASSERT(is_function_scope()); - Variable* arguments = LookupLocal(Factory::arguments_symbol()); + Variable* arguments = LocalLookup(Factory::arguments_symbol()); ASSERT(arguments != NULL); // functions have 'arguments' declared implicitly if (MustAllocate(arguments) && !HasArgumentsParameter()) { // 'arguments' is used. Unless there is also a parameter called @@ -865,7 +872,7 @@ void Scope::AllocateNonParameterLocal(Variable* var) { ASSERT(var->rewrite_ == NULL || (!var->IsVariable(Factory::result_symbol())) || (var->slot() == NULL || var->slot()->type() != Slot::LOCAL)); - if (MustAllocate(var) && var->rewrite_ == NULL) { + if (var->rewrite_ == NULL && MustAllocate(var)) { if (MustAllocateInContext(var)) { AllocateHeapSlot(var); } else { @@ -876,27 +883,21 @@ void Scope::AllocateNonParameterLocal(Variable* var) { void Scope::AllocateNonParameterLocals() { - // Each variable occurs exactly once in the locals_ list; all - // variables that have no rewrite yet are non-parameter locals. - - // Sort them according to use such that the locals with more uses - // get allocated first. - if (FLAG_usage_computation) { - // This is currently not implemented. - } - + // All variables that have no rewrite yet are non-parameter locals. 
for (int i = 0; i < temps_.length(); i++) { AllocateNonParameterLocal(temps_[i]); } - for (LocalsMap::Entry* p = locals_.Start(); p != NULL; p = locals_.Next(p)) { + for (VariableMap::Entry* p = variables_.Start(); + p != NULL; + p = variables_.Next(p)) { Variable* var = reinterpret_cast<Variable*>(p->value); AllocateNonParameterLocal(var); } - // Note: For now, function_ must be allocated at the very end. If - // it gets allocated in the context, it must be the last slot in the - // context, because of the current ScopeInfo implementation (see + // For now, function_ must be allocated at the very end. If it gets + // allocated in the context, it must be the last slot in the context, + // because of the current ScopeInfo implementation (see // ScopeInfo::ScopeInfo(FunctionScope* scope) constructor). if (function_ != NULL) { AllocateNonParameterLocal(function_); diff --git a/V8Binding/v8/src/scopes.h b/V8Binding/v8/src/scopes.h index ea4e0f7..5767d9f 100644 --- a/V8Binding/v8/src/scopes.h +++ b/V8Binding/v8/src/scopes.h @@ -35,19 +35,22 @@ namespace v8 { namespace internal { -// A hash map to support fast local variable declaration and lookup. -class LocalsMap: public HashMap { +// A hash map to support fast variable declaration and lookup. +class VariableMap: public HashMap { public: - LocalsMap(); + VariableMap(); // Dummy constructor. This constructor doesn't set up the map // properly so don't use it unless you have a good reason. 
- explicit LocalsMap(bool gotta_love_static_overloading); + explicit VariableMap(bool gotta_love_static_overloading); - virtual ~LocalsMap(); + virtual ~VariableMap(); - Variable* Declare(Scope* scope, Handle<String> name, Variable::Mode mode, - bool is_valid_LHS, Variable::Kind kind); + Variable* Declare(Scope* scope, + Handle<String> name, + Variable::Mode mode, + bool is_valid_lhs, + Variable::Kind kind); Variable* Lookup(Handle<String> name); }; @@ -59,14 +62,14 @@ class LocalsMap: public HashMap { // and setup time for scopes that don't need them. class DynamicScopePart : public ZoneObject { public: - LocalsMap* GetMap(Variable::Mode mode) { + VariableMap* GetMap(Variable::Mode mode) { int index = mode - Variable::DYNAMIC; ASSERT(index >= 0 && index < 3); return &maps_[index]; } private: - LocalsMap maps_[3]; + VariableMap maps_[3]; }; @@ -105,7 +108,7 @@ class Scope: public ZoneObject { // Declarations // Lookup a variable in this scope. Returns the variable or NULL if not found. - virtual Variable* LookupLocal(Handle<String> name); + virtual Variable* LocalLookup(Handle<String> name); // Lookup a variable in this scope or outer scopes. // Returns the variable or NULL if not found. @@ -116,9 +119,15 @@ class Scope: public ZoneObject { // outer scope. Only possible for function scopes; at most one variable. Variable* DeclareFunctionVar(Handle<String> name); - // Declare a variable in this scope. If the variable has been + // Declare a local variable in this scope. If the variable has been // declared before, the previously declared variable is returned. - virtual Variable* Declare(Handle<String> name, Variable::Mode mode); + virtual Variable* DeclareLocal(Handle<String> name, Variable::Mode mode); + + // Declare an implicit global variable in this scope which must be a + // global scope. The variable was introduced (possibly from an inner + // scope) by a reference to an unresolved variable with no intervening + // with statements or eval calls. 
+ Variable* DeclareGlobal(Handle<String> name); // Add a parameter to the parameter list. The parameter must have been // declared via Declare. The same parameter may occur more then once in @@ -288,25 +297,28 @@ class Scope: public ZoneObject { Handle<String> scope_name_; // The variables declared in this scope: - // all user-declared variables (incl. parameters) - LocalsMap locals_; - // compiler-allocated (user-invisible) temporaries + // + // All user-declared variables (incl. parameters). For global scopes + // variables may be implicitly 'declared' by being used (possibly in + // an inner scope) with no intervening with statements or eval calls. + VariableMap variables_; + // Compiler-allocated (user-invisible) temporaries. ZoneList<Variable*> temps_; - // parameter list in source order + // Parameter list in source order. ZoneList<Variable*> params_; - // variables that must be looked up dynamically + // Variables that must be looked up dynamically. DynamicScopePart* dynamics_; - // unresolved variables referred to from this scope + // Unresolved variables referred to from this scope. ZoneList<VariableProxy*> unresolved_; - // declarations + // Declarations. ZoneList<Declaration*> decls_; - // convenience variable + // Convenience variable. VariableProxy* receiver_; - // function variable, if any; function scopes only + // Function variable, if any; function scopes only. Variable* function_; - // convenience variable; function scopes only + // Convenience variable; function scopes only. VariableProxy* arguments_; - // convenience variable; function scopes only + // Convenience variable; function scopes only. VariableProxy* arguments_shadow_; // Illegal redeclaration. 
diff --git a/V8Binding/v8/src/serialize.cc b/V8Binding/v8/src/serialize.cc index f633b06..592cf5a 100644 --- a/V8Binding/v8/src/serialize.cc +++ b/V8Binding/v8/src/serialize.cc @@ -42,47 +42,44 @@ namespace v8 { namespace internal { -// Encoding: a RelativeAddress must be able to fit in a pointer: -// it is encoded as an Address with (from MS to LS bits): -// 27 bits identifying a word in the space, in one of three formats: -// - MAP and OLD spaces: 16 bits of page number, 11 bits of word offset in page -// - NEW space: 27 bits of word offset -// - LO space: 27 bits of page number -// 3 bits to encode the AllocationSpace (special values for code in LO space) -// 2 bits identifying this as a HeapObject +// 32-bit encoding: a RelativeAddress must be able to fit in a +// pointer: it is encoded as an Address with (from LS to MS bits): +// - 2 bits identifying this as a HeapObject. +// - 4 bits to encode the AllocationSpace (including special values for +// code and fixed arrays in LO space) +// - 27 bits identifying a word in the space, in one of three formats: +// - paged spaces: 16 bits of page number, 11 bits of word offset in page +// - NEW space: 27 bits of word offset +// - LO space: 27 bits of page number const int kSpaceShift = kHeapObjectTagSize; -const int kSpaceBits = kSpaceTagSize; -const int kSpaceMask = kSpaceTagMask; - -// These value are used instead of space numbers when serializing/ -// deserializing. They indicate an object that is in large object space, but -// should be treated specially. 
-// Make the pages executable on platforms that support it: -const int kLOSpaceExecutable = LAST_SPACE + 1; -// Reserve space for write barrier bits (for objects that can contain -// references to new space): -const int kLOSpacePointer = LAST_SPACE + 2; - +const int kSpaceBits = 4; +const int kSpaceMask = (1 << kSpaceBits) - 1; const int kOffsetShift = kSpaceShift + kSpaceBits; const int kOffsetBits = 11; const int kOffsetMask = (1 << kOffsetBits) - 1; -const int kPageBits = 32 - (kOffsetBits + kSpaceBits + kHeapObjectTagSize); const int kPageShift = kOffsetShift + kOffsetBits; +const int kPageBits = 32 - (kOffsetBits + kSpaceBits + kHeapObjectTagSize); const int kPageMask = (1 << kPageBits) - 1; const int kPageAndOffsetShift = kOffsetShift; const int kPageAndOffsetBits = kPageBits + kOffsetBits; const int kPageAndOffsetMask = (1 << kPageAndOffsetBits) - 1; +// These values are special allocation space tags used for +// serialization. +// Mar the pages executable on platforms that support it. +const int kLargeCode = LAST_SPACE + 1; +// Allocate extra remembered-set bits. 
+const int kLargeFixedArray = LAST_SPACE + 2; + static inline AllocationSpace GetSpace(Address addr) { const intptr_t encoded = reinterpret_cast<intptr_t>(addr); int space_number = (static_cast<int>(encoded >> kSpaceShift) & kSpaceMask); - if (space_number == kLOSpaceExecutable) space_number = LO_SPACE; - else if (space_number == kLOSpacePointer) space_number = LO_SPACE; + if (space_number > LAST_SPACE) space_number = LO_SPACE; return static_cast<AllocationSpace>(space_number); } @@ -91,7 +88,7 @@ static inline bool IsLargeExecutableObject(Address addr) { const intptr_t encoded = reinterpret_cast<intptr_t>(addr); const int space_number = (static_cast<int>(encoded >> kSpaceShift) & kSpaceMask); - return (space_number == kLOSpaceExecutable); + return (space_number == kLargeCode); } @@ -99,7 +96,7 @@ static inline bool IsLargeFixedArray(Address addr) { const intptr_t encoded = reinterpret_cast<intptr_t>(addr); const int space_number = (static_cast<int>(encoded >> kSpaceShift) & kSpaceMask); - return (space_number == kLOSpacePointer); + return (space_number == kLargeFixedArray); } @@ -147,6 +144,9 @@ class RelativeAddress { int page_index, int page_offset) : space_(space), page_index_(page_index), page_offset_(page_offset) { + // Assert that the space encoding (plus the two pseudo-spaces for + // special large objects) fits in the available bits. 
+ ASSERT(((LAST_SPACE + 2) & ~kSpaceMask) == 0); ASSERT(space <= LAST_SPACE && space >= 0); } @@ -154,8 +154,7 @@ class RelativeAddress { Address Encode() const; AllocationSpace space() const { - if (space_ == kLOSpaceExecutable) return LO_SPACE; - if (space_ == kLOSpacePointer) return LO_SPACE; + if (space_ > LAST_SPACE) return LO_SPACE; return static_cast<AllocationSpace>(space_); } int page_index() const { return page_index_; } @@ -165,7 +164,8 @@ class RelativeAddress { return space_ == CODE_SPACE || space_ == OLD_POINTER_SPACE || space_ == OLD_DATA_SPACE || - space_ == MAP_SPACE; + space_ == MAP_SPACE || + space_ == CELL_SPACE; } void next_address(int offset) { page_offset_ += offset; } @@ -180,11 +180,11 @@ class RelativeAddress { void set_to_large_code_object() { ASSERT(space_ == LO_SPACE); - space_ = kLOSpaceExecutable; + space_ = kLargeCode; } void set_to_large_fixed_array() { ASSERT(space_ == LO_SPACE); - space_ = kLOSpacePointer; + space_ = kLargeFixedArray; } @@ -201,6 +201,7 @@ Address RelativeAddress::Encode() const { int result = 0; switch (space_) { case MAP_SPACE: + case CELL_SPACE: case OLD_POINTER_SPACE: case OLD_DATA_SPACE: case CODE_SPACE: @@ -216,8 +217,8 @@ Address RelativeAddress::Encode() const { result = word_offset << kPageAndOffsetShift; break; case LO_SPACE: - case kLOSpaceExecutable: - case kLOSpacePointer: + case kLargeCode: + case kLargeFixedArray: ASSERT_EQ(0, page_offset_); ASSERT_EQ(0, page_index_ & ~kPageAndOffsetMask); result = page_index_ << kPageAndOffsetShift; @@ -235,6 +236,7 @@ void RelativeAddress::Verify() { ASSERT(page_offset_ >= 0 && page_index_ >= 0); switch (space_) { case MAP_SPACE: + case CELL_SPACE: case OLD_POINTER_SPACE: case OLD_DATA_SPACE: case CODE_SPACE: @@ -245,8 +247,8 @@ void RelativeAddress::Verify() { ASSERT(page_index_ == 0); break; case LO_SPACE: - case kLOSpaceExecutable: - case kLOSpacePointer: + case kLargeCode: + case kLargeFixedArray: ASSERT(page_offset_ == 0); break; } @@ -291,6 +293,7 @@ class 
SimulatedHeapSpace { void SimulatedHeapSpace::InitEmptyHeap(AllocationSpace space) { switch (space) { case MAP_SPACE: + case CELL_SPACE: case OLD_POINTER_SPACE: case OLD_DATA_SPACE: case CODE_SPACE: @@ -307,12 +310,15 @@ void SimulatedHeapSpace::InitEmptyHeap(AllocationSpace space) { void SimulatedHeapSpace::InitCurrentHeap(AllocationSpace space) { switch (space) { case MAP_SPACE: + case CELL_SPACE: case OLD_POINTER_SPACE: case OLD_DATA_SPACE: case CODE_SPACE: { PagedSpace* ps; if (space == MAP_SPACE) { ps = Heap::map_space(); + } else if (space == CELL_SPACE) { + ps = Heap::cell_space(); } else if (space == OLD_POINTER_SPACE) { ps = Heap::old_pointer_space(); } else if (space == OLD_DATA_SPACE) { @@ -1121,6 +1127,8 @@ void Serializer::PutHeader() { writer_->PutInt(Heap::code_space()->Size() + Heap::new_space()->Size()); writer_->PutC('|'); writer_->PutInt(Heap::map_space()->Size()); + writer_->PutC('|'); + writer_->PutInt(Heap::cell_space()->Size()); writer_->PutC(']'); // Write global handles. writer_->PutC('G'); @@ -1303,6 +1311,7 @@ static const int kInitArraySize = 32; Deserializer::Deserializer(const byte* str, int len) : reader_(str, len), map_pages_(kInitArraySize), + cell_pages_(kInitArraySize), old_pointer_pages_(kInitArraySize), old_data_pages_(kInitArraySize), code_pages_(kInitArraySize), @@ -1475,6 +1484,8 @@ void Deserializer::GetHeader() { InitPagedSpace(Heap::code_space(), reader_.GetInt(), &code_pages_); reader_.ExpectC('|'); InitPagedSpace(Heap::map_space(), reader_.GetInt(), &map_pages_); + reader_.ExpectC('|'); + InitPagedSpace(Heap::cell_space(), reader_.GetInt(), &cell_pages_); reader_.ExpectC(']'); // Create placeholders for global handles later to be fill during // IterateRoots. 
@@ -1607,6 +1618,9 @@ Object* Deserializer::Resolve(Address encoded) { case MAP_SPACE: return ResolvePaged(PageIndex(encoded), PageOffset(encoded), Heap::map_space(), &map_pages_); + case CELL_SPACE: + return ResolvePaged(PageIndex(encoded), PageOffset(encoded), + Heap::cell_space(), &cell_pages_); case OLD_POINTER_SPACE: return ResolvePaged(PageIndex(encoded), PageOffset(encoded), Heap::old_pointer_space(), &old_pointer_pages_); diff --git a/V8Binding/v8/src/serialize.h b/V8Binding/v8/src/serialize.h index 7f4eb63..1b24065 100644 --- a/V8Binding/v8/src/serialize.h +++ b/V8Binding/v8/src/serialize.h @@ -320,10 +320,11 @@ class Deserializer: public ObjectVisitor { bool has_log_; // The file has log information. // Resolve caches the following: - List<Page*> map_pages_; // All pages in the map space. + List<Page*> map_pages_; // All pages in the map space. + List<Page*> cell_pages_; // All pages in the cell space. List<Page*> old_pointer_pages_; // All pages in the old pointer space. - List<Page*> old_data_pages_; // All pages in the old data space. - List<Page*> code_pages_; + List<Page*> old_data_pages_; // All pages in the old data space. + List<Page*> code_pages_; // All pages in the code space. List<Object*> large_objects_; // All known large objects. // A list of global handles at deserialization time. List<Object**> global_handles_; diff --git a/V8Binding/v8/src/spaces.cc b/V8Binding/v8/src/spaces.cc index 077bcab..2393281 100644 --- a/V8Binding/v8/src/spaces.cc +++ b/V8Binding/v8/src/spaces.cc @@ -37,8 +37,8 @@ namespace internal { // For contiguous spaces, top should be in the space (or at the end) and limit // should be the end of the space. 
#define ASSERT_SEMISPACE_ALLOCATION_INFO(info, space) \ - ASSERT((space).low() <= (info).top \ - && (info).top <= (space).high() \ + ASSERT((space).low() <= (info).top \ + && (info).top <= (space).high() \ && (info).limit == (space).high()) @@ -786,6 +786,77 @@ void PagedSpace::Print() { } #endif +#ifdef DEBUG +// We do not assume that the PageIterator works, because it depends on the +// invariants we are checking during verification. +void PagedSpace::Verify(ObjectVisitor* visitor) { + // The allocation pointer should be valid, and it should be in a page in the + // space. + ASSERT(allocation_info_.VerifyPagedAllocation()); + Page* top_page = Page::FromAllocationTop(allocation_info_.top); + ASSERT(MemoryAllocator::IsPageInSpace(top_page, this)); + + // Loop over all the pages. + bool above_allocation_top = false; + Page* current_page = first_page_; + while (current_page->is_valid()) { + if (above_allocation_top) { + // We don't care what's above the allocation top. + } else { + // Unless this is the last page in the space containing allocated + // objects, the allocation top should be at a constant offset from the + // object area end. + Address top = current_page->AllocationTop(); + if (current_page == top_page) { + ASSERT(top == allocation_info_.top); + // The next page will be above the allocation top. + above_allocation_top = true; + } else { + ASSERT(top == current_page->ObjectAreaEnd() - page_extra_); + } + + // It should be packed with objects from the bottom to the top. + Address current = current_page->ObjectAreaStart(); + while (current < top) { + HeapObject* object = HeapObject::FromAddress(current); + + // The first word should be a map, and we expect all map pointers to + // be in map space. + Map* map = object->map(); + ASSERT(map->IsMap()); + ASSERT(Heap::map_space()->Contains(map)); + + // Perform space-specific object verification. + VerifyObject(object); + + // The object itself should look OK. 
+ object->Verify(); + + // All the interior pointers should be contained in the heap and + // have their remembered set bits set if required as determined + // by the visitor. + int size = object->Size(); + if (object->IsCode()) { + Code::cast(object)->ConvertICTargetsFromAddressToObject(); + object->IterateBody(map->instance_type(), size, visitor); + Code::cast(object)->ConvertICTargetsFromObjectToAddress(); + } else { + object->IterateBody(map->instance_type(), size, visitor); + } + + current += size; + } + + // The allocation pointer should not be in the middle of an object. + ASSERT(current == top); + } + + current_page = current_page->next_page(); + } +} +#endif + + // ----------------------------------------------------------------------------- // NewSpace implementation @@ -1141,7 +1212,7 @@ static void ReportHistogram(bool print_spill) { // Summarize string types. int string_number = 0; int string_bytes = 0; -#define INCREMENT(type, size, name) \ +#define INCREMENT(type, size, name, camel_name) \ string_number += heap_histograms[type].number(); \ string_bytes += heap_histograms[type].bytes(); STRING_TYPE_LIST(INCREMENT) @@ -1185,8 +1256,8 @@ static void DoReportStatistics(HistogramInfo* info, const char* description) { // Lump all the string types together. int string_number = 0; int string_bytes = 0; -#define INCREMENT(type, size, name) \ - string_number += info[type].number(); \ +#define INCREMENT(type, size, name, camel_name) \ + string_number += info[type].number(); \ string_bytes += info[type].bytes(); STRING_TYPE_LIST(INCREMENT) #undef INCREMENT @@ -1265,13 +1336,13 @@ void FreeListNode::set_size(int size_in_bytes) { // If the block is too small (eg, one or two words), to hold both a size // field and a next pointer, we give it a filler map that gives it the // correct size. 
- if (size_in_bytes > ByteArray::kHeaderSize) { - set_map(Heap::byte_array_map()); + if (size_in_bytes > ByteArray::kAlignedSize) { + set_map(Heap::raw_unchecked_byte_array_map()); ByteArray::cast(this)->set_length(ByteArray::LengthFor(size_in_bytes)); } else if (size_in_bytes == kPointerSize) { - set_map(Heap::one_word_filler_map()); + set_map(Heap::raw_unchecked_one_pointer_filler_map()); } else if (size_in_bytes == 2 * kPointerSize) { - set_map(Heap::two_word_filler_map()); + set_map(Heap::raw_unchecked_two_pointer_filler_map()); } else { UNREACHABLE(); } @@ -1280,16 +1351,26 @@ void FreeListNode::set_size(int size_in_bytes) { Address FreeListNode::next() { - ASSERT(map() == Heap::byte_array_map()); - ASSERT(Size() >= kNextOffset + kPointerSize); - return Memory::Address_at(address() + kNextOffset); + ASSERT(map() == Heap::raw_unchecked_byte_array_map() || + map() == Heap::raw_unchecked_two_pointer_filler_map()); + if (map() == Heap::raw_unchecked_byte_array_map()) { + ASSERT(Size() >= kNextOffset + kPointerSize); + return Memory::Address_at(address() + kNextOffset); + } else { + return Memory::Address_at(address() + kPointerSize); + } } void FreeListNode::set_next(Address next) { - ASSERT(map() == Heap::byte_array_map()); - ASSERT(Size() >= kNextOffset + kPointerSize); - Memory::Address_at(address() + kNextOffset) = next; + ASSERT(map() == Heap::raw_unchecked_byte_array_map() || + map() == Heap::raw_unchecked_two_pointer_filler_map()); + if (map() == Heap::raw_unchecked_byte_array_map()) { + ASSERT(Size() >= kNextOffset + kPointerSize); + Memory::Address_at(address() + kNextOffset) = next; + } else { + Memory::Address_at(address() + kPointerSize) = next; + } } @@ -1445,42 +1526,42 @@ bool OldSpaceFreeList::Contains(FreeListNode* node) { #endif -MapSpaceFreeList::MapSpaceFreeList(AllocationSpace owner) { - owner_ = owner; +FixedSizeFreeList::FixedSizeFreeList(AllocationSpace owner, int object_size) + : owner_(owner), object_size_(object_size) { Reset(); } -void 
MapSpaceFreeList::Reset() { +void FixedSizeFreeList::Reset() { available_ = 0; head_ = NULL; } -void MapSpaceFreeList::Free(Address start) { +void FixedSizeFreeList::Free(Address start) { #ifdef DEBUG - for (int i = 0; i < Map::kSize; i += kPointerSize) { + for (int i = 0; i < object_size_; i += kPointerSize) { Memory::Address_at(start + i) = kZapValue; } #endif ASSERT(!FLAG_always_compact); // We only use the freelists with mark-sweep. FreeListNode* node = FreeListNode::FromAddress(start); - node->set_size(Map::kSize); + node->set_size(object_size_); node->set_next(head_); head_ = node->address(); - available_ += Map::kSize; + available_ += object_size_; } -Object* MapSpaceFreeList::Allocate() { +Object* FixedSizeFreeList::Allocate() { if (head_ == NULL) { - return Failure::RetryAfterGC(Map::kSize, owner_); + return Failure::RetryAfterGC(object_size_, owner_); } ASSERT(!FLAG_always_compact); // We only use the freelists with mark-sweep. FreeListNode* node = FreeListNode::FromAddress(head_); head_ = node->next(); - available_ -= Map::kSize; + available_ -= object_size_; return node; } @@ -1494,7 +1575,6 @@ void OldSpace::PrepareForMarkCompact(bool will_compact) { // the space is considered 'available' and we will rediscover live data // and waste during the collection. MCResetRelocationInfo(); - mc_end_of_relocation_ = bottom(); ASSERT(Available() == Capacity()); } else { // During a non-compacting collection, everything below the linear @@ -1510,24 +1590,6 @@ void OldSpace::PrepareForMarkCompact(bool will_compact) { } -void OldSpace::MCAdjustRelocationEnd(Address address, int size_in_bytes) { - ASSERT(Contains(address)); - Address current_top = mc_end_of_relocation_; - Page* current_page = Page::FromAllocationTop(current_top); - - // No more objects relocated to this page? Move to the next. - ASSERT(current_top <= current_page->mc_relocation_top); - if (current_top == current_page->mc_relocation_top) { - // The space should already be properly expanded. 
- Page* next_page = current_page->next_page(); - CHECK(next_page->is_valid()); - mc_end_of_relocation_ = next_page->ObjectAreaStart(); - } - ASSERT(mc_end_of_relocation_ == address); - mc_end_of_relocation_ += size_in_bytes; -} - - void OldSpace::MCCommitRelocationInfo() { // Update fast allocation info. allocation_info_.top = mc_forwarding_info_.top; @@ -1624,76 +1686,6 @@ HeapObject* OldSpace::AllocateInNextPage(Page* current_page, #ifdef DEBUG -// We do not assume that the PageIterator works, because it depends on the -// invariants we are checking during verification. -void OldSpace::Verify() { - // The allocation pointer should be valid, and it should be in a page in the - // space. - ASSERT(allocation_info_.VerifyPagedAllocation()); - Page* top_page = Page::FromAllocationTop(allocation_info_.top); - ASSERT(MemoryAllocator::IsPageInSpace(top_page, this)); - - // Loop over all the pages. - bool above_allocation_top = false; - Page* current_page = first_page_; - while (current_page->is_valid()) { - if (above_allocation_top) { - // We don't care what's above the allocation top. - } else { - // Unless this is the last page in the space containing allocated - // objects, the allocation top should be at the object area end. - Address top = current_page->AllocationTop(); - if (current_page == top_page) { - ASSERT(top == allocation_info_.top); - // The next page will be above the allocation top. - above_allocation_top = true; - } else { - ASSERT(top == current_page->ObjectAreaEnd()); - } - - // It should be packed with objects from the bottom to the top. - Address current = current_page->ObjectAreaStart(); - while (current < top) { - HeapObject* object = HeapObject::FromAddress(current); - - // The first word should be a map, and we expect all map pointers to - // be in map space. - Map* map = object->map(); - ASSERT(map->IsMap()); - ASSERT(Heap::map_space()->Contains(map)); - - // The object should not be a map. 
- ASSERT(!object->IsMap()); - - // The object itself should look OK. - object->Verify(); - - // All the interior pointers should be contained in the heap and have - // their remembered set bits set if they point to new space. Code - // objects do not have remembered set bits that we care about. - VerifyPointersAndRSetVisitor rset_visitor; - VerifyPointersVisitor no_rset_visitor; - int size = object->Size(); - if (object->IsCode()) { - Code::cast(object)->ConvertICTargetsFromAddressToObject(); - object->IterateBody(map->instance_type(), size, &no_rset_visitor); - Code::cast(object)->ConvertICTargetsFromObjectToAddress(); - } else { - object->IterateBody(map->instance_type(), size, &rset_visitor); - } - - current += size; - } - - // The allocation pointer should not be in the middle of an object. - ASSERT(current == top); - } - - current_page = current_page->next_page(); - } -} - - struct CommentStatistic { const char* comment; int size; @@ -1856,7 +1848,7 @@ void OldSpace::ReportStatistics() { int bitpos = intoff*kBitsPerByte + bitoff; Address slot = p->OffsetToAddress(bitpos << kObjectAlignmentBits); Object** obj = reinterpret_cast<Object**>(slot); - if (*obj == Heap::fixed_array_map()) { + if (*obj == Heap::raw_unchecked_fixed_array_map()) { rset_marked_arrays++; FixedArray* fa = FixedArray::cast(HeapObject::FromAddress(slot)); @@ -1987,25 +1979,13 @@ void OldSpace::PrintRSet() { DoPrintRSet("old"); } #endif // ----------------------------------------------------------------------------- -// MapSpace implementation +// FixedSpace implementation -void MapSpace::PrepareForMarkCompact(bool will_compact) { +void FixedSpace::PrepareForMarkCompact(bool will_compact) { if (will_compact) { // Reset relocation info. MCResetRelocationInfo(); - // Initialize map index entry. 
- int page_count = 0; - PageIterator it(this, PageIterator::ALL_PAGES); - while (it.has_next()) { - ASSERT_MAP_PAGE_INDEX(page_count); - - Page* p = it.next(); - ASSERT(p->mc_page_index == page_count); - - page_addresses_[page_count++] = p->address(); - } - // During a compacting collection, everything in the space is considered // 'available' (set by the call to MCResetRelocationInfo) and we will // rediscover live and wasted bytes during the collection. @@ -2023,7 +2003,7 @@ void MapSpace::PrepareForMarkCompact(bool will_compact) { } -void MapSpace::MCCommitRelocationInfo() { +void FixedSpace::MCCommitRelocationInfo() { // Update fast allocation info. allocation_info_.top = mc_forwarding_info_.top; allocation_info_.limit = mc_forwarding_info_.limit; @@ -2053,7 +2033,8 @@ void MapSpace::MCCommitRelocationInfo() { // Slow case for normal allocation. Try in order: (1) allocate in the next // page in the space, (2) allocate off the space's free list, (3) expand the // space, (4) fail. -HeapObject* MapSpace::SlowAllocateRaw(int size_in_bytes) { +HeapObject* FixedSpace::SlowAllocateRaw(int size_in_bytes) { + ASSERT_EQ(object_size_in_bytes_, size_in_bytes); // Linear allocation in this space has failed. If there is another page // in the space, move to that page and allocate there. This allocation // should succeed. @@ -2062,10 +2043,10 @@ HeapObject* MapSpace::SlowAllocateRaw(int size_in_bytes) { return AllocateInNextPage(current_page, size_in_bytes); } - // There is no next page in this space. Try free list allocation. The - // map space free list implicitly assumes that all free blocks are map - // sized. - if (size_in_bytes == Map::kSize) { + // There is no next page in this space. Try free list allocation. + // The fixed space free list implicitly assumes that all free blocks + // are of the fixed size. 
+ if (size_in_bytes == object_size_in_bytes_) { Object* result = free_list_.Allocate(); if (!result->IsFailure()) { accounting_stats_.AllocateBytes(size_in_bytes); @@ -2094,81 +2075,19 @@ HeapObject* MapSpace::SlowAllocateRaw(int size_in_bytes) { // Move to the next page (there is assumed to be one) and allocate there. // The top of page block is always wasted, because it is too small to hold a // map. -HeapObject* MapSpace::AllocateInNextPage(Page* current_page, - int size_in_bytes) { +HeapObject* FixedSpace::AllocateInNextPage(Page* current_page, + int size_in_bytes) { ASSERT(current_page->next_page()->is_valid()); - ASSERT(current_page->ObjectAreaEnd() - allocation_info_.top == kPageExtra); - accounting_stats_.WasteBytes(kPageExtra); + ASSERT(current_page->ObjectAreaEnd() - allocation_info_.top == page_extra_); + ASSERT_EQ(object_size_in_bytes_, size_in_bytes); + accounting_stats_.WasteBytes(page_extra_); SetAllocationInfo(&allocation_info_, current_page->next_page()); return AllocateLinearly(&allocation_info_, size_in_bytes); } #ifdef DEBUG -// We do not assume that the PageIterator works, because it depends on the -// invariants we are checking during verification. -void MapSpace::Verify() { - // The allocation pointer should be valid, and it should be in a page in the - // space. - ASSERT(allocation_info_.VerifyPagedAllocation()); - Page* top_page = Page::FromAllocationTop(allocation_info_.top); - ASSERT(MemoryAllocator::IsPageInSpace(top_page, this)); - - // Loop over all the pages. - bool above_allocation_top = false; - Page* current_page = first_page_; - while (current_page->is_valid()) { - if (above_allocation_top) { - // We don't care what's above the allocation top. - } else { - // Unless this is the last page in the space containing allocated - // objects, the allocation top should be at a constant offset from the - // object area end. 
- Address top = current_page->AllocationTop(); - if (current_page == top_page) { - ASSERT(top == allocation_info_.top); - // The next page will be above the allocation top. - above_allocation_top = true; - } else { - ASSERT(top == current_page->ObjectAreaEnd() - kPageExtra); - } - - // It should be packed with objects from the bottom to the top. - Address current = current_page->ObjectAreaStart(); - while (current < top) { - HeapObject* object = HeapObject::FromAddress(current); - - // The first word should be a map, and we expect all map pointers to - // be in map space. - Map* map = object->map(); - ASSERT(map->IsMap()); - ASSERT(Heap::map_space()->Contains(map)); - - // The object should be a map or a byte array. - ASSERT(object->IsMap() || object->IsByteArray()); - - // The object itself should look OK. - object->Verify(); - - // All the interior pointers should be contained in the heap and - // have their remembered set bits set if they point to new space. - VerifyPointersAndRSetVisitor visitor; - int size = object->Size(); - object->IterateBody(map->instance_type(), size, &visitor); - - current += size; - } - - // The allocation pointer should not be in the middle of an object. - ASSERT(current == top); - } - - current_page = current_page->next_page(); - } -} - - -void MapSpace::ReportStatistics() { +void FixedSpace::ReportStatistics() { int pct = Available() * 100 / Capacity(); PrintF(" capacity: %d, waste: %d, available: %d, %%%d\n", Capacity(), Waste(), Available(), pct); @@ -2215,7 +2134,50 @@ void MapSpace::ReportStatistics() { } -void MapSpace::PrintRSet() { DoPrintRSet("map"); } +void FixedSpace::PrintRSet() { DoPrintRSet(name_); } +#endif + + +// ----------------------------------------------------------------------------- +// MapSpace implementation + +void MapSpace::PrepareForMarkCompact(bool will_compact) { + // Call prepare of the super class. 
+ FixedSpace::PrepareForMarkCompact(will_compact); + + if (will_compact) { + // Initialize map index entry. + int page_count = 0; + PageIterator it(this, PageIterator::ALL_PAGES); + while (it.has_next()) { + ASSERT_MAP_PAGE_INDEX(page_count); + + Page* p = it.next(); + ASSERT(p->mc_page_index == page_count); + + page_addresses_[page_count++] = p->address(); + } + } +} + + +#ifdef DEBUG +void MapSpace::VerifyObject(HeapObject* object) { + // The object should be a map or a free-list node. + ASSERT(object->IsMap() || object->IsByteArray()); +} +#endif + + +// ----------------------------------------------------------------------------- +// GlobalPropertyCellSpace implementation + +#ifdef DEBUG +void CellSpace::VerifyObject(HeapObject* object) { + // The object should be a global object property cell or a free-list node. + ASSERT(object->IsJSGlobalPropertyCell() || + object->map() == Heap::two_pointer_filler_map()); +} #endif diff --git a/V8Binding/v8/src/spaces.h b/V8Binding/v8/src/spaces.h index 676652b..3352d30 100644 --- a/V8Binding/v8/src/spaces.h +++ b/V8Binding/v8/src/spaces.h @@ -302,7 +302,6 @@ class Space : public Malloced { virtual int Size() = 0; #ifdef DEBUG - virtual void Verify() = 0; virtual void Print() = 0; #endif @@ -394,6 +393,9 @@ class MemoryAllocator : public AllStatic { // Returns the maximum available bytes of heaps. static int Available() { return capacity_ < size_ ? 0 : capacity_ - size_; } + // Returns allocated spaces in bytes. + static int Size() { return size_; } + // Returns maximum available bytes that the old space can have. static int MaxAvailable() { return (Available() / Page::kPageSize) * Page::kObjectAreaSize; @@ -836,6 +838,13 @@ class PagedSpace : public Space { // Print meta info and objects in this space. virtual void Print(); + // Verify integrity of this space. 
+ virtual void Verify(ObjectVisitor* visitor); + + // Overridden by subclasses to verify space-specific object + // properties (e.g., only maps or free-list nodes are in map space). + virtual void VerifyObject(HeapObject* obj) {} + // Report code object related statistics void CollectCodeStatistics(); static void ReportCodeStatistics(); @@ -862,6 +871,12 @@ class PagedSpace : public Space { // Relocation information during mark-compact collections. AllocationInfo mc_forwarding_info_; + // Bytes of each page that cannot be allocated. Possibly non-zero + // for pages in spaces with only fixed-size objects. Always zero + // for pages in spaces with variable sized objects (those pages are + // padded with free-list nodes). + int page_extra_; + // Sets allocation pointer to a page bottom. static void SetAllocationInfo(AllocationInfo* alloc_info, Page* p); @@ -1315,8 +1330,7 @@ class OldSpaceFreeList BASE_EMBEDDED { private: // The size range of blocks, in bytes. (Smaller allocations are allowed, but // will always result in waste.) - static const int kMinBlockSize = - POINTER_SIZE_ALIGN(ByteArray::kHeaderSize) + kPointerSize; + static const int kMinBlockSize = 2 * kPointerSize; static const int kMaxBlockSize = Page::kMaxHeapObjectSize; // The identity of the owning space, for building allocation Failure @@ -1391,9 +1405,9 @@ class OldSpaceFreeList BASE_EMBEDDED { // The free list for the map space. -class MapSpaceFreeList BASE_EMBEDDED { +class FixedSizeFreeList BASE_EMBEDDED { public: - explicit MapSpaceFreeList(AllocationSpace owner); + FixedSizeFreeList(AllocationSpace owner, int object_size); // Clear the free list. void Reset(); @@ -1402,12 +1416,12 @@ class MapSpaceFreeList BASE_EMBEDDED { int available() { return available_; } // Place a node on the free list. The block starting at 'start' (assumed to - // have size Map::kSize) is placed on the free list. Bookkeeping + // have size object_size_) is placed on the free list. 
Bookkeeping // information will be written to the block, ie, its contents will be // destroyed. The start address should be word aligned. void Free(Address start); - // Allocate a map-sized block from the free list. The block is unitialized. + // Allocate a fixed sized block from the free list. The block is unitialized. // A failure is returned if no block is available. Object* Allocate(); @@ -1422,7 +1436,10 @@ class MapSpaceFreeList BASE_EMBEDDED { // objects. AllocationSpace owner_; - DISALLOW_COPY_AND_ASSIGN(MapSpaceFreeList); + // The size of the objects in this space. + int object_size_; + + DISALLOW_COPY_AND_ASSIGN(FixedSizeFreeList); }; @@ -1437,6 +1454,7 @@ class OldSpace : public PagedSpace { AllocationSpace id, Executability executable) : PagedSpace(max_capacity, id, executable), free_list_(id) { + page_extra_ = 0; } // The bytes available on the free list (ie, not above the linear allocation @@ -1460,20 +1478,11 @@ class OldSpace : public PagedSpace { // clears the free list. virtual void PrepareForMarkCompact(bool will_compact); - // Adjust the top of relocation pointer to point to the end of the object - // given by 'address' and 'size_in_bytes'. Move it to the next page if - // necessary, ensure that it points to the address, then increment it by the - // size. - void MCAdjustRelocationEnd(Address address, int size_in_bytes); - // Updates the allocation pointer to the relocation top after a mark-compact // collection. virtual void MCCommitRelocationInfo(); #ifdef DEBUG - // Verify integrity of this space. - virtual void Verify(); - // Reports statistics for the space void ReportStatistics(); // Dump the remembered sets in the space to stdout. @@ -1492,39 +1501,41 @@ class OldSpace : public PagedSpace { // The space's free list. OldSpaceFreeList free_list_; - // During relocation, we keep a pointer to the most recently relocated - // object in order to know when to move to the next page. 
- Address mc_end_of_relocation_; - public: TRACK_MEMORY("OldSpace") }; // ----------------------------------------------------------------------------- -// Old space for all map objects +// Old space for objects of a fixed size -class MapSpace : public PagedSpace { +class FixedSpace : public PagedSpace { public: - // Creates a map space object with a maximum capacity. - explicit MapSpace(int max_capacity, AllocationSpace id) - : PagedSpace(max_capacity, id, NOT_EXECUTABLE), free_list_(id) { } + FixedSpace(int max_capacity, + AllocationSpace id, + int object_size_in_bytes, + const char* name) + : PagedSpace(max_capacity, id, NOT_EXECUTABLE), + object_size_in_bytes_(object_size_in_bytes), + name_(name), + free_list_(id, object_size_in_bytes) { + page_extra_ = Page::kObjectAreaSize % object_size_in_bytes; + } // The top of allocation in a page in this space. Undefined if page is unused. virtual Address PageAllocationTop(Page* page) { return page == TopPageOf(allocation_info_) ? top() - : page->ObjectAreaEnd() - kPageExtra; + : page->ObjectAreaEnd() - page_extra_; } - // Give a map-sized block of memory to the space's free list. + int object_size_in_bytes() { return object_size_in_bytes_; } + + // Give a fixed sized block of memory to the space's free list. void Free(Address start) { free_list_.Free(start); accounting_stats_.DeallocateBytes(Map::kSize); } - // Given an index, returns the page address. - Address PageAddress(int page_index) { return page_addresses_[page_index]; } - // Prepares for a mark-compact GC. virtual void PrepareForMarkCompact(bool will_compact); @@ -1533,21 +1544,13 @@ class MapSpace : public PagedSpace { virtual void MCCommitRelocationInfo(); #ifdef DEBUG - // Verify integrity of this space. - virtual void Verify(); - // Reports statistic info of the space void ReportStatistics(); + // Dump the remembered sets in the space to stdout. void PrintRSet(); #endif - // Constants. 
- static const int kMapPageIndexBits = 10; - static const int kMaxMapPageIndex = (1 << kMapPageIndexBits) - 1; - - static const int kPageExtra = Page::kObjectAreaSize % Map::kSize; - protected: // Virtual function in the superclass. Slow path of AllocateRaw. HeapObject* SlowAllocateRaw(int size_in_bytes); @@ -1557,9 +1560,41 @@ class MapSpace : public PagedSpace { HeapObject* AllocateInNextPage(Page* current_page, int size_in_bytes); private: + // The size of objects in this space. + int object_size_in_bytes_; + + // The name of this space. + const char* name_; + // The space's free list. - MapSpaceFreeList free_list_; + FixedSizeFreeList free_list_; +}; + + +// ----------------------------------------------------------------------------- +// Old space for all map objects + +class MapSpace : public FixedSpace { + public: + // Creates a map space object with a maximum capacity. + MapSpace(int max_capacity, AllocationSpace id) + : FixedSpace(max_capacity, id, Map::kSize, "map") {} + + // Prepares for a mark-compact GC. + virtual void PrepareForMarkCompact(bool will_compact); + // Given an index, returns the page address. + Address PageAddress(int page_index) { return page_addresses_[page_index]; } + + // Constants. + static const int kMaxMapPageIndex = (1 << MapWord::kMapPageIndexBits) - 1; + + protected: +#ifdef DEBUG + virtual void VerifyObject(HeapObject* obj); +#endif + + private: // An array of page start address in a map space. Address page_addresses_[kMaxMapPageIndex + 1]; @@ -1569,6 +1604,25 @@ class MapSpace : public PagedSpace { // ----------------------------------------------------------------------------- +// Old space for all global object property cell objects + +class CellSpace : public FixedSpace { + public: + // Creates a property cell space object with a maximum capacity. 
+ CellSpace(int max_capacity, AllocationSpace id) + : FixedSpace(max_capacity, id, JSGlobalPropertyCell::kSize, "cell") {} + + protected: +#ifdef DEBUG + virtual void VerifyObject(HeapObject* obj); +#endif + + public: + TRACK_MEMORY("MapSpace") +}; + + +// ----------------------------------------------------------------------------- // Large objects ( > Page::kMaxHeapObjectSize ) are allocated and managed by // the large object space. A large object is allocated from OS heap with // extra padding bytes (Page::kPageSize + Page::kObjectStartOffset). diff --git a/V8Binding/v8/src/string-stream.cc b/V8Binding/v8/src/string-stream.cc index 44ba297..9a137e3 100644 --- a/V8Binding/v8/src/string-stream.cc +++ b/V8Binding/v8/src/string-stream.cc @@ -343,10 +343,11 @@ void StringStream::PrintUsingMap(JSObject* js_object) { Add("<Invalid map>\n"); return; } - for (DescriptorReader r(map->instance_descriptors()); !r.eos(); r.advance()) { - switch (r.type()) { + DescriptorArray* descs = map->instance_descriptors(); + for (int i = 0; i < descs->number_of_descriptors(); i++) { + switch (descs->GetType(i)) { case FIELD: { - Object* key = r.GetKey(); + Object* key = descs->GetKey(i); if (key->IsString() || key->IsNumber()) { int len = 3; if (key->IsString()) { @@ -360,7 +361,7 @@ void StringStream::PrintUsingMap(JSObject* js_object) { key->ShortPrint(); } Add(": "); - Object* value = js_object->FastPropertyAt(r.GetFieldIndex()); + Object* value = js_object->FastPropertyAt(descs->GetFieldIndex(i)); Add("%o\n", value); } } diff --git a/V8Binding/v8/src/stub-cache.cc b/V8Binding/v8/src/stub-cache.cc index 49b20e2..7eb8cd3 100644 --- a/V8Binding/v8/src/stub-cache.cc +++ b/V8Binding/v8/src/stub-cache.cc @@ -173,14 +173,19 @@ Object* StubCache::ComputeLoadNormal(String* name, JSObject* receiver) { Object* StubCache::ComputeLoadGlobal(String* name, - GlobalObject* receiver, + JSObject* receiver, + GlobalObject* holder, JSGlobalPropertyCell* cell, bool is_dont_delete) { Code::Flags flags = 
Code::ComputeMonomorphicFlags(Code::LOAD_IC, NORMAL); Object* code = receiver->map()->FindInCodeCache(name, flags); if (code->IsUndefined()) { LoadStubCompiler compiler; - code = compiler.CompileLoadGlobal(receiver, cell, name, is_dont_delete); + code = compiler.CompileLoadGlobal(receiver, + holder, + cell, + name, + is_dont_delete); if (code->IsFailure()) return code; LOG(CodeCreateEvent(Logger::LOAD_IC_TAG, Code::cast(code), name)); Object* result = receiver->map()->UpdateCodeCache(name, Code::cast(code)); @@ -445,7 +450,7 @@ Object* StubCache::ComputeCallConstant(int argc, if (!function->is_compiled()) return Failure::InternalError(); // Compile the stub - only create stubs for fully compiled functions. CallStubCompiler compiler(argc, in_loop); - code = compiler.CompileCallConstant(object, holder, function, check); + code = compiler.CompileCallConstant(object, holder, function, name, check); if (code->IsFailure()) return code; ASSERT_EQ(flags, Code::cast(code)->flags()); LOG(CodeCreateEvent(Logger::CALL_IC_TAG, Code::cast(code), name)); @@ -537,7 +542,8 @@ Object* StubCache::ComputeCallNormal(int argc, Object* StubCache::ComputeCallGlobal(int argc, InLoopFlag in_loop, String* name, - GlobalObject* receiver, + JSObject* receiver, + GlobalObject* holder, JSGlobalPropertyCell* cell, JSFunction* function) { Code::Flags flags = @@ -550,7 +556,7 @@ Object* StubCache::ComputeCallGlobal(int argc, // caches. 
if (!function->is_compiled()) return Failure::InternalError(); CallStubCompiler compiler(argc, in_loop); - code = compiler.CompileCallGlobal(receiver, cell, function, name); + code = compiler.CompileCallGlobal(receiver, holder, cell, function, name); if (code->IsFailure()) return code; ASSERT_EQ(flags, Code::cast(code)->flags()); LOG(CodeCreateEvent(Logger::CALL_IC_TAG, Code::cast(code), name)); @@ -562,10 +568,11 @@ Object* StubCache::ComputeCallGlobal(int argc, static Object* GetProbeValue(Code::Flags flags) { - NumberDictionary* dictionary = Heap::non_monomorphic_cache(); + // Use raw_unchecked... so we don't get assert failures during GC. + NumberDictionary* dictionary = Heap::raw_unchecked_non_monomorphic_cache(); int entry = dictionary->FindEntry(flags); if (entry != -1) return dictionary->ValueAt(entry); - return Heap::undefined_value(); + return Heap::raw_unchecked_undefined_value(); } @@ -579,7 +586,7 @@ static Object* ProbeCache(Code::Flags flags) { Heap::non_monomorphic_cache()->AtNumberPut(flags, Heap::undefined_value()); if (result->IsFailure()) return result; - Heap::set_non_monomorphic_cache(NumberDictionary::cast(result)); + Heap::public_set_non_monomorphic_cache(NumberDictionary::cast(result)); return probe; } @@ -950,6 +957,10 @@ Object* StubCompiler::CompileCallDebugPrepareStepIn(Code::Flags flags) { Object* StubCompiler::GetCodeWithFlags(Code::Flags flags, const char* name) { + // Check for allocation failures during stub compilation. + if (failure_->IsFailure()) return failure_; + + // Create code object in the heap. 
CodeDesc desc; masm_.GetCode(&desc); Object* result = Heap::CreateCode(desc, NULL, flags, masm_.CodeObject()); diff --git a/V8Binding/v8/src/stub-cache.h b/V8Binding/v8/src/stub-cache.h index 9abf370..8bee370 100644 --- a/V8Binding/v8/src/stub-cache.h +++ b/V8Binding/v8/src/stub-cache.h @@ -79,7 +79,8 @@ class StubCache : public AllStatic { static Object* ComputeLoadGlobal(String* name, - GlobalObject* receiver, + JSObject* receiver, + GlobalObject* holder, JSGlobalPropertyCell* cell, bool is_dont_delete); @@ -164,7 +165,8 @@ class StubCache : public AllStatic { static Object* ComputeCallGlobal(int argc, InLoopFlag in_loop, String* name, - GlobalObject* receiver, + JSObject* receiver, + GlobalObject* holder, JSGlobalPropertyCell* cell, JSFunction* function); @@ -322,7 +324,7 @@ class StubCompiler BASE_EMBEDDED { JSARRAY_HAS_FAST_ELEMENTS_CHECK }; - StubCompiler() : scope_(), masm_(NULL, 256) { } + StubCompiler() : scope_(), masm_(NULL, 256), failure_(NULL) { } Object* CompileCallInitialize(Code::Flags flags); Object* CompileCallPreMonomorphic(Code::Flags flags); @@ -342,40 +344,7 @@ class StubCompiler BASE_EMBEDDED { static void GenerateFastPropertyLoad(MacroAssembler* masm, Register dst, Register src, JSObject* holder, int index); - static void GenerateLoadField(MacroAssembler* masm, - JSObject* object, - JSObject* holder, - Register receiver, - Register scratch1, - Register scratch2, - int index, - Label* miss_label); - static void GenerateLoadCallback(MacroAssembler* masm, - JSObject* object, - JSObject* holder, - Register receiver, - Register name, - Register scratch1, - Register scratch2, - AccessorInfo* callback, - Label* miss_label); - static void GenerateLoadConstant(MacroAssembler* masm, - JSObject* object, - JSObject* holder, - Register receiver, - Register scratch1, - Register scratch2, - Object* value, - Label* miss_label); - static void GenerateLoadInterceptor(MacroAssembler* masm, - JSObject* object, - JSObject* holder, - Smi* lookup_hint, - Register 
receiver, - Register name, - Register scratch1, - Register scratch2, - Label* miss_label); + static void GenerateLoadArrayLength(MacroAssembler* masm, Register receiver, Register scratch, @@ -410,10 +379,60 @@ class StubCompiler BASE_EMBEDDED { Object* GetCodeWithFlags(Code::Flags flags, String* name); MacroAssembler* masm() { return &masm_; } + void set_failure(Failure* failure) { failure_ = failure; } + + // Check the integrity of the prototype chain to make sure that the + // current IC is still valid. + Register CheckPrototypes(JSObject* object, + Register object_reg, + JSObject* holder, + Register holder_reg, + Register scratch, + String* name, + Label* miss); + + void GenerateLoadField(JSObject* object, + JSObject* holder, + Register receiver, + Register scratch1, + Register scratch2, + int index, + String* name, + Label* miss); + + void GenerateLoadCallback(JSObject* object, + JSObject* holder, + Register receiver, + Register name_reg, + Register scratch1, + Register scratch2, + AccessorInfo* callback, + String* name, + Label* miss); + + void GenerateLoadConstant(JSObject* object, + JSObject* holder, + Register receiver, + Register scratch1, + Register scratch2, + Object* value, + String* name, + Label* miss); + + void GenerateLoadInterceptor(JSObject* object, + JSObject* holder, + Smi* lookup_hint, + Register receiver, + Register name_reg, + Register scratch1, + Register scratch2, + String* name, + Label* miss); private: HandleScope scope_; MacroAssembler masm_; + Failure* failure_; }; @@ -435,8 +454,9 @@ class LoadStubCompiler: public StubCompiler { JSObject* holder, String* name); - Object* CompileLoadGlobal(GlobalObject* object, - JSGlobalPropertyCell* holder, + Object* CompileLoadGlobal(JSObject* object, + GlobalObject* holder, + JSGlobalPropertyCell* cell, String* name, bool is_dont_delete); @@ -515,11 +535,13 @@ class CallStubCompiler: public StubCompiler { Object* CompileCallConstant(Object* object, JSObject* holder, JSFunction* function, + String* 
name, CheckType check); Object* CompileCallInterceptor(Object* object, JSObject* holder, String* name); - Object* CompileCallGlobal(GlobalObject* object, + Object* CompileCallGlobal(JSObject* object, + GlobalObject* holder, JSGlobalPropertyCell* cell, JSFunction* function, String* name); diff --git a/V8Binding/v8/src/unicode.cc b/V8Binding/v8/src/unicode.cc index 4a9e070..ef13593 100644 --- a/V8Binding/v8/src/unicode.cc +++ b/V8Binding/v8/src/unicode.cc @@ -194,18 +194,13 @@ static int LookupMapping(const int32_t* table, uchar Utf8::CalculateValue(const byte* str, unsigned length, unsigned* cursor) { - static const uchar kMaxOneByteChar = 0x7F; - static const uchar kMaxTwoByteChar = 0x7FF; - static const uchar kMaxThreeByteChar = 0xFFFF; - static const uchar kMaxFourByteChar = 0x1FFFFF; - // We only get called for non-ascii characters. if (length == 1) { *cursor += 1; return kBadChar; } - int first = str[0]; - int second = str[1] ^ 0x80; + byte first = str[0]; + byte second = str[1] ^ 0x80; if (second & 0xC0) { *cursor += 1; return kBadChar; @@ -227,7 +222,7 @@ uchar Utf8::CalculateValue(const byte* str, *cursor += 1; return kBadChar; } - int third = str[2] ^ 0x80; + byte third = str[2] ^ 0x80; if (third & 0xC0) { *cursor += 1; return kBadChar; @@ -245,7 +240,7 @@ uchar Utf8::CalculateValue(const byte* str, *cursor += 1; return kBadChar; } - int fourth = str[3] ^ 0x80; + byte fourth = str[3] ^ 0x80; if (fourth & 0xC0) { *cursor += 1; return kBadChar; diff --git a/V8Binding/v8/src/variables.h b/V8Binding/v8/src/variables.h index c0d1435..3f1f11b 100644 --- a/V8Binding/v8/src/variables.h +++ b/V8Binding/v8/src/variables.h @@ -143,6 +143,12 @@ class Variable: public ZoneObject { ARGUMENTS }; + Variable(Scope* scope, + Handle<String> name, + Mode mode, + bool is_valid_lhs, + Kind kind); + // Printing support static const char* Mode2String(Mode mode); @@ -196,9 +202,6 @@ class Variable: public ZoneObject { SmiAnalysis* type() { return &type_; } private: - 
Variable(Scope* scope, Handle<String> name, Mode mode, bool is_valid_LHS, - Kind kind); - Scope* scope_; Handle<String> name_; Mode mode_; @@ -216,13 +219,10 @@ class Variable: public ZoneObject { SmiAnalysis type_; // Code generation. - // rewrite_ is usually a Slot or a Property, but maybe any expression. + // rewrite_ is usually a Slot or a Property, but may be any expression. Expression* rewrite_; - friend class VariableProxy; - friend class Scope; - friend class LocalsMap; - friend class AstBuildingParser; + friend class Scope; // Has explicit access to rewrite_. }; diff --git a/V8Binding/v8/src/version.cc b/V8Binding/v8/src/version.cc index 7e009fb..48e43de 100644 --- a/V8Binding/v8/src/version.cc +++ b/V8Binding/v8/src/version.cc @@ -33,10 +33,10 @@ // NOTE these macros are used by the SCons build script so their names // cannot be changed without changing the SCons build script. #define MAJOR_VERSION 1 -#define MINOR_VERSION 2 -#define BUILD_NUMBER 12 +#define MINOR_VERSION 3 +#define BUILD_NUMBER 0 #define PATCH_LEVEL 0 -#define CANDIDATE_VERSION false +#define CANDIDATE_VERSION true // Define SONAME to have the SCons build the put a specific SONAME into the // shared library instead the generic SONAME generated from the V8 version diff --git a/V8Binding/v8/src/x64/assembler-x64.cc b/V8Binding/v8/src/x64/assembler-x64.cc index 2ccfd15..c4ee454 100644 --- a/V8Binding/v8/src/x64/assembler-x64.cc +++ b/V8Binding/v8/src/x64/assembler-x64.cc @@ -73,45 +73,8 @@ XMMRegister xmm14 = { 14 }; XMMRegister xmm15 = { 15 }; -Operand::Operand(Register base, int32_t disp): rex_(0) { - len_ = 1; - if (base.is(rsp) || base.is(r12)) { - // SIB byte is needed to encode (rsp + offset) or (r12 + offset). 
- set_sib(times_1, rsp, base); - } - - if (disp == 0 && !base.is(rbp) && !base.is(r13)) { - set_modrm(0, base); - } else if (is_int8(disp)) { - set_modrm(1, base); - set_disp8(disp); - } else { - set_modrm(2, base); - set_disp32(disp); - } -} - - -Operand::Operand(Register base, - Register index, - ScaleFactor scale, - int32_t disp): rex_(0) { - ASSERT(!index.is(rsp)); - len_ = 1; - set_sib(scale, index, base); - if (disp == 0 && !base.is(rbp) && !base.is(r13)) { - // This call to set_modrm doesn't overwrite the REX.B (or REX.X) bits - // possibly set by set_sib. - set_modrm(0, rsp); - } else if (is_int8(disp)) { - set_modrm(1, rsp); - set_disp8(disp); - } else { - set_modrm(2, rsp); - set_disp32(disp); - } -} - +// ----------------------------------------------------------------------------- +// Implementation of CpuFeatures // The required user mode extensions in X64 are (from AMD64 ABI Table A.1): // fpu, tsc, cx8, cmov, mmx, sse, sse2, fxsr, syscall @@ -193,6 +156,71 @@ void CpuFeatures::Probe() { ASSERT(IsSupported(CMOV)); } + +// ----------------------------------------------------------------------------- +// Implementation of RelocInfo + +// Patch the code at the current PC with a call to the target address. +// Additional guard int3 instructions can be added if required. +void RelocInfo::PatchCodeWithCall(Address target, int guard_bytes) { + // Call instruction takes up 13 bytes and int3 takes up one byte. + Address patch_site = pc_; + Memory::uint16_at(patch_site) = 0xBA49u; // movq r10, imm64 + // Write "0x00, call r10" starting at last byte of address. We overwrite + // the 0x00 later, and this lets us write a uint32. + Memory::uint32_at(patch_site + 9) = 0xD2FF4900u; // 0x00, call r10 + Memory::Address_at(patch_site + 2) = target; + + // Add the requested number of int3 instructions after the call. 
+ for (int i = 0; i < guard_bytes; i++) { + *(patch_site + 13 + i) = 0xCC; // int3 + } +} + + +// ----------------------------------------------------------------------------- +// Implementation of Operand + +Operand::Operand(Register base, int32_t disp): rex_(0) { + len_ = 1; + if (base.is(rsp) || base.is(r12)) { + // SIB byte is needed to encode (rsp + offset) or (r12 + offset). + set_sib(times_1, rsp, base); + } + + if (disp == 0 && !base.is(rbp) && !base.is(r13)) { + set_modrm(0, base); + } else if (is_int8(disp)) { + set_modrm(1, base); + set_disp8(disp); + } else { + set_modrm(2, base); + set_disp32(disp); + } +} + + +Operand::Operand(Register base, + Register index, + ScaleFactor scale, + int32_t disp): rex_(0) { + ASSERT(!index.is(rsp)); + len_ = 1; + set_sib(scale, index, base); + if (disp == 0 && !base.is(rbp) && !base.is(r13)) { + // This call to set_modrm doesn't overwrite the REX.B (or REX.X) bits + // possibly set by set_sib. + set_modrm(0, rsp); + } else if (is_int8(disp)) { + set_modrm(1, rsp); + set_disp8(disp); + } else { + set_modrm(2, rsp); + set_disp32(disp); + } +} + + // ----------------------------------------------------------------------------- // Implementation of Assembler @@ -427,6 +455,17 @@ void Assembler::arithmetic_op_32(byte opcode, Register dst, Register src) { } +void Assembler::arithmetic_op_32(byte opcode, + const Operand& dst, + Register src) { + EnsureSpace ensure_space(this); + last_pc_ = pc_; + emit_optional_rex_32(src, dst); + emit(opcode); + emit_operand(src, dst); +} + + void Assembler::immediate_arithmetic_op(byte subcode, Register dst, Immediate src) { @@ -568,6 +607,23 @@ void Assembler::shift_32(Register dst, int subcode) { } +void Assembler::shift_32(Register dst, Immediate shift_amount, int subcode) { + EnsureSpace ensure_space(this); + last_pc_ = pc_; + ASSERT(is_uint6(shift_amount.value_)); // illegal shift count + if (shift_amount.value_ == 1) { + emit_optional_rex_32(dst); + emit(0xD1); + emit_modrm(subcode, 
dst); + } else { + emit_optional_rex_32(dst); + emit(0xC1); + emit_modrm(subcode, dst); + emit(shift_amount.value_); + } +} + + void Assembler::bt(const Operand& dst, Register src) { EnsureSpace ensure_space(this); last_pc_ = pc_; @@ -1068,6 +1124,19 @@ void Assembler::movq(Register dst, void* value, RelocInfo::Mode rmode) { void Assembler::movq(Register dst, int64_t value, RelocInfo::Mode rmode) { + // Non-relocatable values might not need a 64-bit representation. + if (rmode == RelocInfo::NONE) { + // Sadly, there is no zero or sign extending move for 8-bit immediates. + if (is_int32(value)) { + movq(dst, Immediate(static_cast<int32_t>(value))); + return; + } else if (is_uint32(value)) { + movl(dst, Immediate(static_cast<int32_t>(value))); + return; + } + // Value cannot be represented by 32 bits, so do a full 64 bit immediate + // value. + } EnsureSpace ensure_space(this); last_pc_ = pc_; emit_rex_64(dst); @@ -1097,16 +1166,24 @@ void Assembler::movq(const Operand& dst, Immediate value) { void Assembler::movq(Register dst, Handle<Object> value, RelocInfo::Mode mode) { - EnsureSpace ensure_space(this); - last_pc_ = pc_; - ASSERT(!Heap::InNewSpace(*value)); - emit_rex_64(dst); - emit(0xB8 | dst.low_bits()); - if (value->IsHeapObject()) { - emitq(reinterpret_cast<uintptr_t>(value.location()), mode); + // If there is no relocation info, emit the value of the handle efficiently + // (possibly using less that 8 bytes for the value). + if (mode == RelocInfo::NONE) { + // There is no possible reason to store a heap pointer without relocation + // info, so it must be a smi. + ASSERT(value->IsSmi()); + // Smis never have more than 32 significant bits, but they might + // have garbage in the high bits. 
+ movq(dst, + Immediate(static_cast<int32_t>(reinterpret_cast<intptr_t>(*value)))); } else { - ASSERT_EQ(RelocInfo::NONE, mode); - emitq(reinterpret_cast<uintptr_t>(*value), RelocInfo::NONE); + EnsureSpace ensure_space(this); + last_pc_ = pc_; + ASSERT(value->IsHeapObject()); + ASSERT(!Heap::InNewSpace(*value)); + emit_rex_64(dst); + emit(0xB8 | dst.low_bits()); + emitq(reinterpret_cast<uintptr_t>(value.location()), mode); } } @@ -1449,7 +1526,7 @@ void Assembler::testb(Register reg, Immediate mask) { last_pc_ = pc_; if (reg.is(rax)) { emit(0xA8); - emit(mask); + emit(mask.value_); // Low byte emitted. } else { if (reg.code() > 3) { // Register is not one of al, bl, cl, dl. Its encoding needs REX. @@ -1473,6 +1550,15 @@ void Assembler::testb(const Operand& op, Immediate mask) { } +void Assembler::testl(Register dst, Register src) { + EnsureSpace ensure_space(this); + last_pc_ = pc_; + emit_optional_rex_32(dst, src); + emit(0x85); + emit_modrm(dst, src); +} + + void Assembler::testl(Register reg, Immediate mask) { EnsureSpace ensure_space(this); last_pc_ = pc_; diff --git a/V8Binding/v8/src/x64/assembler-x64.h b/V8Binding/v8/src/x64/assembler-x64.h index d99401b..e895332 100644 --- a/V8Binding/v8/src/x64/assembler-x64.h +++ b/V8Binding/v8/src/x64/assembler-x64.h @@ -562,6 +562,26 @@ class Assembler : public Malloced { immediate_arithmetic_op_8(0x7, dst, src); } + void cmpl(Register dst, Register src) { + arithmetic_op_32(0x3B, dst, src); + } + + void cmpl(Register dst, const Operand& src) { + arithmetic_op_32(0x3B, src, dst); + } + + void cmpl(const Operand& dst, Register src) { + arithmetic_op_32(0x39, dst, src); + } + + void cmpl(Register dst, Immediate src) { + immediate_arithmetic_op_32(0x7, dst, src); + } + + void cmpl(const Operand& dst, Immediate src) { + immediate_arithmetic_op_32(0x7, dst, src); + } + void cmpq(Register dst, Register src) { arithmetic_op(0x3B, dst, src); } @@ -578,10 +598,6 @@ class Assembler : public Malloced { immediate_arithmetic_op(0x7, 
dst, src); } - void cmpl(Register dst, Immediate src) { - immediate_arithmetic_op_32(0x7, dst, src); - } - void cmpq(const Operand& dst, Immediate src) { immediate_arithmetic_op(0x7, dst, src); } @@ -674,11 +690,22 @@ class Assembler : public Malloced { shift(dst, shift_amount, 0x7); } + // Shifts dst right, duplicating sign bit, by shift_amount bits. + // Shifting by 1 is handled efficiently. + void sarl(Register dst, Immediate shift_amount) { + shift_32(dst, shift_amount, 0x7); + } + // Shifts dst right, duplicating sign bit, by cl % 64 bits. void sar(Register dst) { shift(dst, 0x7); } + // Shifts dst right, duplicating sign bit, by cl % 64 bits. + void sarl(Register dst) { + shift_32(dst, 0x7); + } + void shl(Register dst, Immediate shift_amount) { shift(dst, shift_amount, 0x4); } @@ -740,6 +767,7 @@ class Assembler : public Malloced { void testb(Register reg, Immediate mask); void testb(const Operand& op, Immediate mask); + void testl(Register dst, Register src); void testl(Register reg, Immediate mask); void testl(const Operand& op, Immediate mask); void testq(const Operand& op, Register reg); @@ -1086,6 +1114,7 @@ class Assembler : public Malloced { // ModR/M byte. void arithmetic_op(byte opcode, Register dst, Register src); void arithmetic_op_32(byte opcode, Register dst, Register src); + void arithmetic_op_32(byte opcode, const Operand& dst, Register src); void arithmetic_op(byte opcode, Register reg, const Operand& op); void immediate_arithmetic_op(byte subcode, Register dst, Immediate src); void immediate_arithmetic_op(byte subcode, const Operand& dst, Immediate src); @@ -1105,6 +1134,7 @@ class Assembler : public Malloced { Immediate src); // Emit machine code for a shift operation. void shift(Register dst, Immediate shift_amount, int subcode); + void shift_32(Register dst, Immediate shift_amount, int subcode); // Shift dst by cl % 64 bits. 
void shift(Register dst, int subcode); void shift_32(Register dst, int subcode); diff --git a/V8Binding/v8/src/x64/codegen-x64.cc b/V8Binding/v8/src/x64/codegen-x64.cc index 54138a2..e3e32e6 100644 --- a/V8Binding/v8/src/x64/codegen-x64.cc +++ b/V8Binding/v8/src/x64/codegen-x64.cc @@ -355,26 +355,22 @@ void CodeGenerator::GenerateReturnSequence(Result* return_value) { // receiver. frame_->Exit(); masm_->ret((scope_->num_parameters() + 1) * kPointerSize); + // Add padding that will be overwritten by a debugger breakpoint. + // frame_->Exit() generates "movq rsp, rbp; pop rbp" length 5. + // "ret k" has length 2. + const int kPadding = Debug::kX64JSReturnSequenceLength - 5 - 2; + for (int i = 0; i < kPadding; ++i) { + masm_->int3(); + } DeleteFrame(); - // TODO(x64): introduce kX64JSReturnSequenceLength and enable assert. - // Check that the size of the code used for returning matches what is // expected by the debugger. - // ASSERT_EQ(Debug::kIa32JSReturnSequenceLength, - // masm_->SizeOfCodeGeneratedSince(&check_exit_codesize)); + ASSERT_EQ(Debug::kX64JSReturnSequenceLength, + masm_->SizeOfCodeGeneratedSince(&check_exit_codesize)); } -void CodeGenerator::GenerateFastCaseSwitchJumpTable(SwitchStatement* a, - int b, - int c, - Label* d, - Vector<Label*> e, - Vector<Label> f) { - UNIMPLEMENTED(); -} - #ifdef DEBUG bool CodeGenerator::HasValidEntryRegisters() { return (allocator()->count(rax) == (frame()->is_used(rax) ? 
1 : 0)) @@ -1276,7 +1272,7 @@ void CodeGenerator::VisitForInStatement(ForInStatement* node) { frame_->EmitPush(rax); // <- slot 3 frame_->EmitPush(rdx); // <- slot 2 - __ movq(rax, FieldOperand(rdx, FixedArray::kLengthOffset)); + __ movsxlq(rax, FieldOperand(rdx, FixedArray::kLengthOffset)); __ shl(rax, Immediate(kSmiTagSize)); frame_->EmitPush(rax); // <- slot 1 frame_->EmitPush(Immediate(Smi::FromInt(0))); // <- slot 0 @@ -1288,7 +1284,7 @@ void CodeGenerator::VisitForInStatement(ForInStatement* node) { frame_->EmitPush(rax); // <- slot 2 // Push the length of the array and the initial index onto the stack. - __ movq(rax, FieldOperand(rax, FixedArray::kLengthOffset)); + __ movsxlq(rax, FieldOperand(rax, FixedArray::kLengthOffset)); __ shl(rax, Immediate(kSmiTagSize)); frame_->EmitPush(rax); // <- slot 1 frame_->EmitPush(Immediate(Smi::FromInt(0))); // <- slot 0 @@ -1301,15 +1297,14 @@ void CodeGenerator::VisitForInStatement(ForInStatement* node) { node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY); __ movq(rax, frame_->ElementAt(0)); // load the current count - __ cmpq(rax, frame_->ElementAt(1)); // compare to the array length + __ cmpl(rax, frame_->ElementAt(1)); // compare to the array length node->break_target()->Branch(above_equal); // Get the i'th entry of the array. __ movq(rdx, frame_->ElementAt(2)); ASSERT(kSmiTagSize == 1 && kSmiTag == 0); // Multiplier is times_4 since rax is already a Smi. - __ movq(rbx, Operand(rdx, rax, times_4, - FixedArray::kHeaderSize - kHeapObjectTag)); + __ movq(rbx, FieldOperand(rdx, rax, times_4, FixedArray::kHeaderSize)); // Get the expected map from the stack or a zero map in the // permanent slow case rax: current iteration count rbx: i'th entry @@ -2459,13 +2454,13 @@ void CodeGenerator::VisitCallEval(CallEval* node) { // receiver. Use a scratch register to avoid destroying the result. 
Result scratch = allocator_->Allocate(); ASSERT(scratch.is_valid()); - __ movl(scratch.reg(), + __ movq(scratch.reg(), FieldOperand(result.reg(), FixedArray::OffsetOfElementAt(0))); frame_->SetElementAt(arg_count + 1, &scratch); // We can reuse the result register now. frame_->Spill(result.reg()); - __ movl(result.reg(), + __ movq(result.reg(), FieldOperand(result.reg(), FixedArray::OffsetOfElementAt(1))); frame_->SetElementAt(arg_count, &result); @@ -2734,12 +2729,6 @@ class DeferredPrefixCountOperation: public DeferredCode { void DeferredPrefixCountOperation::Generate() { - // Undo the optimistic smi operation. - if (is_increment_) { - __ subq(dst_, Immediate(Smi::FromInt(1))); - } else { - __ addq(dst_, Immediate(Smi::FromInt(1))); - } __ push(dst_); __ InvokeBuiltin(Builtins::TO_NUMBER, CALL_FUNCTION); __ push(rax); @@ -2775,12 +2764,6 @@ class DeferredPostfixCountOperation: public DeferredCode { void DeferredPostfixCountOperation::Generate() { - // Undo the optimistic smi operation. - if (is_increment_) { - __ subq(dst_, Immediate(Smi::FromInt(1))); - } else { - __ addq(dst_, Immediate(Smi::FromInt(1))); - } __ push(dst_); __ InvokeBuiltin(Builtins::TO_NUMBER, CALL_FUNCTION); @@ -2837,19 +2820,6 @@ void CodeGenerator::VisitCountOperation(CountOperation* node) { // Ensure the new value is writable. frame_->Spill(new_value.reg()); - // In order to combine the overflow and the smi tag check, we need - // to be able to allocate a byte register. We attempt to do so - // without spilling. If we fail, we will generate separate overflow - // and smi tag checks. - // - // We allocate and clear the temporary register before - // performing the count operation since clearing the register using - // xor will clear the overflow flag. - Result tmp = allocator_->AllocateWithoutSpilling(); - - // Clear scratch register to prepare it for setcc after the operation below. 
- __ xor_(kScratchRegister, kScratchRegister); - DeferredCode* deferred = NULL; if (is_postfix) { deferred = new DeferredPostfixCountOperation(new_value.reg(), @@ -2860,25 +2830,26 @@ void CodeGenerator::VisitCountOperation(CountOperation* node) { is_increment); } + Result tmp = allocator_->AllocateWithoutSpilling(); + ASSERT(kSmiTagMask == 1 && kSmiTag == 0); + __ movl(tmp.reg(), Immediate(kSmiTagMask)); + // Smi test. + __ movq(kScratchRegister, new_value.reg()); if (is_increment) { - __ addq(new_value.reg(), Immediate(Smi::FromInt(1))); + __ addl(kScratchRegister, Immediate(Smi::FromInt(1))); } else { - __ subq(new_value.reg(), Immediate(Smi::FromInt(1))); + __ subl(kScratchRegister, Immediate(Smi::FromInt(1))); } - - // If the count operation didn't overflow and the result is a valid - // smi, we're done. Otherwise, we jump to the deferred slow-case - // code. - - // We combine the overflow and the smi tag check. - __ setcc(overflow, kScratchRegister); - __ or_(kScratchRegister, new_value.reg()); - __ testl(kScratchRegister, Immediate(kSmiTagMask)); + // deferred->Branch(overflow); + __ cmovl(overflow, kScratchRegister, tmp.reg()); + __ testl(kScratchRegister, tmp.reg()); tmp.Unuse(); deferred->Branch(not_zero); + __ movq(new_value.reg(), kScratchRegister); deferred->BindExit(); + // Postfix: store the old value in the allocated slot under the // reference. 
if (is_postfix) frame_->SetElementAt(target.size(), &old_value); @@ -3144,11 +3115,9 @@ void CodeGenerator::VisitCompareOperation(CompareOperation* node) { __ testb(FieldOperand(kScratchRegister, Map::kBitFieldOffset), Immediate(1 << Map::kIsUndetectable)); destination()->false_target()->Branch(not_zero); - __ movb(kScratchRegister, - FieldOperand(kScratchRegister, Map::kInstanceTypeOffset)); - __ cmpb(kScratchRegister, Immediate(FIRST_JS_OBJECT_TYPE)); + __ CmpInstanceType(kScratchRegister, FIRST_JS_OBJECT_TYPE); destination()->false_target()->Branch(below); - __ cmpb(kScratchRegister, Immediate(LAST_JS_OBJECT_TYPE)); + __ CmpInstanceType(kScratchRegister, LAST_JS_OBJECT_TYPE); answer.Unuse(); destination()->Split(below_equal); } else { @@ -3246,10 +3215,25 @@ void CodeGenerator::GenerateIsArray(ZoneList<Expression*>* args) { void CodeGenerator::GenerateIsConstructCall(ZoneList<Expression*>* args) { - // TODO(X64): Optimize this like it's done on IA-32. ASSERT(args->length() == 0); - Result answer = frame_->CallRuntime(Runtime::kIsConstructCall, 0); - frame_->Push(&answer); + + // Get the frame pointer for the calling frame. + Result fp = allocator()->Allocate(); + __ movq(fp.reg(), Operand(rbp, StandardFrameConstants::kCallerFPOffset)); + + // Skip the arguments adaptor frame if it exists. + Label check_frame_marker; + __ cmpq(Operand(fp.reg(), StandardFrameConstants::kContextOffset), + Immediate(ArgumentsAdaptorFrame::SENTINEL)); + __ j(not_equal, &check_frame_marker); + __ movq(fp.reg(), Operand(fp.reg(), StandardFrameConstants::kCallerFPOffset)); + + // Check the marker in the calling frame. 
+ __ bind(&check_frame_marker); + __ cmpq(Operand(fp.reg(), StandardFrameConstants::kMarkerOffset), + Immediate(Smi::FromInt(StackFrame::CONSTRUCT))); + fp.Unuse(); + destination()->Split(equal); } @@ -3361,7 +3345,21 @@ void CodeGenerator::GenerateRandomPositiveSmi(ZoneList<Expression*>* args) { void CodeGenerator::GenerateFastMathOp(MathOp op, ZoneList<Expression*>* args) { - UNIMPLEMENTED(); + // TODO(X64): Use inline floating point in the fast case. + ASSERT(args->length() == 1); + + // Load number. + Load(args->at(0)); + Result answer; + switch (op) { + case SIN: + answer = frame_->CallRuntime(Runtime::kMath_sin, 1); + break; + case COS: + answer = frame_->CallRuntime(Runtime::kMath_cos, 1); + break; + } + frame_->Push(&answer); } @@ -3379,27 +3377,22 @@ void CodeGenerator::GenerateClassOf(ZoneList<Expression*>* args) { // Check that the object is a JS object but take special care of JS // functions to make sure they have 'Function' as their class. - { Result tmp = allocator()->Allocate(); - __ movq(obj.reg(), FieldOperand(obj.reg(), HeapObject::kMapOffset)); - __ movb(tmp.reg(), FieldOperand(obj.reg(), Map::kInstanceTypeOffset)); - __ cmpb(tmp.reg(), Immediate(FIRST_JS_OBJECT_TYPE)); - null.Branch(less); - // As long as JS_FUNCTION_TYPE is the last instance type and it is - // right after LAST_JS_OBJECT_TYPE, we can avoid checking for - // LAST_JS_OBJECT_TYPE. - ASSERT(LAST_TYPE == JS_FUNCTION_TYPE); - ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1); - __ cmpb(tmp.reg(), Immediate(JS_FUNCTION_TYPE)); - function.Branch(equal); - } + __ CmpObjectType(obj.reg(), FIRST_JS_OBJECT_TYPE, obj.reg()); + null.Branch(less); + + // As long as JS_FUNCTION_TYPE is the last instance type and it is + // right after LAST_JS_OBJECT_TYPE, we can avoid checking for + // LAST_JS_OBJECT_TYPE. 
+ ASSERT(LAST_TYPE == JS_FUNCTION_TYPE); + ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1); + __ CmpInstanceType(obj.reg(), JS_FUNCTION_TYPE); + function.Branch(equal); // Check if the constructor in the map is a function. - { Result tmp = allocator()->Allocate(); - __ movq(obj.reg(), FieldOperand(obj.reg(), Map::kConstructorOffset)); - __ CmpObjectType(obj.reg(), JS_FUNCTION_TYPE, tmp.reg()); - non_function_constructor.Branch(not_equal); - } + __ movq(obj.reg(), FieldOperand(obj.reg(), Map::kConstructorOffset)); + __ CmpObjectType(obj.reg(), JS_FUNCTION_TYPE, kScratchRegister); + non_function_constructor.Branch(not_equal); // The obj register now contains the constructor function. Grab the // instance class name from there. @@ -3803,8 +3796,28 @@ Operand CodeGenerator::SlotOperand(Slot* slot, Register tmp) { Operand CodeGenerator::ContextSlotOperandCheckExtensions(Slot* slot, Result tmp, JumpTarget* slow) { - UNIMPLEMENTED(); - return Operand(rsp, 0); + ASSERT(slot->type() == Slot::CONTEXT); + ASSERT(tmp.is_register()); + Register context = rsi; + + for (Scope* s = scope(); s != slot->var()->scope(); s = s->outer_scope()) { + if (s->num_heap_slots() > 0) { + if (s->calls_eval()) { + // Check that extension is NULL. + __ cmpq(ContextOperand(context, Context::EXTENSION_INDEX), + Immediate(0)); + slow->Branch(not_equal, not_taken); + } + __ movq(tmp.reg(), ContextOperand(context, Context::CLOSURE_INDEX)); + __ movq(tmp.reg(), FieldOperand(tmp.reg(), JSFunction::kContextOffset)); + context = tmp.reg(); + } + } + // Check that last extension is NULL. 
+ __ cmpq(ContextOperand(context, Context::EXTENSION_INDEX), Immediate(0)); + slow->Branch(not_equal, not_taken); + __ movq(tmp.reg(), ContextOperand(context, Context::FCONTEXT_INDEX)); + return ContextOperand(tmp.reg(), slot->index()); } @@ -4316,12 +4329,8 @@ void CodeGenerator::Comparison(Condition cc, left_side = Result(left_reg); right_side = Result(right_val); // Test smi equality and comparison by signed int comparison. - if (IsUnsafeSmi(right_side.handle())) { - right_side.ToRegister(); - __ cmpq(left_side.reg(), right_side.reg()); - } else { - __ Cmp(left_side.reg(), right_side.handle()); - } + // Both sides are smis, so we can use an Immediate. + __ cmpl(left_side.reg(), Immediate(Smi::cast(*right_side.handle()))); left_side.Unuse(); right_side.Unuse(); dest->Split(cc); @@ -4373,7 +4382,8 @@ void CodeGenerator::Comparison(Condition cc, // When non-smi, call out to the compare stub. CompareStub stub(cc, strict); Result answer = frame_->CallStub(&stub, &left_side, &right_side); - __ testq(answer.reg(), answer.reg()); // Both zero and sign flag right. + // The result is a Smi, which is negative, zero, or positive. + __ testl(answer.reg(), answer.reg()); // Both zero and sign flag right. answer.Unuse(); dest->Split(cc); } else { @@ -4393,11 +4403,7 @@ void CodeGenerator::Comparison(Condition cc, // When non-smi, call out to the compare stub. CompareStub stub(cc, strict); Result answer = frame_->CallStub(&stub, &left_side, &right_side); - if (cc == equal) { - __ testq(answer.reg(), answer.reg()); - } else { - __ cmpq(answer.reg(), Immediate(0)); - } + __ testl(answer.reg(), answer.reg()); // Sets both zero and sign flags. 
answer.Unuse(); dest->true_target()->Branch(cc); dest->false_target()->Jump(); @@ -4405,7 +4411,7 @@ void CodeGenerator::Comparison(Condition cc, is_smi.Bind(); left_side = Result(left_reg); right_side = Result(right_reg); - __ cmpq(left_side.reg(), right_side.reg()); + __ cmpl(left_side.reg(), right_side.reg()); right_side.Unuse(); left_side.Unuse(); dest->Split(cc); @@ -5056,12 +5062,12 @@ void CodeGenerator::LikelySmiBinaryOperation(Token::Value op, // Perform the operation. switch (op) { case Token::SAR: - __ sar(answer.reg()); + __ sarl(answer.reg()); // No checks of result necessary break; case Token::SHR: { Label result_ok; - __ shr(answer.reg()); + __ shrl(answer.reg()); // Check that the *unsigned* result fits in a smi. Neither of // the two high-order bits can be set: // * 0x80000000: high bit would be lost when smi tagging. @@ -5084,7 +5090,7 @@ void CodeGenerator::LikelySmiBinaryOperation(Token::Value op, Label result_ok; __ shl(answer.reg()); // Check that the *signed* result fits in a smi. - __ cmpq(answer.reg(), Immediate(0xc0000000)); + __ cmpl(answer.reg(), Immediate(0xc0000000)); __ j(positive, &result_ok); ASSERT(kSmiTag == 0); __ shl(rcx, Immediate(kSmiTagSize)); @@ -5421,6 +5427,7 @@ void ToBooleanStub::Generate(MacroAssembler* masm) { __ j(equal, &false_result); // Get the map and type of the heap object. + // We don't use CmpObjectType because we manipulate the type field. __ movq(rdx, FieldOperand(rax, HeapObject::kMapOffset)); __ movzxbq(rcx, FieldOperand(rdx, Map::kInstanceTypeOffset)); @@ -5446,6 +5453,7 @@ void ToBooleanStub::Generate(MacroAssembler* masm) { __ bind(¬_string); // HeapNumber => false iff +0, -0, or NaN. + // These three cases set C3 when compared to zero in the FPU. __ Cmp(rdx, Factory::heap_number_map()); __ j(not_equal, &true_result); // TODO(x64): Don't use fp stack, use MMX registers? @@ -5455,9 +5463,9 @@ void ToBooleanStub::Generate(MacroAssembler* masm) { __ fucompp(); // Compare and pop both values. 
__ movq(kScratchRegister, rax); __ fnstsw_ax(); // Store fp status word in ax, no checking for exceptions. - __ testb(rax, Immediate(0x08)); // Test FP condition flag C3. + __ testl(rax, Immediate(0x4000)); // Test FP condition flag C3, bit 16. __ movq(rax, kScratchRegister); - __ j(zero, &false_result); + __ j(not_zero, &false_result); // Fall through to |true_result|. // Return 1/0 for true/false in rax. @@ -5617,7 +5625,7 @@ void CompareStub::Generate(MacroAssembler* masm) { // The representation of NaN values has all exponent bits (52..62) set, // and not all mantissa bits (0..51) clear. // Read double representation into rax. - __ movq(rbx, 0x7ff0000000000000, RelocInfo::NONE); + __ movq(rbx, V8_UINT64_C(0x7ff0000000000000), RelocInfo::NONE); __ movq(rax, FieldOperand(rdx, HeapNumber::kValueOffset)); // Test that exponent bits are all set. __ or_(rbx, rax); @@ -5627,7 +5635,8 @@ void CompareStub::Generate(MacroAssembler* masm) { __ shl(rax, Immediate(12)); // If all bits in the mantissa are zero the number is Infinity, and // we return zero. Otherwise it is a NaN, and we return non-zero. - // So just return rax. + // We cannot just return rax because only eax is tested on return. + __ setcc(not_zero, rax); __ ret(0); __ bind(¬_identical); @@ -5665,7 +5674,7 @@ void CompareStub::Generate(MacroAssembler* masm) { Factory::heap_number_map()); // If heap number, handle it in the slow case. __ j(equal, &slow); - // Return non-equal (ebx is not zero) + // Return non-equal. ebx (the lower half of rbx) is not zero. 
__ movq(rax, rbx); __ ret(0); @@ -5681,7 +5690,7 @@ void CompareStub::Generate(MacroAssembler* masm) { Label first_non_object; __ CmpObjectType(rax, FIRST_JS_OBJECT_TYPE, rcx); __ j(below, &first_non_object); - // Return non-zero (rax is not zero) + // Return non-zero (eax (not rax) is not zero) Label return_not_equal; ASSERT(kHeapObjectTag != 0); __ bind(&return_not_equal); @@ -5745,7 +5754,7 @@ void CompareStub::Generate(MacroAssembler* masm) { BranchIfNonSymbol(masm, &call_builtin, rdx, kScratchRegister); // We've already checked for object identity, so if both operands - // are symbols they aren't equal. Register rax already holds a + // are symbols they aren't equal. Register eax (not rax) already holds a // non-zero value, which indicates not equal, so just return. __ ret(2 * kPointerSize); } @@ -6647,12 +6656,12 @@ void GenericBinaryOpStub::GenerateSmiCode(MacroAssembler* masm, Label* slow) { // Move the second operand into register ecx. __ movq(rcx, rbx); // Remove tags from operands (but keep sign). - __ sar(rax, Immediate(kSmiTagSize)); - __ sar(rcx, Immediate(kSmiTagSize)); + __ sarl(rax, Immediate(kSmiTagSize)); + __ sarl(rcx, Immediate(kSmiTagSize)); // Perform the operation. switch (op_) { case Token::SAR: - __ sar(rax); + __ sarl(rax); // No checks of result necessary break; case Token::SHR: @@ -6663,19 +6672,17 @@ void GenericBinaryOpStub::GenerateSmiCode(MacroAssembler* masm, Label* slow) { // - 0x40000000: this number would convert to negative when // Smi tagging these two cases can only happen with shifts // by 0 or 1 when handed a valid smi. - __ testq(rax, Immediate(0xc0000000)); + __ testl(rax, Immediate(0xc0000000)); __ j(not_zero, slow); break; case Token::SHL: __ shll(rax); - // TODO(Smi): Significant change if Smi changes. // Check that the *signed* result fits in a smi. // It does, if the 30th and 31st bits are equal, since then // shifting the SmiTag in at the bottom doesn't change the sign. 
ASSERT(kSmiTagSize == 1); __ cmpl(rax, Immediate(0xc0000000)); __ j(sign, slow); - __ movsxlq(rax, rax); // Extend new sign of eax into rax. break; default: UNREACHABLE(); @@ -6787,7 +6794,6 @@ void GenericBinaryOpStub::Generate(MacroAssembler* masm) { __ testl(rax, Immediate(1)); __ j(not_zero, &operand_conversion_failure); } else { - // TODO(X64): Verify that SSE3 is always supported, drop this code. // Check if right operand is int32. __ fist_s(Operand(rsp, 0 * kPointerSize)); __ fild_s(Operand(rsp, 0 * kPointerSize)); @@ -6814,9 +6820,9 @@ void GenericBinaryOpStub::Generate(MacroAssembler* masm) { case Token::BIT_OR: __ or_(rax, rcx); break; case Token::BIT_AND: __ and_(rax, rcx); break; case Token::BIT_XOR: __ xor_(rax, rcx); break; - case Token::SAR: __ sar(rax); break; - case Token::SHL: __ shl(rax); break; - case Token::SHR: __ shr(rax); break; + case Token::SAR: __ sarl(rax); break; + case Token::SHL: __ shll(rax); break; + case Token::SHR: __ shrl(rax); break; default: UNREACHABLE(); } if (op_ == Token::SHR) { diff --git a/V8Binding/v8/src/x64/codegen-x64.h b/V8Binding/v8/src/x64/codegen-x64.h index 0e8505a..bb4b538 100644 --- a/V8Binding/v8/src/x64/codegen-x64.h +++ b/V8Binding/v8/src/x64/codegen-x64.h @@ -543,58 +543,6 @@ class CodeGenerator: public AstVisitor { inline void GenerateMathSin(ZoneList<Expression*>* args); inline void GenerateMathCos(ZoneList<Expression*>* args); - // Methods and constants for fast case switch statement support. - // - // Only allow fast-case switch if the range of labels is at most - // this factor times the number of case labels. - // Value is derived from comparing the size of code generated by the normal - // switch code for Smi-labels to the size of a single pointer. If code - // quality increases this number should be decreased to match. - static const int kFastSwitchMaxOverheadFactor = 5; - - // Minimal number of switch cases required before we allow jump-table - // optimization. 
- static const int kFastSwitchMinCaseCount = 5; - - // The limit of the range of a fast-case switch, as a factor of the number - // of cases of the switch. Each platform should return a value that - // is optimal compared to the default code generated for a switch statement - // on that platform. - int FastCaseSwitchMaxOverheadFactor(); - - // The minimal number of cases in a switch before the fast-case switch - // optimization is enabled. Each platform should return a value that - // is optimal compared to the default code generated for a switch statement - // on that platform. - int FastCaseSwitchMinCaseCount(); - - // Allocate a jump table and create code to jump through it. - // Should call GenerateFastCaseSwitchCases to generate the code for - // all the cases at the appropriate point. - void GenerateFastCaseSwitchJumpTable(SwitchStatement* node, - int min_index, - int range, - Label* fail_label, - Vector<Label*> case_targets, - Vector<Label> case_labels); - - // Generate the code for cases for the fast case switch. - // Called by GenerateFastCaseSwitchJumpTable. - void GenerateFastCaseSwitchCases(SwitchStatement* node, - Vector<Label> case_labels, - VirtualFrame* start_frame); - - // Fast support for constant-Smi switches. - void GenerateFastCaseSwitchStatement(SwitchStatement* node, - int min_index, - int range, - int default_index); - - // Fast support for constant-Smi switches. Tests whether switch statement - // permits optimization and calls GenerateFastCaseSwitch if it does. - // Returns true if the fast-case switch was generated, and false if not. - bool TryGenerateFastCaseSwitchStatement(SwitchStatement* node); - // Methods used to indicate which source code is generated for. Source // positions are collected by the assembler and emitted with the relocation // information. 
diff --git a/V8Binding/v8/src/x64/debug-x64.cc b/V8Binding/v8/src/x64/debug-x64.cc index 3b10132..e94e781 100644 --- a/V8Binding/v8/src/x64/debug-x64.cc +++ b/V8Binding/v8/src/x64/debug-x64.cc @@ -38,8 +38,10 @@ namespace internal { #ifdef ENABLE_DEBUGGER_SUPPORT bool Debug::IsDebugBreakAtReturn(v8::internal::RelocInfo* rinfo) { - UNIMPLEMENTED(); - return false; + ASSERT(RelocInfo::IsJSReturn(rinfo->rmode())); + // 11th byte of patch is 0x49, 11th byte of JS return is 0xCC (int3). + ASSERT(*(rinfo->pc() + 10) == 0x49 || *(rinfo->pc() + 10) == 0xCC); + return (*(rinfo->pc() + 10) == 0x49); } void Debug::GenerateCallICDebugBreak(MacroAssembler* masm) { diff --git a/V8Binding/v8/src/x64/disasm-x64.cc b/V8Binding/v8/src/x64/disasm-x64.cc index 767b124..f962c01 100644 --- a/V8Binding/v8/src/x64/disasm-x64.cc +++ b/V8Binding/v8/src/x64/disasm-x64.cc @@ -25,64 +25,1408 @@ // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+#include <assert.h> +#include <stdio.h> +#include <stdarg.h> + #include "v8.h" #include "disasm.h" namespace disasm { -Disassembler::Disassembler(NameConverter const& converter) - : converter_(converter) { - UNIMPLEMENTED(); +enum OperandOrder { + UNSET_OP_ORDER = 0, REG_OPER_OP_ORDER, OPER_REG_OP_ORDER +}; + +//------------------------------------------------------------------ +// Tables +//------------------------------------------------------------------ +struct ByteMnemonic { + int b; // -1 terminates, otherwise must be in range (0..255) + OperandOrder op_order_; + const char* mnem; +}; + + +static ByteMnemonic two_operands_instr[] = { + { 0x03, REG_OPER_OP_ORDER, "add" }, + { 0x21, OPER_REG_OP_ORDER, "and" }, + { 0x23, REG_OPER_OP_ORDER, "and" }, + { 0x3B, REG_OPER_OP_ORDER, "cmp" }, + { 0x8D, REG_OPER_OP_ORDER, "lea" }, + { 0x09, OPER_REG_OP_ORDER, "or" }, + { 0x0B, REG_OPER_OP_ORDER, "or" }, + { 0x1B, REG_OPER_OP_ORDER, "sbb" }, + { 0x29, OPER_REG_OP_ORDER, "sub" }, + { 0x2B, REG_OPER_OP_ORDER, "sub" }, + { 0x85, REG_OPER_OP_ORDER, "test" }, + { 0x31, OPER_REG_OP_ORDER, "xor" }, + { 0x33, REG_OPER_OP_ORDER, "xor" }, + { 0x87, REG_OPER_OP_ORDER, "xchg" }, + { 0x8A, REG_OPER_OP_ORDER, "movb" }, + { 0x8B, REG_OPER_OP_ORDER, "mov" }, + { -1, UNSET_OP_ORDER, "" } +}; + + +static ByteMnemonic zero_operands_instr[] = { + { 0xC3, UNSET_OP_ORDER, "ret" }, + { 0xC9, UNSET_OP_ORDER, "leave" }, + { 0x90, UNSET_OP_ORDER, "nop" }, + { 0xF4, UNSET_OP_ORDER, "hlt" }, + { 0xCC, UNSET_OP_ORDER, "int3" }, + { 0x60, UNSET_OP_ORDER, "pushad" }, + { 0x61, UNSET_OP_ORDER, "popad" }, + { 0x9C, UNSET_OP_ORDER, "pushfd" }, + { 0x9D, UNSET_OP_ORDER, "popfd" }, + { 0x9E, UNSET_OP_ORDER, "sahf" }, + { 0x99, UNSET_OP_ORDER, "cdq" }, + { 0x9B, UNSET_OP_ORDER, "fwait" }, + { -1, UNSET_OP_ORDER, "" } +}; + + +static ByteMnemonic call_jump_instr[] = { + { 0xE8, UNSET_OP_ORDER, "call" }, + { 0xE9, UNSET_OP_ORDER, "jmp" }, + { -1, UNSET_OP_ORDER, "" } +}; + + +static ByteMnemonic 
short_immediate_instr[] = { + { 0x05, UNSET_OP_ORDER, "add" }, + { 0x0D, UNSET_OP_ORDER, "or" }, + { 0x15, UNSET_OP_ORDER, "adc" }, + { 0x25, UNSET_OP_ORDER, "and" }, + { 0x2D, UNSET_OP_ORDER, "sub" }, + { 0x35, UNSET_OP_ORDER, "xor" }, + { 0x3D, UNSET_OP_ORDER, "cmp" }, + { -1, UNSET_OP_ORDER, "" } +}; + + +static const char* conditional_code_suffix[] = { + "o", "no", "c", "nc", "z", "nz", "a", "na", + "s", "ns", "pe", "po", "l", "ge", "le", "g" +}; + + +enum InstructionType { + NO_INSTR, + ZERO_OPERANDS_INSTR, + TWO_OPERANDS_INSTR, + JUMP_CONDITIONAL_SHORT_INSTR, + REGISTER_INSTR, + PUSHPOP_INSTR, // Has implicit 64-bit operand size. + MOVE_REG_INSTR, + CALL_JUMP_INSTR, + SHORT_IMMEDIATE_INSTR +}; + + +struct InstructionDesc { + const char* mnem; + InstructionType type; + OperandOrder op_order_; +}; + + +class InstructionTable { + public: + InstructionTable(); + const InstructionDesc& Get(byte x) const { + return instructions_[x]; + } + + private: + InstructionDesc instructions_[256]; + void Clear(); + void Init(); + void CopyTable(ByteMnemonic bm[], InstructionType type); + void SetTableRange(InstructionType type, byte start, byte end, + const char* mnem); + void AddJumpConditionalShort(); +}; + + +InstructionTable::InstructionTable() { + Clear(); + Init(); +} + + +void InstructionTable::Clear() { + for (int i = 0; i < 256; i++) { + instructions_[i].mnem = ""; + instructions_[i].type = NO_INSTR; + instructions_[i].op_order_ = UNSET_OP_ORDER; + } +} + + +void InstructionTable::Init() { + CopyTable(two_operands_instr, TWO_OPERANDS_INSTR); + CopyTable(zero_operands_instr, ZERO_OPERANDS_INSTR); + CopyTable(call_jump_instr, CALL_JUMP_INSTR); + CopyTable(short_immediate_instr, SHORT_IMMEDIATE_INSTR); + AddJumpConditionalShort(); + SetTableRange(PUSHPOP_INSTR, 0x50, 0x57, "push"); + SetTableRange(PUSHPOP_INSTR, 0x58, 0x5F, "pop"); + SetTableRange(MOVE_REG_INSTR, 0xB8, 0xBF, "mov"); +} + + +void InstructionTable::CopyTable(ByteMnemonic bm[], InstructionType type) { + 
for (int i = 0; bm[i].b >= 0; i++) { + InstructionDesc* id = &instructions_[bm[i].b]; + id->mnem = bm[i].mnem; + id->op_order_ = bm[i].op_order_; + assert(id->type == NO_INSTR); // Information already entered + id->type = type; + } +} + + +void InstructionTable::SetTableRange(InstructionType type, byte start, + byte end, const char* mnem) { + for (byte b = start; b <= end; b++) { + InstructionDesc* id = &instructions_[b]; + assert(id->type == NO_INSTR); // Information already entered + id->mnem = mnem; + id->type = type; + } +} + + +void InstructionTable::AddJumpConditionalShort() { + for (byte b = 0x70; b <= 0x7F; b++) { + InstructionDesc* id = &instructions_[b]; + assert(id->type == NO_INSTR); // Information already entered + id->mnem = NULL; // Computed depending on condition code. + id->type = JUMP_CONDITIONAL_SHORT_INSTR; + } +} + + +static InstructionTable instruction_table; + + +// The X64 disassembler implementation. +enum UnimplementedOpcodeAction { + CONTINUE_ON_UNIMPLEMENTED_OPCODE, + ABORT_ON_UNIMPLEMENTED_OPCODE +}; + + +class DisassemblerX64 { + public: + DisassemblerX64(const NameConverter& converter, + UnimplementedOpcodeAction unimplemented_action = + ABORT_ON_UNIMPLEMENTED_OPCODE) + : converter_(converter), + tmp_buffer_pos_(0), + abort_on_unimplemented_( + unimplemented_action == ABORT_ON_UNIMPLEMENTED_OPCODE), + rex_(0), + operand_size_(0) { + tmp_buffer_[0] = '\0'; + } + + virtual ~DisassemblerX64() { + } + + // Writes one disassembled instruction into 'buffer' (0-terminated). + // Returns the length of the disassembled machine instruction in bytes. 
+ int InstructionDecode(v8::internal::Vector<char> buffer, byte* instruction); + + private: + + const NameConverter& converter_; + v8::internal::EmbeddedVector<char, 128> tmp_buffer_; + unsigned int tmp_buffer_pos_; + bool abort_on_unimplemented_; + // Prefixes parsed + byte rex_; + byte operand_size_; + + void setOperandSizePrefix(byte prefix) { + ASSERT_EQ(0x66, prefix); + operand_size_ = prefix; + } + + void setRex(byte rex) { + ASSERT_EQ(0x40, rex & 0xF0); + rex_ = rex; + } + + bool rex() { return rex_ != 0; } + + bool rex_b() { return (rex_ & 0x01) != 0; } + + // Actual number of base register given the low bits and the rex.b state. + int base_reg(int low_bits) { return low_bits | ((rex_ & 0x01) << 3); } + + bool rex_x() { return (rex_ & 0x02) != 0; } + + bool rex_r() { return (rex_ & 0x04) != 0; } + + bool rex_w() { return (rex_ & 0x08) != 0; } + + int operand_size() { + return rex_w() ? 64 : (operand_size_ != 0) ? 16 : 32; + } + + char operand_size_code() { + return rex_w() ? 'q' : (operand_size_ != 0) ? 'w' : 'l'; + } + + const char* NameOfCPURegister(int reg) const { + return converter_.NameOfCPURegister(reg); + } + + const char* NameOfByteCPURegister(int reg) const { + return converter_.NameOfByteCPURegister(reg); + } + + const char* NameOfXMMRegister(int reg) const { + return converter_.NameOfXMMRegister(reg); + } + + const char* NameOfAddress(byte* addr) const { + return converter_.NameOfAddress(addr); + } + + // Disassembler helper functions. + void get_modrm(byte data, + int* mod, + int* regop, + int* rm) { + *mod = (data >> 6) & 3; + *regop = ((data & 0x38) >> 3) | (rex_r() ? 8 : 0); + *rm = (data & 7) | (rex_b() ? 8 : 0); + } + + void get_sib(byte data, + int* scale, + int* index, + int* base) { + *scale = (data >> 6) & 3; + *index = ((data >> 3) & 7) | (rex_x() ? 8 : 0); + *base = data & 7 | (rex_b() ? 
8 : 0); + } + + typedef const char* (DisassemblerX64::*RegisterNameMapping)(int reg) const; + + int PrintRightOperandHelper(byte* modrmp, + RegisterNameMapping register_name); + int PrintRightOperand(byte* modrmp); + int PrintRightByteOperand(byte* modrmp); + int PrintOperands(const char* mnem, + OperandOrder op_order, + byte* data); + int PrintImmediateOp(byte* data); + int F7Instruction(byte* data); + int D1D3C1Instruction(byte* data); + int JumpShort(byte* data); + int JumpConditional(byte* data); + int JumpConditionalShort(byte* data); + int SetCC(byte* data); + int FPUInstruction(byte* data); + void AppendToBuffer(const char* format, ...); + + void UnimplementedInstruction() { + if (abort_on_unimplemented_) { + UNIMPLEMENTED(); + } else { + AppendToBuffer("'Unimplemented Instruction'"); + } + } +}; + + +void DisassemblerX64::AppendToBuffer(const char* format, ...) { + v8::internal::Vector<char> buf = tmp_buffer_ + tmp_buffer_pos_; + va_list args; + va_start(args, format); + int result = v8::internal::OS::VSNPrintF(buf, format, args); + va_end(args); + tmp_buffer_pos_ += result; +} + + +int DisassemblerX64::PrintRightOperandHelper( + byte* modrmp, + RegisterNameMapping register_name) { + int mod, regop, rm; + get_modrm(*modrmp, &mod, ®op, &rm); + switch (mod) { + case 0: + if ((rm & 7) == 5) { + int32_t disp = *reinterpret_cast<int32_t*>(modrmp + 1); + AppendToBuffer("[0x%x]", disp); + return 5; + } else if ((rm & 7) == 4) { + // Codes for SIB byte. + byte sib = *(modrmp + 1); + int scale, index, base; + get_sib(sib, &scale, &index, &base); + if (index == 4 && (base & 7) == 4 && scale == 0 /*times_1*/) { + // index == rsp means no index. Only use sib byte with no index for + // rsp and r12 base. + AppendToBuffer("[%s]", (this->*register_name)(base)); + return 2; + } else if (base == 5) { + // base == rbp means no base register (when mod == 0). 
+ int32_t disp = *reinterpret_cast<int32_t*>(modrmp + 2); + AppendToBuffer("[%s*%d+0x%x]", + (this->*register_name)(index), + 1 << scale, disp); + return 6; + } else if (index != 4 && base != 5) { + // [base+index*scale] + AppendToBuffer("[%s+%s*%d]", + (this->*register_name)(base), + (this->*register_name)(index), + 1 << scale); + return 2; + } else { + UnimplementedInstruction(); + return 1; + } + } else { + AppendToBuffer("[%s]", (this->*register_name)(rm)); + return 1; + } + break; + case 1: // fall through + case 2: + if ((rm & 7) == 4) { + byte sib = *(modrmp + 1); + int scale, index, base; + get_sib(sib, &scale, &index, &base); + int disp = (mod == 2) ? *reinterpret_cast<int32_t*>(modrmp + 2) + : *reinterpret_cast<char*>(modrmp + 2); + if (index == 4 && (base & 7) == 4 && scale == 0 /*times_1*/) { + if (-disp > 0) { + AppendToBuffer("[%s-0x%x]", (this->*register_name)(base), -disp); + } else { + AppendToBuffer("[%s+0x%x]", (this->*register_name)(base), disp); + } + } else { + if (-disp > 0) { + AppendToBuffer("[%s+%s*%d-0x%x]", + (this->*register_name)(base), + (this->*register_name)(index), + 1 << scale, + -disp); + } else { + AppendToBuffer("[%s+%s*%d+0x%x]", + (this->*register_name)(base), + (this->*register_name)(index), + 1 << scale, + disp); + } + } + return mod == 2 ? 6 : 3; + } else { + // No sib. + int disp = (mod == 2) ? *reinterpret_cast<int32_t*>(modrmp + 1) + : *reinterpret_cast<char*>(modrmp + 1); + if (-disp > 0) { + AppendToBuffer("[%s-0x%x]", (this->*register_name)(rm), -disp); + } else { + AppendToBuffer("[%s+0x%x]", (this->*register_name)(rm), disp); + } + return (mod == 2) ? 
5 : 2; + } + break; + case 3: + AppendToBuffer("%s", (this->*register_name)(rm)); + return 1; + default: + UnimplementedInstruction(); + return 1; + } + UNREACHABLE(); +} + + +int DisassemblerX64::PrintRightOperand(byte* modrmp) { + return PrintRightOperandHelper(modrmp, + &DisassemblerX64::NameOfCPURegister); +} + + +int DisassemblerX64::PrintRightByteOperand(byte* modrmp) { + return PrintRightOperandHelper(modrmp, + &DisassemblerX64::NameOfByteCPURegister); } -Disassembler::~Disassembler() { - UNIMPLEMENTED(); +// Returns number of bytes used including the current *data. +// Writes instruction's mnemonic, left and right operands to 'tmp_buffer_'. +int DisassemblerX64::PrintOperands(const char* mnem, + OperandOrder op_order, + byte* data) { + byte modrm = *data; + int mod, regop, rm; + get_modrm(modrm, &mod, ®op, &rm); + int advance = 0; + switch (op_order) { + case REG_OPER_OP_ORDER: { + AppendToBuffer("%s%c %s,", + mnem, + operand_size_code(), + NameOfCPURegister(regop)); + advance = PrintRightOperand(data); + break; + } + case OPER_REG_OP_ORDER: { + AppendToBuffer("%s%c ", mnem, operand_size_code()); + advance = PrintRightOperand(data); + AppendToBuffer(",%s", NameOfCPURegister(regop)); + break; + } + default: + UNREACHABLE(); + break; + } + return advance; } -const char* NameConverter::NameOfAddress(unsigned char* addr) const { - UNIMPLEMENTED(); - return NULL; +// Returns number of bytes used by machine instruction, including *data byte. +// Writes immediate instructions to 'tmp_buffer_'. 
+int DisassemblerX64::PrintImmediateOp(byte* data) { + bool sign_extension_bit = (*data & 0x02) != 0; + byte modrm = *(data + 1); + int mod, regop, rm; + get_modrm(modrm, &mod, ®op, &rm); + const char* mnem = "Imm???"; + switch (regop) { + case 0: + mnem = "add"; + break; + case 1: + mnem = "or"; + break; + case 2: + mnem = "adc"; + break; + case 4: + mnem = "and"; + break; + case 5: + mnem = "sub"; + break; + case 6: + mnem = "xor"; + break; + case 7: + mnem = "cmp"; + break; + default: + UnimplementedInstruction(); + } + AppendToBuffer("%s ", mnem); + int count = PrintRightOperand(data + 1); + if (sign_extension_bit) { + AppendToBuffer(",0x%x", *(data + 1 + count)); + return 1 + count + 1 /*int8*/; + } else { + AppendToBuffer(",0x%x", *reinterpret_cast<int32_t*>(data + 1 + count)); + return 1 + count + 4 /*int32_t*/; + } +} + + +// Returns number of bytes used, including *data. +int DisassemblerX64::F7Instruction(byte* data) { + assert(*data == 0xF7); + byte modrm = *(data + 1); + int mod, regop, rm; + get_modrm(modrm, &mod, ®op, &rm); + if (mod == 3 && regop != 0) { + const char* mnem = NULL; + switch (regop) { + case 2: + mnem = "not"; + break; + case 3: + mnem = "neg"; + break; + case 4: + mnem = "mul"; + break; + case 7: + mnem = "idiv"; + break; + default: + UnimplementedInstruction(); + } + AppendToBuffer("%s%c %s", + mnem, + operand_size_code(), + NameOfCPURegister(rm)); + return 2; + } else if (mod == 3 && regop == 0) { + int32_t imm = *reinterpret_cast<int32_t*>(data + 2); + AppendToBuffer("test%c %s,0x%x", + operand_size_code(), + NameOfCPURegister(rm), + imm); + return 6; + } else if (regop == 0) { + AppendToBuffer("test%c ", operand_size_code()); + int count = PrintRightOperand(data + 1); + int32_t imm = *reinterpret_cast<int32_t*>(data + 1 + count); + AppendToBuffer(",0x%x", imm); + return 1 + count + 4 /*int32_t*/; + } else { + UnimplementedInstruction(); + return 2; + } +} + + +int DisassemblerX64::D1D3C1Instruction(byte* data) { + byte op = *data; 
+ assert(op == 0xD1 || op == 0xD3 || op == 0xC1); + byte modrm = *(data + 1); + int mod, regop, rm; + get_modrm(modrm, &mod, ®op, &rm); + ASSERT(regop < 8); + int imm8 = -1; + int num_bytes = 2; + if (mod == 3) { + const char* mnem = NULL; + if (op == 0xD1) { + imm8 = 1; + switch (regop) { + case 2: + mnem = "rcl"; + break; + case 7: + mnem = "sar"; + break; + case 4: + mnem = "shl"; + break; + default: + UnimplementedInstruction(); + } + } else if (op == 0xC1) { + imm8 = *(data + 2); + num_bytes = 3; + switch (regop) { + case 2: + mnem = "rcl"; + break; + case 4: + mnem = "shl"; + break; + case 5: + mnem = "shr"; + break; + case 7: + mnem = "sar"; + break; + default: + UnimplementedInstruction(); + } + } else if (op == 0xD3) { + switch (regop) { + case 4: + mnem = "shl"; + break; + case 5: + mnem = "shr"; + break; + case 7: + mnem = "sar"; + break; + default: + UnimplementedInstruction(); + } + } + assert(mnem != NULL); + AppendToBuffer("%s%c %s,", + mnem, + operand_size_code(), + NameOfCPURegister(rm)); + if (imm8 > 0) { + AppendToBuffer("%d", imm8); + } else { + AppendToBuffer("cl"); + } + } else { + UnimplementedInstruction(); + } + return num_bytes; +} + + +// Returns number of bytes used, including *data. +int DisassemblerX64::JumpShort(byte* data) { + assert(*data == 0xEB); + byte b = *(data + 1); + byte* dest = data + static_cast<int8_t>(b) + 2; + AppendToBuffer("jmp %s", NameOfAddress(dest)); + return 2; +} + + +// Returns number of bytes used, including *data. +int DisassemblerX64::JumpConditional(byte* data) { + assert(*data == 0x0F); + byte cond = *(data + 1) & 0x0F; + byte* dest = data + *reinterpret_cast<int32_t*>(data + 2) + 6; + const char* mnem = conditional_code_suffix[cond]; + AppendToBuffer("j%s %s", mnem, NameOfAddress(dest)); + return 6; // includes 0x0F +} + + +// Returns number of bytes used, including *data. 
+int DisassemblerX64::JumpConditionalShort(byte* data) { + byte cond = *data & 0x0F; + byte b = *(data + 1); + byte* dest = data + static_cast<int8_t>(b) + 2; + const char* mnem = conditional_code_suffix[cond]; + AppendToBuffer("j%s %s", mnem, NameOfAddress(dest)); + return 2; +} + + +// Returns number of bytes used, including *data. +int DisassemblerX64::SetCC(byte* data) { + assert(*data == 0x0F); + byte cond = *(data + 1) & 0x0F; + const char* mnem = conditional_code_suffix[cond]; + AppendToBuffer("set%s%c ", mnem, operand_size_code()); + PrintRightByteOperand(data + 2); + return 3; // includes 0x0F +} + + +// Returns number of bytes used, including *data. +int DisassemblerX64::FPUInstruction(byte* data) { + byte b1 = *data; + byte b2 = *(data + 1); + if (b1 == 0xD9) { + const char* mnem = NULL; + switch (b2) { + case 0xE8: + mnem = "fld1"; + break; + case 0xEE: + mnem = "fldz"; + break; + case 0xE1: + mnem = "fabs"; + break; + case 0xE0: + mnem = "fchs"; + break; + case 0xF8: + mnem = "fprem"; + break; + case 0xF5: + mnem = "fprem1"; + break; + case 0xF7: + mnem = "fincstp"; + break; + case 0xE4: + mnem = "ftst"; + break; + } + if (mnem != NULL) { + AppendToBuffer("%s", mnem); + return 2; + } else if ((b2 & 0xF8) == 0xC8) { + AppendToBuffer("fxch st%d", b2 & 0x7); + return 2; + } else { + int mod, regop, rm; + get_modrm(*(data + 1), &mod, ®op, &rm); + const char* mnem = "?"; + switch (regop) { + case 0: + mnem = "fld_s"; + break; + case 3: + mnem = "fstp_s"; + break; + default: + UnimplementedInstruction(); + } + AppendToBuffer("%s ", mnem); + int count = PrintRightOperand(data + 1); + return count + 1; + } + } else if (b1 == 0xDD) { + if ((b2 & 0xF8) == 0xC0) { + AppendToBuffer("ffree st%d", b2 & 0x7); + return 2; + } else { + int mod, regop, rm; + get_modrm(*(data + 1), &mod, ®op, &rm); + const char* mnem = "?"; + switch (regop) { + case 0: + mnem = "fld_d"; + break; + case 3: + mnem = "fstp_d"; + break; + default: + UnimplementedInstruction(); + } + 
AppendToBuffer("%s ", mnem); + int count = PrintRightOperand(data + 1); + return count + 1; + } + } else if (b1 == 0xDB) { + int mod, regop, rm; + get_modrm(*(data + 1), &mod, ®op, &rm); + const char* mnem = "?"; + switch (regop) { + case 0: + mnem = "fild_s"; + break; + case 2: + mnem = "fist_s"; + break; + case 3: + mnem = "fistp_s"; + break; + default: + UnimplementedInstruction(); + } + AppendToBuffer("%s ", mnem); + int count = PrintRightOperand(data + 1); + return count + 1; + } else if (b1 == 0xDF) { + if (b2 == 0xE0) { + AppendToBuffer("fnstsw_ax"); + return 2; + } + int mod, regop, rm; + get_modrm(*(data + 1), &mod, ®op, &rm); + const char* mnem = "?"; + switch (regop) { + case 5: + mnem = "fild_d"; + break; + case 7: + mnem = "fistp_d"; + break; + default: + UnimplementedInstruction(); + } + AppendToBuffer("%s ", mnem); + int count = PrintRightOperand(data + 1); + return count + 1; + } else if (b1 == 0xDC || b1 == 0xDE) { + bool is_pop = (b1 == 0xDE); + if (is_pop && b2 == 0xD9) { + AppendToBuffer("fcompp"); + return 2; + } + const char* mnem = "FP0xDC"; + switch (b2 & 0xF8) { + case 0xC0: + mnem = "fadd"; + break; + case 0xE8: + mnem = "fsub"; + break; + case 0xC8: + mnem = "fmul"; + break; + case 0xF8: + mnem = "fdiv"; + break; + default: + UnimplementedInstruction(); + } + AppendToBuffer("%s%s st%d", mnem, is_pop ? "p" : "", b2 & 0x7); + return 2; + } else if (b1 == 0xDA && b2 == 0xE9) { + const char* mnem = "fucompp"; + AppendToBuffer("%s", mnem); + return 2; + } + AppendToBuffer("Unknown FP instruction"); + return 2; +} + +// Mnemonics for instructions 0xF0 byte. +// Returns NULL if the instruction is not handled here. 
+static const char* F0Mnem(byte f0byte) { + switch (f0byte) { + case 0x1F: + return "nop"; + case 0x31: + return "rdtsc"; + case 0xA2: + return "cpuid"; + case 0xBE: + return "movsxb"; + case 0xBF: + return "movsxw"; + case 0xB6: + return "movzxb"; + case 0xB7: + return "movzxw"; + case 0xAF: + return "imul"; + case 0xA5: + return "shld"; + case 0xAD: + return "shrd"; + case 0xAB: + return "bts"; + default: + return NULL; + } +} + +// Disassembled instruction '*instr' and writes it into 'out_buffer'. +int DisassemblerX64::InstructionDecode(v8::internal::Vector<char> out_buffer, + byte* instr) { + tmp_buffer_pos_ = 0; // starting to write as position 0 + byte* data = instr; + bool processed = true; // Will be set to false if the current instruction + // is not in 'instructions' table. + byte current; + + // Scan for prefixes. + while (true) { + current = *data; + if (current == 0x66) { + setOperandSizePrefix(current); + data++; + } else if ((current & 0xF0) == 0x40) { + setRex(current); + if (rex_w()) AppendToBuffer("REX.W "); + data++; + } else { + break; + } + } + + const InstructionDesc& idesc = instruction_table.Get(current); + switch (idesc.type) { + case ZERO_OPERANDS_INSTR: + AppendToBuffer(idesc.mnem); + data++; + break; + + case TWO_OPERANDS_INSTR: + data++; + data += PrintOperands(idesc.mnem, idesc.op_order_, data); + break; + + case JUMP_CONDITIONAL_SHORT_INSTR: + data += JumpConditionalShort(data); + break; + + case REGISTER_INSTR: + AppendToBuffer("%s%c %s", + idesc.mnem, + operand_size_code(), + NameOfCPURegister(base_reg(current & 0x07))); + data++; + break; + case PUSHPOP_INSTR: + AppendToBuffer("%s %s", + idesc.mnem, + NameOfCPURegister(base_reg(current & 0x07))); + data++; + break; + case MOVE_REG_INSTR: { + byte* addr = NULL; + switch (operand_size()) { + case 16: + addr = reinterpret_cast<byte*>(*reinterpret_cast<int16_t*>(data + 1)); + data += 3; + break; + case 32: + addr = reinterpret_cast<byte*>(*reinterpret_cast<int32_t*>(data + 1)); + data 
+= 5; + break; + case 64: + addr = reinterpret_cast<byte*>(*reinterpret_cast<int64_t*>(data + 1)); + data += 9; + break; + default: + UNREACHABLE(); + } + AppendToBuffer("mov%c %s,%s", + operand_size_code(), + NameOfCPURegister(base_reg(current & 0x07)), + NameOfAddress(addr)); + break; + } + + case CALL_JUMP_INSTR: { + byte* addr = data + *reinterpret_cast<int32_t*>(data + 1) + 5; + AppendToBuffer("%s %s", idesc.mnem, NameOfAddress(addr)); + data += 5; + break; + } + + case SHORT_IMMEDIATE_INSTR: { + byte* addr = + reinterpret_cast<byte*>(*reinterpret_cast<int32_t*>(data + 1)); + AppendToBuffer("%s rax, %s", idesc.mnem, NameOfAddress(addr)); + data += 5; + break; + } + + case NO_INSTR: + processed = false; + break; + + default: + UNIMPLEMENTED(); // This type is not implemented. + } + + // The first byte didn't match any of the simple opcodes, so we + // need to do special processing on it. + if (!processed) { + switch (*data) { + case 0xC2: + AppendToBuffer("ret 0x%x", *reinterpret_cast<uint16_t*>(data + 1)); + data += 3; + break; + + case 0x69: // fall through + case 0x6B: { + int mod, regop, rm; + get_modrm(*(data + 1), &mod, ®op, &rm); + int32_t imm = *data == 0x6B ? *(data + 2) + : *reinterpret_cast<int32_t*>(data + 2); + AppendToBuffer("imul %s,%s,0x%x", NameOfCPURegister(regop), + NameOfCPURegister(rm), imm); + data += 2 + (*data == 0x6B ? 1 : 4); + } + break; + + case 0xF6: { + int mod, regop, rm; + get_modrm(*(data + 1), &mod, ®op, &rm); + if (mod == 3 && regop == 0) { + AppendToBuffer("testb %s,%d", NameOfCPURegister(rm), *(data + 2)); + } else { + UnimplementedInstruction(); + } + data += 3; + } + break; + + case 0x81: // fall through + case 0x83: // 0x81 with sign extension bit set + data += PrintImmediateOp(data); + break; + + case 0x0F: { + byte f0byte = *(data + 1); + const char* f0mnem = F0Mnem(f0byte); + if (f0byte == 0x1F) { + data += 1; + byte modrm = *data; + data += 1; + if (((modrm >> 3) & 7) == 4) { + // SIB byte present. 
+ data += 1; + } + int mod = modrm >> 6; + if (mod == 1) { + // Byte displacement. + data += 1; + } else if (mod == 2) { + // 32-bit displacement. + data += 4; + } + AppendToBuffer("nop"); + } else if (f0byte == 0xA2 || f0byte == 0x31) { + AppendToBuffer("%s", f0mnem); + data += 2; + } else if ((f0byte & 0xF0) == 0x80) { + data += JumpConditional(data); + } else if (f0byte == 0xBE || f0byte == 0xBF || f0byte == 0xB6 || f0byte + == 0xB7 || f0byte == 0xAF) { + data += 2; + data += PrintOperands(f0mnem, REG_OPER_OP_ORDER, data); + } else if ((f0byte & 0xF0) == 0x90) { + data += SetCC(data); + } else { + data += 2; + if (f0byte == 0xAB || f0byte == 0xA5 || f0byte == 0xAD) { + // shrd, shld, bts + AppendToBuffer("%s ", f0mnem); + int mod, regop, rm; + get_modrm(*data, &mod, ®op, &rm); + data += PrintRightOperand(data); + if (f0byte == 0xAB) { + AppendToBuffer(",%s", NameOfCPURegister(regop)); + } else { + AppendToBuffer(",%s,cl", NameOfCPURegister(regop)); + } + } else { + UnimplementedInstruction(); + } + } + } + break; + + case 0x8F: { + data++; + int mod, regop, rm; + get_modrm(*data, &mod, ®op, &rm); + if (regop == 0) { + AppendToBuffer("pop "); + data += PrintRightOperand(data); + } + } + break; + + case 0xFF: { + data++; + int mod, regop, rm; + get_modrm(*data, &mod, ®op, &rm); + const char* mnem = NULL; + switch (regop) { + case 0: + mnem = "inc"; + break; + case 1: + mnem = "dec"; + break; + case 2: + mnem = "call"; + break; + case 4: + mnem = "jmp"; + break; + case 6: + mnem = "push"; + break; + default: + mnem = "???"; + } + AppendToBuffer(((regop <= 1) ? "%s%c " : "%s "), + mnem, + operand_size_code()); + data += PrintRightOperand(data); + } + break; + + case 0xC7: // imm32, fall through + case 0xC6: // imm8 + { + bool is_byte = *data == 0xC6; + data++; + + AppendToBuffer("mov%c ", is_byte ? 'b' : operand_size_code()); + data += PrintRightOperand(data); + int32_t imm = is_byte ? 
*data : *reinterpret_cast<int32_t*>(data); + AppendToBuffer(",0x%x", imm); + data += is_byte ? 1 : 4; + } + break; + + case 0x80: { + data++; + AppendToBuffer("cmpb "); + data += PrintRightOperand(data); + int32_t imm = *data; + AppendToBuffer(",0x%x", imm); + data++; + } + break; + + case 0x88: // 8bit, fall through + case 0x89: // 32bit + { + bool is_byte = *data == 0x88; + int mod, regop, rm; + data++; + get_modrm(*data, &mod, ®op, &rm); + AppendToBuffer("mov%c ", is_byte ? 'b' : operand_size_code()); + data += PrintRightOperand(data); + AppendToBuffer(",%s", NameOfCPURegister(regop)); + } + break; + + case 0x90: + case 0x91: + case 0x92: + case 0x93: + case 0x94: + case 0x95: + case 0x96: + case 0x97: { + int reg = current & 0x7 | (rex_b() ? 8 : 0); + if (reg == 0) { + AppendToBuffer("nop"); // Common name for xchg rax,rax. + } else { + AppendToBuffer("xchg%c rax, %s", + operand_size_code(), + NameOfByteCPURegister(reg)); + } + } + + + case 0xFE: { + data++; + int mod, regop, rm; + get_modrm(*data, &mod, ®op, &rm); + if (mod == 3 && regop == 1) { + AppendToBuffer("decb %s", NameOfCPURegister(rm)); + } else { + UnimplementedInstruction(); + } + data++; + } + break; + + case 0x68: + AppendToBuffer("push 0x%x", *reinterpret_cast<int32_t*>(data + 1)); + data += 5; + break; + + case 0x6A: + AppendToBuffer("push 0x%x", *reinterpret_cast<int8_t*>(data + 1)); + data += 2; + break; + + case 0xA8: + AppendToBuffer("test al,0x%x", *reinterpret_cast<uint8_t*>(data + 1)); + data += 2; + break; + + case 0xA9: + AppendToBuffer("test%c rax,0x%x", // CHECKME! 
+ operand_size_code(), + *reinterpret_cast<int32_t*>(data + 1)); + data += 5; + break; + + case 0xD1: // fall through + case 0xD3: // fall through + case 0xC1: + data += D1D3C1Instruction(data); + break; + + case 0xD9: // fall through + case 0xDA: // fall through + case 0xDB: // fall through + case 0xDC: // fall through + case 0xDD: // fall through + case 0xDE: // fall through + case 0xDF: + data += FPUInstruction(data); + break; + + case 0xEB: + data += JumpShort(data); + break; + + case 0xF2: + if (*(data + 1) == 0x0F) { + byte b2 = *(data + 2); + if (b2 == 0x11) { + AppendToBuffer("movsd "); + data += 3; + int mod, regop, rm; + get_modrm(*data, &mod, ®op, &rm); + data += PrintRightOperand(data); + AppendToBuffer(",%s", NameOfXMMRegister(regop)); + } else if (b2 == 0x10) { + data += 3; + int mod, regop, rm; + get_modrm(*data, &mod, ®op, &rm); + AppendToBuffer("movsd %s,", NameOfXMMRegister(regop)); + data += PrintRightOperand(data); + } else { + const char* mnem = "?"; + switch (b2) { + case 0x2A: + mnem = "cvtsi2sd"; + break; + case 0x58: + mnem = "addsd"; + break; + case 0x59: + mnem = "mulsd"; + break; + case 0x5C: + mnem = "subsd"; + break; + case 0x5E: + mnem = "divsd"; + break; + } + data += 3; + int mod, regop, rm; + get_modrm(*data, &mod, ®op, &rm); + if (b2 == 0x2A) { + AppendToBuffer("%s %s,", mnem, NameOfXMMRegister(regop)); + data += PrintRightOperand(data); + } else { + AppendToBuffer("%s %s,%s", mnem, NameOfXMMRegister(regop), + NameOfXMMRegister(rm)); + data++; + } + } + } else { + UnimplementedInstruction(); + } + break; + + case 0xF3: + if (*(data + 1) == 0x0F && *(data + 2) == 0x2C) { + data += 3; + data += PrintOperands("cvttss2si", REG_OPER_OP_ORDER, data); + } else { + UnimplementedInstruction(); + } + break; + + case 0xF7: + data += F7Instruction(data); + break; + + default: + UnimplementedInstruction(); + } + } // !processed + + if (tmp_buffer_pos_ < sizeof tmp_buffer_) { + tmp_buffer_[tmp_buffer_pos_] = '\0'; + } + + int instr_len = data - 
instr; + ASSERT(instr_len > 0); // Ensure progress. + + int outp = 0; + // Instruction bytes. + for (byte* bp = instr; bp < data; bp++) { + outp += v8::internal::OS::SNPrintF(out_buffer + outp, "%02x", *bp); + } + for (int i = 6 - instr_len; i >= 0; i--) { + outp += v8::internal::OS::SNPrintF(out_buffer + outp, " "); + } + + outp += v8::internal::OS::SNPrintF(out_buffer + outp, " %s", + tmp_buffer_.start()); + return instr_len; +} + +//------------------------------------------------------------------------------ + + +static const char* cpu_regs[16] = { + "rax", "rcx", "rdx", "rbx", "rsp", "rbp", "rsi", "rdi", + "r8", "r9", "r10", "r11", "r12", "r13", "r14", "r15" +}; + + +static const char* byte_cpu_regs[16] = { + "al", "cl", "dl", "bl", "spl", "bpl", "sil", "dil", + "r8l", "r9l", "r10l", "r11l", "r12l", "r13l", "r14l", "r15l" +}; + + +static const char* xmm_regs[16] = { + "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5", "xmm6", "xmm7", + "xmm8", "xmm9", "xmm10", "xmm11", "xmm12", "xmm13", "xmm14", "xmm15" +}; + + +const char* NameConverter::NameOfAddress(byte* addr) const { + static v8::internal::EmbeddedVector<char, 32> tmp_buffer; + v8::internal::OS::SNPrintF(tmp_buffer, "%p", addr); + return tmp_buffer.start(); +} + + +const char* NameConverter::NameOfConstant(byte* addr) const { + return NameOfAddress(addr); } const char* NameConverter::NameOfCPURegister(int reg) const { - UNIMPLEMENTED(); - return NULL; + if (0 <= reg && reg < 16) + return cpu_regs[reg]; + return "noreg"; } -int Disassembler::ConstantPoolSizeAt(unsigned char* addr) { - UNIMPLEMENTED(); - return 0; +const char* NameConverter::NameOfByteCPURegister(int reg) const { + if (0 <= reg && reg < 16) + return byte_cpu_regs[reg]; + return "noreg"; } -int Disassembler::InstructionDecode(v8::internal::Vector<char> buffer, - unsigned char* instruction) { - UNIMPLEMENTED(); - return 0; +const char* NameConverter::NameOfXMMRegister(int reg) const { + if (0 <= reg && reg < 16) + return xmm_regs[reg]; + return 
"noxmmreg"; } -const char* NameConverter::NameOfByteCPURegister(int a) const { - UNIMPLEMENTED(); - return NULL; + +const char* NameConverter::NameInCode(byte* addr) const { + // X64 does not embed debug strings at the moment. + UNREACHABLE(); + return ""; } -const char* NameConverter::NameOfXMMRegister(int a) const { - UNIMPLEMENTED(); - return NULL; +//------------------------------------------------------------------------------ + +Disassembler::Disassembler(const NameConverter& converter) + : converter_(converter) { } + +Disassembler::~Disassembler() { } + + +int Disassembler::InstructionDecode(v8::internal::Vector<char> buffer, + byte* instruction) { + DisassemblerX64 d(converter_, CONTINUE_ON_UNIMPLEMENTED_OPCODE); + return d.InstructionDecode(buffer, instruction); } -const char* NameConverter::NameOfConstant(unsigned char* a) const { - UNIMPLEMENTED(); - return NULL; + +// The X64 assembler does not use constant pools. +int Disassembler::ConstantPoolSizeAt(byte* instruction) { + return -1; } -const char* NameConverter::NameInCode(unsigned char* a) const { - UNIMPLEMENTED(); - return NULL; + +void Disassembler::Disassemble(FILE* f, byte* begin, byte* end) { + NameConverter converter; + Disassembler d(converter); + for (byte* pc = begin; pc < end;) { + v8::internal::EmbeddedVector<char, 128> buffer; + buffer[0] = '\0'; + byte* prev_pc = pc; + pc += d.InstructionDecode(buffer, pc); + fprintf(f, "%p", prev_pc); + fprintf(f, " "); + + for (byte* bp = prev_pc; bp < pc; bp++) { + fprintf(f, "%02x", *bp); + } + for (int i = 6 - (pc - prev_pc); i >= 0; i--) { + fprintf(f, " "); + } + fprintf(f, " %s\n", buffer.start()); + } } } // namespace disasm diff --git a/V8Binding/v8/src/x64/ic-x64.cc b/V8Binding/v8/src/x64/ic-x64.cc index abaffb3..7b8699f 100644 --- a/V8Binding/v8/src/x64/ic-x64.cc +++ b/V8Binding/v8/src/x64/ic-x64.cc @@ -212,11 +212,9 @@ void CallIC::Generate(MacroAssembler* masm, __ movq(rdx, Operand(rsp, (argc + 1) * kPointerSize)); // receiver __ 
testl(rdx, Immediate(kSmiTagMask)); __ j(zero, &invoke); - __ movq(rcx, FieldOperand(rdx, HeapObject::kMapOffset)); - __ movzxbq(rcx, FieldOperand(rcx, Map::kInstanceTypeOffset)); - __ cmpq(rcx, Immediate(static_cast<int8_t>(JS_GLOBAL_OBJECT_TYPE))); + __ CmpObjectType(rdx, JS_GLOBAL_OBJECT_TYPE, rcx); __ j(equal, &global); - __ cmpq(rcx, Immediate(static_cast<int8_t>(JS_BUILTINS_OBJECT_TYPE))); + __ CmpInstanceType(rcx, JS_BUILTINS_OBJECT_TYPE); __ j(not_equal, &invoke); // Patch the receiver on the stack. diff --git a/V8Binding/v8/src/x64/macro-assembler-x64.cc b/V8Binding/v8/src/x64/macro-assembler-x64.cc index 1a0b119..099a461 100644 --- a/V8Binding/v8/src/x64/macro-assembler-x64.cc +++ b/V8Binding/v8/src/x64/macro-assembler-x64.cc @@ -176,7 +176,7 @@ void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) { const char* name = Builtins::GetName(id); int argc = Builtins::GetArgumentsCount(id); - movq(target, code, RelocInfo::EXTERNAL_REFERENCE); // Is external reference? 
+ movq(target, code, RelocInfo::EMBEDDED_OBJECT); if (!resolved) { uint32_t flags = Bootstrapper::FixupFlagsArgumentsCount::encode(argc) | @@ -208,7 +208,9 @@ Handle<Code> MacroAssembler::ResolveBuiltin(Builtins::JavaScript id, void MacroAssembler::Set(Register dst, int64_t x) { - if (is_int32(x)) { + if (x == 0) { + xor_(dst, dst); + } else if (is_int32(x)) { movq(dst, Immediate(x)); } else if (is_uint32(x)) { movl(dst, Immediate(x)); @@ -219,14 +221,17 @@ void MacroAssembler::Set(Register dst, int64_t x) { void MacroAssembler::Set(const Operand& dst, int64_t x) { - if (is_int32(x)) { - movq(kScratchRegister, Immediate(x)); + if (x == 0) { + xor_(kScratchRegister, kScratchRegister); + movq(dst, kScratchRegister); + } else if (is_int32(x)) { + movq(dst, Immediate(x)); } else if (is_uint32(x)) { - movl(kScratchRegister, Immediate(x)); + movl(dst, Immediate(x)); } else { movq(kScratchRegister, x, RelocInfo::NONE); + movq(dst, kScratchRegister); } - movq(dst, kScratchRegister); } @@ -240,11 +245,13 @@ void MacroAssembler::LoadUnsafeSmi(Register dst, Smi* source) { void MacroAssembler::Move(Register dst, Handle<Object> source) { + ASSERT(!source->IsFailure()); if (source->IsSmi()) { if (IsUnsafeSmi(source)) { LoadUnsafeSmi(dst, source); } else { - movq(dst, source, RelocInfo::NONE); + int32_t smi = static_cast<int32_t>(reinterpret_cast<intptr_t>(*source)); + movq(dst, Immediate(smi)); } } else { movq(dst, source, RelocInfo::EMBEDDED_OBJECT); @@ -253,8 +260,13 @@ void MacroAssembler::Move(Register dst, Handle<Object> source) { void MacroAssembler::Move(const Operand& dst, Handle<Object> source) { - Move(kScratchRegister, source); - movq(dst, kScratchRegister); + if (source->IsSmi()) { + int32_t smi = static_cast<int32_t>(reinterpret_cast<intptr_t>(*source)); + movq(dst, Immediate(smi)); + } else { + movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT); + movq(dst, kScratchRegister); + } } @@ -265,14 +277,37 @@ void MacroAssembler::Cmp(Register dst, Handle<Object> 
source) { void MacroAssembler::Cmp(const Operand& dst, Handle<Object> source) { - Move(kScratchRegister, source); - cmpq(dst, kScratchRegister); + if (source->IsSmi()) { + if (IsUnsafeSmi(source)) { + LoadUnsafeSmi(kScratchRegister, source); + cmpl(dst, kScratchRegister); + } else { + // For smi-comparison, it suffices to compare the low 32 bits. + int32_t smi = static_cast<int32_t>(reinterpret_cast<intptr_t>(*source)); + cmpl(dst, Immediate(smi)); + } + } else { + ASSERT(source->IsHeapObject()); + movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT); + cmpq(dst, kScratchRegister); + } } void MacroAssembler::Push(Handle<Object> source) { - Move(kScratchRegister, source); - push(kScratchRegister); + if (source->IsSmi()) { + if (IsUnsafeSmi(source)) { + LoadUnsafeSmi(kScratchRegister, source); + push(kScratchRegister); + } else { + int32_t smi = static_cast<int32_t>(reinterpret_cast<intptr_t>(*source)); + push(Immediate(smi)); + } + } else { + ASSERT(source->IsHeapObject()); + movq(kScratchRegister, source, RelocInfo::EMBEDDED_OBJECT); + push(kScratchRegister); + } } @@ -589,7 +624,7 @@ void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id, InvokeFlag flag) { if (!resolved) { uint32_t flags = Bootstrapper::FixupFlagsArgumentsCount::encode(argc) | - Bootstrapper::FixupFlagsIsPCRelative::encode(true) | + Bootstrapper::FixupFlagsIsPCRelative::encode(false) | Bootstrapper::FixupFlagsUseCodeObject::encode(false); Unresolved entry = { pc_offset() - kTargetAddrToReturnAddrDist, flags, name }; diff --git a/V8Binding/v8/src/x64/stub-cache-x64.cc b/V8Binding/v8/src/x64/stub-cache-x64.cc index 1a24694..c577615 100644 --- a/V8Binding/v8/src/x64/stub-cache-x64.cc +++ b/V8Binding/v8/src/x64/stub-cache-x64.cc @@ -42,7 +42,8 @@ namespace internal { Object* CallStubCompiler::CompileCallConstant(Object* a, JSObject* b, JSFunction* c, - StubCompiler::CheckType d) { + String* d, + StubCompiler::CheckType e) { UNIMPLEMENTED(); return NULL; } @@ -65,7 +66,8 @@ Object* 
CallStubCompiler::CompileCallInterceptor(Object* a, -Object* CallStubCompiler::CompileCallGlobal(GlobalObject* object, +Object* CallStubCompiler::CompileCallGlobal(JSObject* object, + GlobalObject* holder, JSGlobalPropertyCell* cell, JSFunction* function, String* name) { @@ -109,7 +111,8 @@ Object* LoadStubCompiler::CompileLoadInterceptor(JSObject* a, } -Object* LoadStubCompiler::CompileLoadGlobal(GlobalObject* object, +Object* LoadStubCompiler::CompileLoadGlobal(JSObject* object, + GlobalObject* holder, JSGlobalPropertyCell* cell, String* name, bool is_dont_delete) { diff --git a/V8Binding/v8/test/cctest/cctest.status b/V8Binding/v8/test/cctest/cctest.status index 68aabb5..bb82fc8 100644 --- a/V8Binding/v8/test/cctest/cctest.status +++ b/V8Binding/v8/test/cctest/cctest.status @@ -60,3 +60,65 @@ test-log/ProfLazyMode: SKIP # the JavaScript stacks are separate. test-api/ExceptionOrder: FAIL test-api/TryCatchInTryFinally: FAIL + + +[ $arch == x64 ] +test-regexp/Graph: CRASH || FAIL +test-decls/Present: CRASH || FAIL +test-decls/Unknown: CRASH || FAIL +test-decls/Appearing: CRASH || FAIL +test-decls/Absent: CRASH || FAIL +test-debug/DebugStub: CRASH || FAIL +test-decls/AbsentInPrototype: CRASH || FAIL +test-decls/Reappearing: CRASH || FAIL +test-debug/DebugInfo: CRASH || FAIL +test-decls/ExistsInPrototype: CRASH || FAIL +test-debug/BreakPointICStore: CRASH || FAIL +test-debug/BreakPointICLoad: CRASH || FAIL +test-debug/BreakPointICCall: CRASH || FAIL +test-debug/BreakPointReturn: CRASH || FAIL +test-debug/GCDuringBreakPointProcessing: CRASH || FAIL +test-debug/BreakPointSurviveGC: CRASH || FAIL +test-debug/BreakPointThroughJavaScript: CRASH || FAIL +test-debug/ScriptBreakPointByNameThroughJavaScript: CRASH || FAIL +test-debug/ScriptBreakPointByIdThroughJavaScript: CRASH || FAIL +test-debug/EnableDisableScriptBreakPoint: CRASH || FAIL +test-debug/ConditionalScriptBreakPoint: CRASH || FAIL +test-debug/ScriptBreakPointIgnoreCount: CRASH || FAIL 
+test-debug/ScriptBreakPointReload: CRASH || FAIL +test-debug/ScriptBreakPointMultiple: CRASH || FAIL +test-debug/RemoveBreakPointInBreak: CRASH || FAIL +test-debug/DebugEvaluate: CRASH || FAIL +test-debug/ScriptBreakPointLine: CRASH || FAIL +test-debug/ScriptBreakPointLineOffset: CRASH || FAIL +test-debug/DebugStepLinear: CRASH || FAIL +test-debug/DebugStepKeyedLoadLoop: CRASH || FAIL +test-debug/DebugStepKeyedStoreLoop: CRASH || FAIL +test-debug/DebugStepLinearMixedICs: CRASH || FAIL +test-debug/DebugStepFor: CRASH || FAIL +test-debug/DebugStepIf: CRASH || FAIL +test-debug/DebugStepSwitch: CRASH || FAIL +test-debug/StepInOutSimple: CRASH || FAIL +test-debug/StepInOutBranch: CRASH || FAIL +test-debug/StepInOutTree: CRASH || FAIL +test-debug/DebugStepNatives: CRASH || FAIL +test-debug/DebugStepFunctionApply: CRASH || FAIL +test-debug/DebugStepFunctionCall: CRASH || FAIL +test-debug/StepWithException: CRASH || FAIL +test-debug/DebugBreak: CRASH || FAIL +test-debug/DisableBreak: CRASH || FAIL +test-debug/MessageQueues: CRASH || FAIL +test-debug/CallFunctionInDebugger: CRASH || FAIL +test-debug/RecursiveBreakpoints: CRASH || FAIL +test-debug/DebuggerUnload: CRASH || FAIL +test-debug/DebuggerClearMessageHandler: CRASH || FAIL +test-debug/DebuggerClearMessageHandlerWhileActive: CRASH || FAIL +test-debug/DebuggerHostDispatch: CRASH || FAIL +test-debug/DebugBreakInMessageHandler: CRASH || FAIL +test-api/HugeConsStringOutOfMemory: CRASH || FAIL +test-api/OutOfMemory: CRASH || FAIL +test-api/OutOfMemoryNested: CRASH || FAIL +test-api/Threading: CRASH || FAIL +test-api/TryCatchSourceInfo: CRASH || FAIL +test-api/RegExpInterruption: CRASH || FAIL +test-api/RegExpStringModification: CRASH || FAIL diff --git a/V8Binding/v8/test/cctest/test-api.cc b/V8Binding/v8/test/cctest/test-api.cc index e77238b..5b04b2c 100644 --- a/V8Binding/v8/test/cctest/test-api.cc +++ b/V8Binding/v8/test/cctest/test-api.cc @@ -6962,6 +6962,28 @@ THREADED_TEST(ForceDeleteWithInterceptor) { } +// Make 
sure that forcing a delete invalidates any IC stubs, so we +// don't read the hole value. +THREADED_TEST(ForceDeleteIC) { + v8::HandleScope scope; + LocalContext context; + // Create a DontDelete variable on the global object. + CompileRun("this.__proto__ = { foo: 'horse' };" + "var foo = 'fish';" + "function f() { return foo.length; }"); + // Initialize the IC for foo in f. + CompileRun("for (var i = 0; i < 4; i++) f();"); + // Make sure the value of foo is correct before the deletion. + CHECK_EQ(4, CompileRun("f()")->Int32Value()); + // Force the deletion of foo. + CHECK(context->Global()->ForceDelete(v8_str("foo"))); + // Make sure the value for foo is read from the prototype, and that + // we don't get in trouble with reading the deleted cell value + // sentinel. + CHECK_EQ(5, CompileRun("f()")->Int32Value()); +} + + v8::Persistent<Context> calling_context0; v8::Persistent<Context> calling_context1; v8::Persistent<Context> calling_context2; @@ -7041,3 +7063,21 @@ THREADED_TEST(InitGlobalVarInProtoChain) { CHECK(!result->IsUndefined()); CHECK_EQ(42, result->Int32Value()); } + + +// Regression test for issue 398. +// If a function is added to an object, creating a constant function +// field, and the result is cloned, replacing the constant function on the +// original should not affect the clone. 
+// See http://code.google.com/p/v8/issues/detail?id=398 +THREADED_TEST(ReplaceConstantFunction) { + v8::HandleScope scope; + LocalContext context; + v8::Handle<v8::Object> obj = v8::Object::New(); + v8::Handle<v8::FunctionTemplate> func_templ = v8::FunctionTemplate::New(); + v8::Handle<v8::String> foo_string = v8::String::New("foo"); + obj->Set(foo_string, func_templ->GetFunction()); + v8::Handle<v8::Object> obj_clone = obj->Clone(); + obj_clone->Set(foo_string, v8::String::New("Hello")); + CHECK(!obj->Get(foo_string)->IsUndefined()); +} diff --git a/V8Binding/v8/test/cctest/test-debug.cc b/V8Binding/v8/test/cctest/test-debug.cc index a884d77..fddd000 100644 --- a/V8Binding/v8/test/cctest/test-debug.cc +++ b/V8Binding/v8/test/cctest/test-debug.cc @@ -4892,6 +4892,7 @@ TEST(DebugBreakInMessageHandler) { } +#ifdef V8_NATIVE_REGEXP // Debug event handler which gets the function on the top frame and schedules a // break a number of times. static void DebugEventDebugBreak( @@ -4928,11 +4929,10 @@ static void DebugEventDebugBreak( TEST(RegExpDebugBreak) { + // This test only applies to native regexps. v8::HandleScope scope; DebugLocalContext env; - i::FLAG_regexp_native = true; - // Create a function for checking the function when hitting a break point. frame_function_name = CompileFunction(&env, frame_function_name_source, @@ -4957,6 +4957,7 @@ TEST(RegExpDebugBreak) { CHECK_EQ(20, break_point_hit_count); CHECK_EQ("exec", last_function_hit); } +#endif // V8_NATIVE_REGEXP // Common part of EvalContextData and NestedBreakEventContextData tests. diff --git a/V8Binding/v8/test/cctest/test-regexp.cc b/V8Binding/v8/test/cctest/test-regexp.cc index 62597fb..33a83c7 100644 --- a/V8Binding/v8/test/cctest/test-regexp.cc +++ b/V8Binding/v8/test/cctest/test-regexp.cc @@ -597,6 +597,8 @@ TEST(DispatchTableConstruction) { } +// Tests of interpreter. 
+ TEST(MacroAssembler) { V8::Initialize(NULL); byte codes[1024]; @@ -660,8 +662,8 @@ TEST(MacroAssembler) { CHECK_EQ(42, captures[0]); } - -#ifdef V8_TARGET_ARCH_IA32 // IA32 only tests. +#ifdef V8_TARGET_ARCH_IA32 // IA32 Native Regexp only tests. +#ifdef V8_NATIVE_REGEXP class ContextInitializer { public: @@ -1284,10 +1286,10 @@ TEST(MacroAssemblerIA32LotsOfRegisters) { Top::clear_pending_exception(); } +#endif // V8_REGEXP_NATIVE +#endif // V8_TARGET_ARCH_IA32 -#endif // !defined ARM - TEST(AddInverseToTable) { static const int kLimit = 1000; static const int kRangeCount = 16; diff --git a/V8Binding/v8/test/message/message.status b/V8Binding/v8/test/message/message.status index fc2896b..d40151e 100644 --- a/V8Binding/v8/test/message/message.status +++ b/V8Binding/v8/test/message/message.status @@ -29,3 +29,16 @@ prefix message # All tests in the bug directory are expected to fail. bugs: FAIL + +[ $arch == x64 ] + +simple-throw.js: FAIL +try-catch-finally-throw-in-catch-and-finally.js: FAIL +try-catch-finally-throw-in-catch.js: FAIL +try-catch-finally-throw-in-finally.js: FAIL +try-finally-throw-in-finally.js: FAIL +try-finally-throw-in-try-and-finally.js: FAIL +try-finally-throw-in-try.js: FAIL +overwritten-builtins.js: FAIL +regress-73.js: FAIL +regress-75.js: FAIL diff --git a/V8Binding/v8/test/mjsunit/apply.js b/V8Binding/v8/test/mjsunit/apply.js index 1d9dde9..a4b0fd7 100644 --- a/V8Binding/v8/test/mjsunit/apply.js +++ b/V8Binding/v8/test/mjsunit/apply.js @@ -38,12 +38,12 @@ assertTrue(this === f0.apply(), "1-0"); assertTrue(this === f0.apply(this), "2a"); assertTrue(this === f0.apply(this, new Array(1)), "2b"); assertTrue(this === f0.apply(this, new Array(2)), "2c"); -assertTrue(this === f0.apply(this, new Array(4242)), "2c"); +assertTrue(this === f0.apply(this, new Array(4242)), "2d"); assertTrue(this === f0.apply(null), "3a"); assertTrue(this === f0.apply(null, new Array(1)), "3b"); assertTrue(this === f0.apply(null, new Array(2)), "3c"); -assertTrue(this 
=== f0.apply(this, new Array(4242)), "2c"); +assertTrue(this === f0.apply(this, new Array(4242)), "3d"); assertTrue(this === f0.apply(void 0), "4a"); assertTrue(this === f0.apply(void 0, new Array(1)), "4b"); @@ -51,26 +51,26 @@ assertTrue(this === f0.apply(void 0, new Array(2)), "4c"); assertTrue(void 0 === f1.apply(), "1-1"); -assertTrue(void 0 === f1.apply(this), "2a"); -assertTrue(void 0 === f1.apply(this, new Array(1)), "2b"); -assertTrue(void 0 === f1.apply(this, new Array(2)), "2c"); -assertTrue(void 0 === f1.apply(this, new Array(4242)), "2c"); -assertTrue(42 === f1.apply(this, new Array(42, 43)), "2c"); -assertEquals("foo", f1.apply(this, new Array("foo", "bar", "baz", "boo")), "2c"); - -assertTrue(void 0 === f1.apply(null), "3a"); -assertTrue(void 0 === f1.apply(null, new Array(1)), "3b"); -assertTrue(void 0 === f1.apply(null, new Array(2)), "3c"); -assertTrue(void 0 === f1.apply(null, new Array(4242)), "2c"); -assertTrue(42 === f1.apply(null, new Array(42, 43)), "2c"); -assertEquals("foo", f1.apply(null, new Array("foo", "bar", "baz", "boo")), "2c"); - -assertTrue(void 0 === f1.apply(void 0), "4a"); -assertTrue(void 0 === f1.apply(void 0, new Array(1)), "4b"); -assertTrue(void 0 === f1.apply(void 0, new Array(2)), "4c"); -assertTrue(void 0 === f1.apply(void 0, new Array(4242)), "4c"); -assertTrue(42 === f1.apply(void 0, new Array(42, 43)), "2c"); -assertEquals("foo", f1.apply(void 0, new Array("foo", "bar", "baz", "boo")), "2c"); +assertTrue(void 0 === f1.apply(this), "5a"); +assertTrue(void 0 === f1.apply(this, new Array(1)), "5b"); +assertTrue(void 0 === f1.apply(this, new Array(2)), "5c"); +assertTrue(void 0 === f1.apply(this, new Array(4242)), "5d"); +assertTrue(42 === f1.apply(this, new Array(42, 43)), "5e"); +assertEquals("foo", f1.apply(this, new Array("foo", "bar", "baz", "bo")), "5f"); + +assertTrue(void 0 === f1.apply(null), "6a"); +assertTrue(void 0 === f1.apply(null, new Array(1)), "6b"); +assertTrue(void 0 === f1.apply(null, new Array(2)), 
"6c"); +assertTrue(void 0 === f1.apply(null, new Array(4242)), "6d"); +assertTrue(42 === f1.apply(null, new Array(42, 43)), "6e"); +assertEquals("foo", f1.apply(null, new Array("foo", "bar", "baz", "bo")), "6f"); + +assertTrue(void 0 === f1.apply(void 0), "7a"); +assertTrue(void 0 === f1.apply(void 0, new Array(1)), "7b"); +assertTrue(void 0 === f1.apply(void 0, new Array(2)), "7c"); +assertTrue(void 0 === f1.apply(void 0, new Array(4242)), "7d"); +assertTrue(42 === f1.apply(void 0, new Array(42, 43)), "7e"); +assertEquals("foo", f1.apply(void 0, new Array("foo", "bar", "ba", "b")), "7f"); var arr = new Array(42, "foo", "fish", "horse"); function j(a, b, c, d, e, f, g, h, i, j, k, l) { @@ -81,7 +81,7 @@ function j(a, b, c, d, e, f, g, h, i, j, k, l) { var expect = "42foofishhorse"; for (var i = 0; i < 8; i++) expect += "undefined"; -assertEquals(expect, j.apply(undefined, arr)); +assertEquals(expect, j.apply(undefined, arr), "apply to undefined"); assertThrows("f0.apply(this, 1);"); assertThrows("f0.apply(this, 1, 2);"); @@ -95,7 +95,7 @@ function f() { return doo; } -assertEquals("42foofishhorse", f.apply(this, arr)); +assertEquals("42foofishhorse", f.apply(this, arr), "apply to this"); function s() { var doo = this; @@ -105,7 +105,7 @@ function s() { return doo; } -assertEquals("bar42foofishhorse", s.apply("bar", arr)); +assertEquals("bar42foofishhorse", s.apply("bar", arr), "apply to string"); function al() { assertEquals(345, this); @@ -118,19 +118,24 @@ for (var j = 1; j < 0x40000000; j <<= 1) { a[j - 1] = 42; assertEquals(42 + j, al.apply(345, a)); } catch (e) { - assertTrue(e.toString().indexOf("Function.prototype.apply") != -1); + assertTrue(e.toString().indexOf("Function.prototype.apply") != -1, + "exception does not contain Function.prototype.apply: " + + e.toString()); for (; j < 0x40000000; j <<= 1) { var caught = false; try { a = new Array(j); a[j - 1] = 42; al.apply(345, a); - assertEquals("Shouldn't get", "here"); + assertUnreachable("Apply of arrray 
with length " + a.length + + " should have thrown"); } catch (e) { - assertTrue(e.toString().indexOf("Function.prototype.apply") != -1); + assertTrue(e.toString().indexOf("Function.prototype.apply") != -1, + "exception does not contain Function.prototype.apply [" + + "length = " + j + "]: " + e.toString()); caught = true; } - assertTrue(caught); + assertTrue(caught, "exception not caught"); } break; } @@ -160,8 +165,8 @@ assertEquals(1229, primes.length); var same_primes = Array.prototype.constructor.apply(Array, primes); for (var i = 0; i < primes.length; i++) - assertEquals(primes[i], same_primes[i]); -assertEquals(primes.length, same_primes.length); + assertEquals(primes[i], same_primes[i], "prime" + primes[i]); +assertEquals(primes.length, same_primes.length, "prime-length"); Array.prototype["1"] = "sep"; @@ -170,15 +175,22 @@ var holey = new Array(3); holey[0] = "mor"; holey[2] = "er"; -assertEquals("morseper", String.prototype.concat.apply("", holey)); -assertEquals("morseper", String.prototype.concat.apply("", holey, 1)); -assertEquals("morseper", String.prototype.concat.apply("", holey, 1, 2)); -assertEquals("morseper", String.prototype.concat.apply("", holey, 1, 2, 3)); -assertEquals("morseper", String.prototype.concat.apply("", holey, 1, 2, 3, 4)); +assertEquals("morseper", String.prototype.concat.apply("", holey), + "moreseper0"); +assertEquals("morseper", String.prototype.concat.apply("", holey, 1), + "moreseper1"); +assertEquals("morseper", String.prototype.concat.apply("", holey, 1, 2), + "moreseper2"); +assertEquals("morseper", String.prototype.concat.apply("", holey, 1, 2, 3), + "morseper3"); +assertEquals("morseper", String.prototype.concat.apply("", holey, 1, 2, 3, 4), + "morseper4"); primes[0] = ""; primes[1] = holey; assertThrows("String.prototype.concat.apply.apply('foo', primes)"); -assertEquals("morseper", String.prototype.concat.apply.apply(String.prototype.concat, primes)); +assertEquals("morseper", + 
String.prototype.concat.apply.apply(String.prototype.concat, primes), + "moreseper-prime"); delete(Array.prototype["1"]); diff --git a/V8Binding/v8/test/mjsunit/array-reduce.js b/V8Binding/v8/test/mjsunit/array-reduce.js index e476e1c..83d9023 100644..100755 --- a/V8Binding/v8/test/mjsunit/array-reduce.js +++ b/V8Binding/v8/test/mjsunit/array-reduce.js @@ -413,7 +413,7 @@ testReduce("reduceRight", "ArrayWithNonElementPropertiesReduceRight", 6, try { [1].reduce("not a function"); - fail("Reduce callback not a function not throwing"); + assertUnreachable("Reduce callback not a function not throwing"); } catch (e) { assertTrue(e instanceof TypeError, "reduce callback not a function not throwing TypeError"); @@ -423,7 +423,7 @@ try { try { [1].reduceRight("not a function"); - fail("ReduceRight callback not a function not throwing"); + assertUnreachable("ReduceRight callback not a function not throwing"); } catch (e) { assertTrue(e instanceof TypeError, "reduceRight callback not a function not throwing TypeError"); @@ -434,7 +434,7 @@ try { try { [].reduce(sum); - fail("Reduce no initial value not throwing"); + assertUnreachable("Reduce no initial value not throwing"); } catch (e) { assertTrue(e instanceof TypeError, "reduce no initial value not throwing TypeError"); @@ -444,7 +444,7 @@ try { try { [].reduceRight(sum); - fail("ReduceRight no initial value not throwing"); + assertUnreachable("ReduceRight no initial value not throwing"); } catch (e) { assertTrue(e instanceof TypeError, "reduceRight no initial value not throwing TypeError"); @@ -455,7 +455,7 @@ try { try { [,,,].reduce(sum); - fail("Reduce sparse no initial value not throwing"); + assertUnreachable("Reduce sparse no initial value not throwing"); } catch (e) { assertTrue(e instanceof TypeError, "reduce sparse no initial value not throwing TypeError"); @@ -465,7 +465,7 @@ try { try { [,,,].reduceRight(sum); - fail("ReduceRight sparse no initial value not throwing"); + assertUnreachable("ReduceRight sparse no 
initial value not throwing"); } catch (e) { assertTrue(e instanceof TypeError, "reduceRight sparse no initial value not throwing TypeError"); diff --git a/V8Binding/v8/test/mjsunit/big-array-literal.js b/V8Binding/v8/test/mjsunit/big-array-literal.js index d64c968..a0fad7c 100644 --- a/V8Binding/v8/test/mjsunit/big-array-literal.js +++ b/V8Binding/v8/test/mjsunit/big-array-literal.js @@ -81,7 +81,7 @@ function testLiteral(size, array_in_middle) { } // The sizes to test. -var sizes = [1, 2, 100, 200, 400]; +var sizes = [1, 2, 100, 200, 300]; // Run the test. for (var i = 0; i < sizes.length; i++) { diff --git a/V8Binding/v8/test/mjsunit/big-object-literal.js b/V8Binding/v8/test/mjsunit/big-object-literal.js index 8417951..c937f54 100644 --- a/V8Binding/v8/test/mjsunit/big-object-literal.js +++ b/V8Binding/v8/test/mjsunit/big-object-literal.js @@ -84,7 +84,7 @@ function testLiteral(size, array_in_middle) { } // The sizes to test. -var sizes = [1, 2, 100, 200, 350]; +var sizes = [1, 2, 100, 200]; // Run the test. for (var i = 0; i < sizes.length; i++) { diff --git a/V8Binding/v8/test/mjsunit/date-parse.js b/V8Binding/v8/test/mjsunit/date-parse.js index 56ceba3..bb7ecd2 100644 --- a/V8Binding/v8/test/mjsunit/date-parse.js +++ b/V8Binding/v8/test/mjsunit/date-parse.js @@ -33,16 +33,16 @@ function testDateParse(string) { var d = Date.parse(string); - assertEquals(946713600000, d, string); + assertEquals(946713600000, d, "parse: " + string); }; // For local time we just test that parsing returns non-NaN positive // number of milliseconds to make it timezone independent. 
function testDateParseLocalTime(string) { - var d = Date.parse(string); - assertTrue(!isNaN(d), string + " is NaN."); - assertTrue(d > 0, string + " <= 0."); + var d = Date.parse("parse-local-time:" + string); + assertTrue(!isNaN(d), "parse-local-time: " + string + " is NaN."); + assertTrue(d > 0, "parse-local-time: " + string + " <= 0."); }; @@ -51,7 +51,7 @@ function testDateParseMisc(array) { var string = array[0]; var expected = array[1]; var d = Date.parse(string); - assertEquals(expected, d, string); + assertEquals(expected, d, "parse-misc: " + string); } diff --git a/V8Binding/v8/test/mjsunit/debug-backtrace.js b/V8Binding/v8/test/mjsunit/debug-backtrace.js index 1d2bb9a..0c200ae 100644 --- a/V8Binding/v8/test/mjsunit/debug-backtrace.js +++ b/V8Binding/v8/test/mjsunit/debug-backtrace.js @@ -76,167 +76,166 @@ ParsedResponse.prototype.lookup = function(handle) { function listener(event, exec_state, event_data, data) { try { - if (event == Debug.DebugEvent.Break) - { - // The expected backtrace is - // 0: f - // 1: m - // 2: g - // 3: [anonymous] - - var response; - var backtrace; - var frame; - var source; - - // Get the debug command processor. - var dcp = exec_state.debugCommandProcessor(); - - // Get the backtrace. - var json; - json = '{"seq":0,"type":"request","command":"backtrace"}' - var resp = dcp.processDebugJSONRequest(json); - response = new ParsedResponse(resp); - backtrace = response.body(); - assertEquals(0, backtrace.fromFrame); - assertEquals(4, backtrace.toFrame); - assertEquals(4, backtrace.totalFrames); - var frames = backtrace.frames; - assertEquals(4, frames.length); - for (var i = 0; i < frames.length; i++) { - assertEquals('frame', frames[i].type); + if (event == Debug.DebugEvent.Break) { + // The expected backtrace is + // 0: f + // 1: m + // 2: g + // 3: [anonymous] + + var response; + var backtrace; + var frame; + var source; + + // Get the debug command processor. 
+ var dcp = exec_state.debugCommandProcessor(); + + // Get the backtrace. + var json; + json = '{"seq":0,"type":"request","command":"backtrace"}' + var resp = dcp.processDebugJSONRequest(json); + response = new ParsedResponse(resp); + backtrace = response.body(); + assertEquals(0, backtrace.fromFrame); + assertEquals(4, backtrace.toFrame); + assertEquals(4, backtrace.totalFrames); + var frames = backtrace.frames; + assertEquals(4, frames.length); + for (var i = 0; i < frames.length; i++) { + assertEquals('frame', frames[i].type); + } + assertEquals(0, frames[0].index); + assertEquals("f", response.lookup(frames[0].func.ref).name); + assertEquals(1, frames[1].index); + assertEquals("", response.lookup(frames[1].func.ref).name); + assertEquals("m", response.lookup(frames[1].func.ref).inferredName); + assertEquals(2, frames[2].index); + assertEquals("g", response.lookup(frames[2].func.ref).name); + assertEquals(3, frames[3].index); + assertEquals("", response.lookup(frames[3].func.ref).name); + + // Get backtrace with two frames. + json = '{"seq":0,"type":"request","command":"backtrace","arguments":{"fromFrame":1,"toFrame":3}}' + response = new ParsedResponse(dcp.processDebugJSONRequest(json)); + backtrace = response.body(); + assertEquals(1, backtrace.fromFrame); + assertEquals(3, backtrace.toFrame); + assertEquals(4, backtrace.totalFrames); + var frames = backtrace.frames; + assertEquals(2, frames.length); + for (var i = 0; i < frames.length; i++) { + assertEquals('frame', frames[i].type); + } + assertEquals(1, frames[0].index); + assertEquals("", response.lookup(frames[0].func.ref).name); + assertEquals("m", response.lookup(frames[0].func.ref).inferredName); + assertEquals(2, frames[1].index); + assertEquals("g", response.lookup(frames[1].func.ref).name); + + // Get backtrace with bottom two frames. 
+ json = '{"seq":0,"type":"request","command":"backtrace","arguments":{"fromFrame":0,"toFrame":2, "bottom":true}}' + response = new ParsedResponse(dcp.processDebugJSONRequest(json)); + backtrace = response.body(); + assertEquals(2, backtrace.fromFrame); + assertEquals(4, backtrace.toFrame); + assertEquals(4, backtrace.totalFrames); + var frames = backtrace.frames; + assertEquals(2, frames.length); + for (var i = 0; i < frames.length; i++) { + assertEquals('frame', frames[i].type); + } + assertEquals(2, frames[0].index); + assertEquals("g", response.lookup(frames[0].func.ref).name); + assertEquals(3, frames[1].index); + assertEquals("", response.lookup(frames[1].func.ref).name); + + // Get the individual frames. + json = '{"seq":0,"type":"request","command":"frame"}' + response = new ParsedResponse(dcp.processDebugJSONRequest(json)); + frame = response.body(); + assertEquals(0, frame.index); + assertEquals("f", response.lookup(frame.func.ref).name); + assertTrue(frame.constructCall); + assertEquals(31, frame.line); + assertEquals(3, frame.column); + assertEquals(2, frame.arguments.length); + assertEquals('x', frame.arguments[0].name); + assertEquals('number', response.lookup(frame.arguments[0].value.ref).type); + assertEquals(1, response.lookup(frame.arguments[0].value.ref).value); + assertEquals('y', frame.arguments[1].name); + assertEquals('undefined', response.lookup(frame.arguments[1].value.ref).type); + + json = '{"seq":0,"type":"request","command":"frame","arguments":{"number":0}}' + response = new ParsedResponse(dcp.processDebugJSONRequest(json)); + frame = response.body(); + assertEquals(0, frame.index); + assertEquals("f", response.lookup(frame.func.ref).name); + assertEquals(31, frame.line); + assertEquals(3, frame.column); + assertEquals(2, frame.arguments.length); + assertEquals('x', frame.arguments[0].name); + assertEquals('number', response.lookup(frame.arguments[0].value.ref).type); + assertEquals(1, 
response.lookup(frame.arguments[0].value.ref).value); + assertEquals('y', frame.arguments[1].name); + assertEquals('undefined', response.lookup(frame.arguments[1].value.ref).type); + + json = '{"seq":0,"type":"request","command":"frame","arguments":{"number":1}}' + response = new ParsedResponse(dcp.processDebugJSONRequest(json)); + frame = response.body(); + assertEquals(1, frame.index); + assertEquals("", response.lookup(frame.func.ref).name); + assertEquals("m", response.lookup(frame.func.ref).inferredName); + assertFalse(frame.constructCall); + assertEquals(35, frame.line); + assertEquals(2, frame.column); + assertEquals(0, frame.arguments.length); + + json = '{"seq":0,"type":"request","command":"frame","arguments":{"number":3}}' + response = new ParsedResponse(dcp.processDebugJSONRequest(json)); + frame = response.body(); + assertEquals(3, frame.index); + assertEquals("", response.lookup(frame.func.ref).name); + + // Source slices for the individual frames (they all refer to this script). 
+ json = '{"seq":0,"type":"request","command":"source",' + + '"arguments":{"frame":0,"fromLine":30,"toLine":32}}' + response = new ParsedResponse(dcp.processDebugJSONRequest(json)); + source = response.body(); + assertEquals("function f(x, y) {", source.source.substring(0, 18)); + assertEquals(30, source.fromLine); + assertEquals(32, source.toLine); + + json = '{"seq":0,"type":"request","command":"source",' + + '"arguments":{"frame":1,"fromLine":31,"toLine":32}}' + response = new ParsedResponse(dcp.processDebugJSONRequest(json)); + source = response.body(); + assertEquals(" a=1;", source.source.substring(0, 6)); + assertEquals(31, source.fromLine); + assertEquals(32, source.toLine); + + json = '{"seq":0,"type":"request","command":"source",' + + '"arguments":{"frame":2,"fromLine":35,"toLine":36}}' + response = new ParsedResponse(dcp.processDebugJSONRequest(json)); + source = response.body(); + assertEquals(" new f(1);", source.source.substring(0, 11)); + assertEquals(35, source.fromLine); + assertEquals(36, source.toLine); + + // Test line interval way beyond this script will result in an error. + json = '{"seq":0,"type":"request","command":"source",' + + '"arguments":{"frame":0,"fromLine":10000,"toLine":20000}}' + response = new ParsedResponse(dcp.processDebugJSONRequest(json)); + assertFalse(response.response().success); + + // Test without arguments. 
+ json = '{"seq":0,"type":"request","command":"source"}' + response = new ParsedResponse(dcp.processDebugJSONRequest(json)); + source = response.body(); + assertEquals(Debug.findScript(f).source, source.source); + + listenerCalled = true; } - assertEquals(0, frames[0].index); - assertEquals("f", response.lookup(frames[0].func.ref).name); - assertEquals(1, frames[1].index); - assertEquals("", response.lookup(frames[1].func.ref).name); - assertEquals("m", response.lookup(frames[1].func.ref).inferredName); - assertEquals(2, frames[2].index); - assertEquals("g", response.lookup(frames[2].func.ref).name); - assertEquals(3, frames[3].index); - assertEquals("", response.lookup(frames[3].func.ref).name); - - // Get backtrace with two frames. - json = '{"seq":0,"type":"request","command":"backtrace","arguments":{"fromFrame":1,"toFrame":3}}' - response = new ParsedResponse(dcp.processDebugJSONRequest(json)); - backtrace = response.body(); - assertEquals(1, backtrace.fromFrame); - assertEquals(3, backtrace.toFrame); - assertEquals(4, backtrace.totalFrames); - var frames = backtrace.frames; - assertEquals(2, frames.length); - for (var i = 0; i < frames.length; i++) { - assertEquals('frame', frames[i].type); - } - assertEquals(1, frames[0].index); - assertEquals("", response.lookup(frames[0].func.ref).name); - assertEquals("m", response.lookup(frames[0].func.ref).inferredName); - assertEquals(2, frames[1].index); - assertEquals("g", response.lookup(frames[1].func.ref).name); - - // Get backtrace with bottom two frames. 
- json = '{"seq":0,"type":"request","command":"backtrace","arguments":{"fromFrame":0,"toFrame":2, "bottom":true}}' - response = new ParsedResponse(dcp.processDebugJSONRequest(json)); - backtrace = response.body(); - assertEquals(2, backtrace.fromFrame); - assertEquals(4, backtrace.toFrame); - assertEquals(4, backtrace.totalFrames); - var frames = backtrace.frames; - assertEquals(2, frames.length); - for (var i = 0; i < frames.length; i++) { - assertEquals('frame', frames[i].type); - } - assertEquals(2, frames[0].index); - assertEquals("g", response.lookup(frames[0].func.ref).name); - assertEquals(3, frames[1].index); - assertEquals("", response.lookup(frames[1].func.ref).name); - - // Get the individual frames. - json = '{"seq":0,"type":"request","command":"frame"}' - response = new ParsedResponse(dcp.processDebugJSONRequest(json)); - frame = response.body(); - assertEquals(0, frame.index); - assertEquals("f", response.lookup(frame.func.ref).name); - assertTrue(frame.constructCall); - assertEquals(31, frame.line); - assertEquals(3, frame.column); - assertEquals(2, frame.arguments.length); - assertEquals('x', frame.arguments[0].name); - assertEquals('number', response.lookup(frame.arguments[0].value.ref).type); - assertEquals(1, response.lookup(frame.arguments[0].value.ref).value); - assertEquals('y', frame.arguments[1].name); - assertEquals('undefined', response.lookup(frame.arguments[1].value.ref).type); - - json = '{"seq":0,"type":"request","command":"frame","arguments":{"number":0}}' - response = new ParsedResponse(dcp.processDebugJSONRequest(json)); - frame = response.body(); - assertEquals(0, frame.index); - assertEquals("f", response.lookup(frame.func.ref).name); - assertEquals(31, frame.line); - assertEquals(3, frame.column); - assertEquals(2, frame.arguments.length); - assertEquals('x', frame.arguments[0].name); - assertEquals('number', response.lookup(frame.arguments[0].value.ref).type); - assertEquals(1, 
response.lookup(frame.arguments[0].value.ref).value); - assertEquals('y', frame.arguments[1].name); - assertEquals('undefined', response.lookup(frame.arguments[1].value.ref).type); - - json = '{"seq":0,"type":"request","command":"frame","arguments":{"number":1}}' - response = new ParsedResponse(dcp.processDebugJSONRequest(json)); - frame = response.body(); - assertEquals(1, frame.index); - assertEquals("", response.lookup(frame.func.ref).name); - assertEquals("m", response.lookup(frame.func.ref).inferredName); - assertFalse(frame.constructCall); - assertEquals(35, frame.line); - assertEquals(2, frame.column); - assertEquals(0, frame.arguments.length); - - json = '{"seq":0,"type":"request","command":"frame","arguments":{"number":3}}' - response = new ParsedResponse(dcp.processDebugJSONRequest(json)); - frame = response.body(); - assertEquals(3, frame.index); - assertEquals("", response.lookup(frame.func.ref).name); - - // Source slices for the individual frames (they all refer to this script). 
- json = '{"seq":0,"type":"request","command":"source",' + - '"arguments":{"frame":0,"fromLine":30,"toLine":32}}' - response = new ParsedResponse(dcp.processDebugJSONRequest(json)); - source = response.body(); - assertEquals("function f(x, y) {", source.source.substring(0, 18)); - assertEquals(30, source.fromLine); - assertEquals(32, source.toLine); - - json = '{"seq":0,"type":"request","command":"source",' + - '"arguments":{"frame":1,"fromLine":31,"toLine":32}}' - response = new ParsedResponse(dcp.processDebugJSONRequest(json)); - source = response.body(); - assertEquals(" a=1;", source.source.substring(0, 6)); - assertEquals(31, source.fromLine); - assertEquals(32, source.toLine); - - json = '{"seq":0,"type":"request","command":"source",' + - '"arguments":{"frame":2,"fromLine":35,"toLine":36}}' - response = new ParsedResponse(dcp.processDebugJSONRequest(json)); - source = response.body(); - assertEquals(" new f(1);", source.source.substring(0, 11)); - assertEquals(35, source.fromLine); - assertEquals(36, source.toLine); - - // Test line interval way beyond this script will result in an error. - json = '{"seq":0,"type":"request","command":"source",' + - '"arguments":{"frame":0,"fromLine":10000,"toLine":20000}}' - response = new ParsedResponse(dcp.processDebugJSONRequest(json)); - assertFalse(response.response().success); - - // Test without arguments. - json = '{"seq":0,"type":"request","command":"source"}' - response = new ParsedResponse(dcp.processDebugJSONRequest(json)); - source = response.body(); - assertEquals(Debug.findScript(f).source, source.source); - - listenerCalled = true; - } } catch (e) { exception = e }; diff --git a/V8Binding/v8/test/mjsunit/debug-clearbreakpointgroup.js b/V8Binding/v8/test/mjsunit/debug-clearbreakpointgroup.js new file mode 100644 index 0000000..eca9378 --- /dev/null +++ b/V8Binding/v8/test/mjsunit/debug-clearbreakpointgroup.js @@ -0,0 +1,117 @@ +// Copyright 2008 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --expose-debug-as debug
+// Get the Debug object exposed from the debug context global object.
+var Debug = debug.Debug
+
+// Simple function which stores the last debug event.
+var listenerComplete = false;
+var exception = false;
+
+var base_request = '"seq":0,"type":"request","command":"clearbreakpointgroup"';
+var scriptId = null;
+
+function safeEval(code) {
+ try {
+ return eval('(' + code + ')');
+ } catch (e) {
+ assertEquals(void 0, e);
+ return undefined;
+ }
+}
+
+function testArguments(dcp, arguments, success) {
+ var request = '{' + base_request + ',"arguments":' + arguments + '}'
+ var json_response = dcp.processDebugJSONRequest(request);
+ var response = safeEval(json_response);
+ if (success) {
+ assertTrue(response.success, json_response);
+ } else {
+ assertFalse(response.success, json_response);
+ }
+}
+
+function listener(event, exec_state, event_data, data) {
+ try {
+ if (event == Debug.DebugEvent.Break) {
+ // Get the debug command processor.
+ var dcp = exec_state.debugCommandProcessor();
+
+ // Clear breakpoint group 1.
+ testArguments(dcp, '{"groupId":1}', true);
+
+ // Indicate that all was processed.
+ listenerComplete = true;
+ } else if (event == Debug.DebugEvent.AfterCompile) {
+ scriptId = event_data.script().id();
+ assertEquals(source, event_data.script().source());
+ }
+ } catch (e) {
+ exception = e
+ };
+};
+
+
+// Add the debug event listener.
+Debug.setListener(listener);
+
+var source = 'function f(n) {\nreturn n+1;\n}\nfunction g() {return f(10);}' +
+ '\nvar r = g(); g;';
+eval(source);
+
+assertNotNull(scriptId);
+
+var groupId1 = 1;
+var groupId2 = 2;
+// Set a break point and call to invoke the debug event listener.
+var bp1 = Debug.setScriptBreakPointById(scriptId, 1, null, null, groupId1);
+var bp2 = Debug.setScriptBreakPointById(scriptId, 1, null, null, groupId2);
+var bp3 = Debug.setScriptBreakPointById(scriptId, 1, null, null, null);
+var bp4 = Debug.setScriptBreakPointById(scriptId, 3, null, null, groupId1);
+var bp5 = Debug.setScriptBreakPointById(scriptId, 4, null, null, groupId2);
+
+assertEquals(5, Debug.scriptBreakPoints().length);
+
+// Call function 'g' from the compiled script to trigger breakpoint.
+g();
+
+// Make sure that the debug event listener was invoked.
+assertTrue(listenerComplete,
+ "listener did not run to completion: " + exception);
+
+var breakpoints = Debug.scriptBreakPoints();
+assertEquals(3, breakpoints.length);
+var breakpointNumbers = breakpoints.map(
+ function(scriptBreakpoint) { return scriptBreakpoint.number(); },
+ breakpointNumbers);
+
+// Check that all breakpoints from group 1 were deleted and all the
+// rest are preserved.
+assertEquals([bp2, bp3, bp5].sort(), breakpointNumbers.sort());
+
+assertFalse(exception, "exception in listener");
diff --git a/V8Binding/v8/test/mjsunit/debug-stepin-function-call.js b/V8Binding/v8/test/mjsunit/debug-stepin-function-call.js new file mode 100644 index 0000000..9f24c01 --- /dev/null +++ b/V8Binding/v8/test/mjsunit/debug-stepin-function-call.js @@ -0,0 +1,149 @@ +// Copyright 2008 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+// Flags: --expose-debug-as debug
+// Get the Debug object exposed from the debug context global object.
+Debug = debug.Debug
+
+var exception = null;
+var state = 0;
+
+// Simple debug event handler which first time will cause 'step in' action
+// to get into g.call and then check that execution is paused inside
+// function 'g'.
+function listener(event, exec_state, event_data, data) {
+ try {
+ if (event == Debug.DebugEvent.Break) {
+ if (state == 0) {
+ // Step into f2.call:
+ exec_state.prepareStep(Debug.StepAction.StepIn, 2);
+ state = 2;
+ } else if (state == 2) {
+ assertEquals('g', event_data.func().name());
+ assertEquals(' return t + 1; // expected line',
+ event_data.sourceLineText());
+ state = 3;
+ }
+ }
+ } catch(e) {
+ exception = e;
+ }
+};
+
+// Add the debug event listener.
+Debug.setListener(listener);
+
+
+// Sample functions.
+function g(t) {
+ return t + 1; // expected line
+}
+
+// Test step into function call from a function without local variables.
+function call1() {
+ debugger;
+ g.call(null, 3);
+}
+
+
+// Test step into function call from a function with some local variables.
+function call2() {
+ var aLocalVar = 'test';
+ var anotherLocalVar = g(aLocalVar) + 's';
+ var yetAnotherLocal = 10;
+ debugger;
+ g.call(null, 3);
+}
+
+// Test step into function call which is a part of an expression.
+function call3() {
+ var alias = g;
+ debugger;
+ var r = 10 + alias.call(null, 3);
+ var aLocalVar = 'test';
+ var anotherLocalVar = g(aLocalVar) + 's';
+ var yetAnotherLocal = 10;
+}
+
+// Test step into function call from a function with some local variables.
+function call4() {
+ var alias = g;
+ debugger;
+ alias.call(null, 3);
+ var aLocalVar = 'test';
+ var anotherLocalVar = g(aLocalVar) + 's';
+ var yetAnotherLocal = 10;
+}
+
+// Test step into function apply from a function without local variables.
+function apply1() {
+ debugger;
+ g.apply(null, [3]);
+}
+
+
+// Test step into function apply from a function with some local variables.
+function apply2() {
+ var aLocalVar = 'test';
+ var anotherLocalVar = g(aLocalVar) + 's';
+ var yetAnotherLocal = 10;
+ debugger;
+ g.apply(null, [3, 4]);
+}
+
+// Test step into function apply which is a part of an expression.
+function apply3() {
+ var alias = g;
+ debugger;
+ var r = 10 + alias.apply(null, [3, 'unused arg']);
+ var aLocalVar = 'test';
+ var anotherLocalVar = g(aLocalVar) + 's';
+ var yetAnotherLocal = 10;
+}
+
+// Test step into function apply from a function with some local variables.
+function apply4() {
+ var alias = g;
+ debugger;
+ alias.apply(null, [3]);
+ var aLocalVar = 'test';
+ var anotherLocalVar = g(aLocalVar) + 's';
+ var yetAnotherLocal = 10;
+}
+
+var testFunctions =
+ [call1, call2, call3, call4, apply1, apply2, apply3, apply4];
+
+for (var i = 0; i < testFunctions.length; i++) {
+ state = 0;
+ testFunctions[i]();
+ assertNull(exception);
+ assertEquals(3, state);
+}
+
+// Get rid of the debug event listener.
+Debug.setListener(null);
\ No newline at end of file diff --git a/V8Binding/v8/test/mjsunit/global-deleted-property-ic.js b/V8Binding/v8/test/mjsunit/global-deleted-property-ic.js new file mode 100644 index 0000000..b90fc79 --- /dev/null +++ b/V8Binding/v8/test/mjsunit/global-deleted-property-ic.js @@ -0,0 +1,45 @@ +// Copyright 2009 the V8 project authors. All rights reserved. +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following +// disclaimer in the documentation and/or other materials provided +// with the distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived +// from this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +function LoadX(obj) { return obj.x; } + +// Load x from the prototype of this. Make sure to initialize the IC. 
+this.__proto__ = { x: 42 }; +for (var i = 0; i < 3; i++) assertEquals(42, LoadX(this)); + +// Introduce a global variable and make sure we load that from LoadX. +this.x = 87; +for (var i = 0; i < 3; i++) assertEquals(87, LoadX(this)); + +// Delete the global variable and make sure we get back to loading from +// the prototype. +delete this.x; +for (var i = 0; i < 3; i++) assertEquals(42, LoadX(this)); + +// ... and go back again to loading directly from the object. +this.x = 99; +for (var i = 0; i < 3; i++) assertEquals(99, LoadX(this)); diff --git a/V8Binding/v8/test/mjsunit/global-deleted-property-keyed.js b/V8Binding/v8/test/mjsunit/global-deleted-property-keyed.js new file mode 100644 index 0000000..e249fd3 --- /dev/null +++ b/V8Binding/v8/test/mjsunit/global-deleted-property-keyed.js @@ -0,0 +1,38 @@ +// Copyright 2009 the V8 project authors. All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are
+// met:
+//
+// * Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// * Redistributions in binary form must reproduce the above
+// copyright notice, this list of conditions and the following
+// disclaimer in the documentation and/or other materials provided
+// with the distribution.
+// * Neither the name of Google Inc. nor the names of its
+// contributors may be used to endorse or promote products derived
+// from this software without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+
+// Flags: --expose-natives_as natives
+// Test keyed access to deleted property in a global object without access checks.
+// Regression test that exposed the_hole value from Runtime_KeyedGetProperty.
+
+var name = "fisk";
+natives[name] = name;
+function foo() { natives[name] + 12; }
+for(var i = 0; i < 3; i++) foo();
+delete natives[name];
+for(var i = 0; i < 3; i++) foo();
diff --git a/V8Binding/v8/test/mjsunit/global-ic.js b/V8Binding/v8/test/mjsunit/global-ic.js new file mode 100644 index 0000000..22c49ab --- /dev/null +++ b/V8Binding/v8/test/mjsunit/global-ic.js @@ -0,0 +1,48 @@ +// Copyright 2009 the V8 project authors. All rights reserved. +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following +// disclaimer in the documentation and/or other materials provided +// with the distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived +// from this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +function f() { + return 87; +} + +function LoadFromGlobal(global) { return global.x; } +function StoreToGlobal(global, value) { global.x = value; } +function CallOnGlobal(global) { return global.f(); } + +// Initialize the ICs in the functions. +for (var i = 0; i < 3; i++) { + StoreToGlobal(this, 42 + i); + assertEquals(42 + i, LoadFromGlobal(this)); + assertEquals(87, CallOnGlobal(this)); +} + +// Try the ICs with a smi. This should not crash. +for (var i = 0; i < 3; i++) { + StoreToGlobal(i, 42 + i); + assertTrue(typeof LoadFromGlobal(i) == "undefined"); + assertThrows("CallOnGlobal(" + i + ")"); +} diff --git a/V8Binding/v8/test/mjsunit/mjsunit.status b/V8Binding/v8/test/mjsunit/mjsunit.status index 4bb7c16..962e4d3 100644 --- a/V8Binding/v8/test/mjsunit/mjsunit.status +++ b/V8Binding/v8/test/mjsunit/mjsunit.status @@ -51,6 +51,7 @@ debug-continue: SKIP debug-evaluate-recursive: CRASH || FAIL debug-changebreakpoint: CRASH || FAIL debug-clearbreakpoint: CRASH || FAIL +debug-clearbreakpointgroup: PASS, FAIL if $mode == debug debug-conditional-breakpoints: FAIL debug-evaluate: CRASH || FAIL debug-ignore-breakpoints: CRASH || FAIL @@ -58,6 +59,7 @@ debug-multiple-breakpoints: CRASH || FAIL debug-setbreakpoint: CRASH || FAIL || PASS debug-step-stub-callfunction: SKIP debug-stepin-constructor: CRASH, FAIL +debug-stepin-function-call: CRASH || FAIL debug-step: SKIP debug-breakpoints: PASS || FAIL debug-handle: CRASH || FAIL || PASS @@ -66,3 +68,41 @@ regress/regress-269: SKIP # Bug number 130 http://code.google.com/p/v8/issues/detail?id=130 # Fails on real ARM hardware but not on the simulator. 
string-compare-alignment: PASS || FAIL + + +[ $arch == x64 ] + +debug-backtrace.js: CRASH || FAIL +date-parse.js: CRASH || FAIL +debug-backtrace-text.js: CRASH || FAIL +debug-multiple-breakpoints.js: CRASH || FAIL +debug-breakpoints.js: CRASH || FAIL +debug-changebreakpoint.js: CRASH || FAIL +debug-clearbreakpoint.js: CRASH || FAIL +debug-conditional-breakpoints.js: CRASH || FAIL +debug-constructor.js: CRASH || FAIL +debug-continue.js: CRASH || FAIL +debug-enable-disable-breakpoints.js: CRASH || FAIL +debug-evaluate-recursive.js: CRASH || FAIL +debug-event-listener.js: CRASH || FAIL +debug-evaluate.js: CRASH || FAIL +debug-ignore-breakpoints.js: CRASH || FAIL +debug-setbreakpoint.js: CRASH || FAIL +debug-step-stub-callfunction.js: CRASH || FAIL +debug-step.js: CRASH || FAIL +mirror-date.js: CRASH || FAIL +invalid-lhs.js: CRASH || FAIL +debug-stepin-constructor.js: CRASH || FAIL +new.js: CRASH || FAIL +fuzz-natives.js: CRASH || FAIL +greedy.js: CRASH || FAIL +debug-handle.js: CRASH || FAIL +string-indexof.js: CRASH || FAIL +debug-clearbreakpointgroup.js: CRASH || FAIL +regress/regress-269.js: CRASH || FAIL +div-mod.js: CRASH || FAIL +unicode-test.js: CRASH || FAIL +regress/regress-392.js: CRASH || FAIL +regress/regress-1200351.js: CRASH || FAIL +regress/regress-998565.js: CRASH || FAIL +tools/tickprocessor.js: CRASH || FAIL diff --git a/V8Binding/v8/test/mjsunit/regress/regress-244.js b/V8Binding/v8/test/mjsunit/regress/regress-244.js index ffddcf8..dc5336f 100644 --- a/V8Binding/v8/test/mjsunit/regress/regress-244.js +++ b/V8Binding/v8/test/mjsunit/regress/regress-244.js @@ -57,7 +57,7 @@ function run() { var threw = false; try { decodeURI(value); - fail(value); + assertUnreachable(value); } catch (e) { assertInstanceof(e, URIError); } diff --git a/V8Binding/v8/test/mjsunit/regress/regress-397.js b/V8Binding/v8/test/mjsunit/regress/regress-397.js new file mode 100644 index 0000000..111f4a6 --- /dev/null +++ b/V8Binding/v8/test/mjsunit/regress/regress-397.js @@ -0,0 
+1,34 @@ +// Copyright 2009 the V8 project authors. All rights reserved. +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following +// disclaimer in the documentation and/or other materials provided +// with the distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived +// from this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +// See http://code.google.com/p/v8/issues/detail?id=397 + +assertEquals("Infinity", String(Math.pow(Infinity, 0.5))); +assertEquals(0, Math.pow(Infinity, -0.5)); + +assertEquals("Infinity", String(Math.pow(-Infinity, 0.5))); +assertEquals(0, Math.pow(-Infinity, -0.5)); diff --git a/V8Binding/v8/test/mjsunit/regress/regress-399.js b/V8Binding/v8/test/mjsunit/regress/regress-399.js new file mode 100644 index 0000000..2ee998b --- /dev/null +++ b/V8Binding/v8/test/mjsunit/regress/regress-399.js @@ -0,0 +1,32 @@ +// Copyright 2009 the V8 project authors. All rights reserved. +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following +// disclaimer in the documentation and/or other materials provided +// with the distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived +// from this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// See http://code.google.com/p/v8/issues/detail?id=399 + +var date = new Date(1.009804e12); +var year = String(date).match(/.*(200\d)/)[1]; +assertEquals(year, date.getFullYear()); diff --git a/V8Binding/v8/test/mjsunit/smi-ops.js b/V8Binding/v8/test/mjsunit/smi-ops.js index 5520327..284050d 100644 --- a/V8Binding/v8/test/mjsunit/smi-ops.js +++ b/V8Binding/v8/test/mjsunit/smi-ops.js @@ -56,15 +56,15 @@ function Add100Reversed(x) { assertEquals(1, Add1(0)); // fast case assertEquals(1, Add1Reversed(0)); // fast case -assertEquals(SMI_MAX + ONE, Add1(SMI_MAX)); // overflow -assertEquals(SMI_MAX + ONE, Add1Reversed(SMI_MAX)); // overflow +assertEquals(SMI_MAX + ONE, Add1(SMI_MAX), "smimax + 1"); +assertEquals(SMI_MAX + ONE, Add1Reversed(SMI_MAX), "1 + smimax"); assertEquals(42 + ONE, Add1(OBJ_42)); // non-smi assertEquals(42 + ONE, Add1Reversed(OBJ_42)); // non-smi assertEquals(100, Add100(0)); // fast case assertEquals(100, Add100Reversed(0)); // fast case -assertEquals(SMI_MAX + ONE_HUNDRED, Add100(SMI_MAX)); // overflow -assertEquals(SMI_MAX + ONE_HUNDRED, Add100Reversed(SMI_MAX)); // overflow +assertEquals(SMI_MAX + ONE_HUNDRED, Add100(SMI_MAX), "smimax + 100"); +assertEquals(SMI_MAX + ONE_HUNDRED, Add100Reversed(SMI_MAX), " 100 + smimax"); assertEquals(42 + ONE_HUNDRED, Add100(OBJ_42)); // non-smi assertEquals(42 + ONE_HUNDRED, Add100Reversed(OBJ_42)); // non-smi @@ -148,8 +148,8 @@ assertEquals(21, Sar1(OBJ_42)); 
assertEquals(0, Shr1Reversed(OBJ_42)); assertEquals(0, Sar1Reversed(OBJ_42)); -assertEquals(6, Shr100(100)); -assertEquals(6, Sar100(100)); +assertEquals(6, Shr100(100), "100 >>> 100"); +assertEquals(6, Sar100(100), "100 >> 100"); assertEquals(12, Shr100Reversed(99)); assertEquals(12, Sar100Reversed(99)); assertEquals(201326592, Shr100(SMI_MIN)); @@ -201,17 +201,17 @@ assertEquals(0x16, x ^ y); var v = 0; assertEquals(-1, ~v); v = SMI_MIN; -assertEquals(0x3fffffff, ~v); +assertEquals(0x3fffffff, ~v, "~smimin"); v = SMI_MAX; -assertEquals(-0x40000000, ~v); +assertEquals(-0x40000000, ~v, "~smimax"); // Overflowing ++ and --. v = SMI_MAX; v++; -assertEquals(0x40000000, v); +assertEquals(0x40000000, v, "smimax++"); v = SMI_MIN; v--; -assertEquals(-0x40000001, v); +assertEquals(-0x40000001, v, "smimin--"); // Not actually Smi operations. // Check that relations on unary ops work. @@ -234,14 +234,14 @@ assertEquals(-2.25, -(v * v)); var x1 = 0x10000000; var x2 = 0x40000002; var x3 = 0x40000000; -assertEquals(0x40000000, x1 << (x2 - x3)); +assertEquals(0x40000000, x1 << (x2 - x3), "0x10000000<<1(1)"); // Smi input to bitop gives non-smi result where the rhs could be overwritten // if it were a float, but it isn't. x1 = 0x10000000 x2 = 4 x3 = 2 -assertEquals(0x40000000, x1 << (x2 - x3)); +assertEquals(0x40000000, x1 << (x2 - x3), "0x10000000<<2(2)"); // Test shift operators on non-smi inputs, giving smi and non-smi results. 
@@ -258,12 +258,12 @@ function testShiftNonSmis() { assertEquals(neg_non_smi, (neg_non_smi) >> 0); assertEquals(neg_non_smi + 0x100000000, (neg_non_smi) >>> 0); assertEquals(neg_non_smi, (neg_non_smi) << 0); - assertEquals(pos_smi, (pos_smi) >> 0); - assertEquals(pos_smi, (pos_smi) >>> 0); - assertEquals(pos_smi, (pos_smi) << 0); - assertEquals(neg_smi, (neg_smi) >> 0); - assertEquals(neg_smi + 0x100000000, (neg_smi) >>> 0); - assertEquals(neg_smi, (neg_smi) << 0); + assertEquals(pos_smi, (pos_smi) >> 0, "possmi >> 0"); + assertEquals(pos_smi, (pos_smi) >>> 0, "possmi >>>0"); + assertEquals(pos_smi, (pos_smi) << 0, "possmi << 0"); + assertEquals(neg_smi, (neg_smi) >> 0, "negsmi >> 0"); + assertEquals(neg_smi + 0x100000000, (neg_smi) >>> 0, "negsmi >>> 0"); + assertEquals(neg_smi, (neg_smi) << 0), "negsmi << 0"; assertEquals(pos_non_smi / 2, (pos_non_smi) >> 1); assertEquals(pos_non_smi / 2, (pos_non_smi) >>> 1); @@ -283,18 +283,22 @@ function testShiftNonSmis() { assertEquals(-0x46536000, (pos_non_smi + 0.5) << 3); assertEquals(0x73594000, (pos_non_smi + 0.5) << 4); - assertEquals(neg_non_smi / 2, (neg_non_smi) >> 1); - assertEquals(neg_non_smi / 2 + 0x100000000 / 2, (neg_non_smi) >>> 1); + assertEquals(neg_non_smi / 2, (neg_non_smi) >> 1, "negnonsmi >> 1"); + + assertEquals(neg_non_smi / 2 + 0x100000000 / 2, (neg_non_smi) >>> 1, + "negnonsmi >>> 1"); assertEquals(0x1194D800, (neg_non_smi) << 1); assertEquals(neg_non_smi / 8, (neg_non_smi) >> 3); assertEquals(neg_non_smi / 8 + 0x100000000 / 8, (neg_non_smi) >>> 3); assertEquals(0x46536000, (neg_non_smi) << 3); assertEquals(-0x73594000, (neg_non_smi) << 4); assertEquals(neg_non_smi, (neg_non_smi - 0.5) >> 0); - assertEquals(neg_non_smi + 0x100000000, (neg_non_smi - 0.5) >>> 0); + assertEquals(neg_non_smi + 0x100000000, (neg_non_smi - 0.5) >>> 0, + "negnonsmi.5 >>> 0"); assertEquals(neg_non_smi, (neg_non_smi - 0.5) << 0); assertEquals(neg_non_smi / 2, (neg_non_smi - 0.5) >> 1); - assertEquals(neg_non_smi / 2 + 
0x100000000 / 2, (neg_non_smi - 0.5) >>> 1); + assertEquals(neg_non_smi / 2 + 0x100000000 / 2, (neg_non_smi - 0.5) >>> 1, + "negnonsmi.5 >>> 1"); assertEquals(0x1194D800, (neg_non_smi - 0.5) << 1); assertEquals(neg_non_smi / 8, (neg_non_smi - 0.5) >> 3); assertEquals(neg_non_smi / 8 + 0x100000000 / 8, (neg_non_smi - 0.5) >>> 3); @@ -308,9 +312,9 @@ function testShiftNonSmis() { assertEquals(pos_smi / 8, (pos_smi) >>> 3); assertEquals(-0x2329b000, (pos_smi) << 3); assertEquals(0x73594000, (pos_smi) << 5); - assertEquals(pos_smi, (pos_smi + 0.5) >> 0); - assertEquals(pos_smi, (pos_smi + 0.5) >>> 0); - assertEquals(pos_smi, (pos_smi + 0.5) << 0); + assertEquals(pos_smi, (pos_smi + 0.5) >> 0, "possmi.5 >> 0"); + assertEquals(pos_smi, (pos_smi + 0.5) >>> 0, "possmi.5 >>> 0"); + assertEquals(pos_smi, (pos_smi + 0.5) << 0, "possmi.5 << 0"); assertEquals(pos_smi / 2, (pos_smi + 0.5) >> 1); assertEquals(pos_smi / 2, (pos_smi + 0.5) >>> 1); assertEquals(pos_non_smi, (pos_smi + 0.5) << 1); @@ -326,9 +330,9 @@ function testShiftNonSmis() { assertEquals(neg_smi / 8 + 0x100000000 / 8, (neg_smi) >>> 3); assertEquals(0x46536000, (neg_smi) << 4); assertEquals(-0x73594000, (neg_smi) << 5); - assertEquals(neg_smi, (neg_smi - 0.5) >> 0); - assertEquals(neg_smi + 0x100000000, (neg_smi - 0.5) >>> 0); - assertEquals(neg_smi, (neg_smi - 0.5) << 0); + assertEquals(neg_smi, (neg_smi - 0.5) >> 0, "negsmi.5 >> 0"); + assertEquals(neg_smi + 0x100000000, (neg_smi - 0.5) >>> 0, "negsmi.5 >>> 0"); + assertEquals(neg_smi, (neg_smi - 0.5) << 0, "negsmi.5 << 0"); assertEquals(neg_smi / 2, (neg_smi - 0.5) >> 1); assertEquals(neg_smi / 2 + 0x100000000 / 2, (neg_smi - 0.5) >>> 1); assertEquals(neg_non_smi, (neg_smi - 0.5) << 1); @@ -349,12 +353,12 @@ function testShiftNonSmis() { assertEquals(neg_non_smi, (neg_32 + neg_non_smi) >> 0); assertEquals(neg_non_smi + 0x100000000, (neg_32 + neg_non_smi) >>> 0); assertEquals(neg_non_smi, (neg_32 + neg_non_smi) << 0); - assertEquals(pos_smi, (two_32 + pos_smi) 
>> 0); - assertEquals(pos_smi, (two_32 + pos_smi) >>> 0); - assertEquals(pos_smi, (two_32 + pos_smi) << 0); - assertEquals(neg_smi, (neg_32 + neg_smi) >> 0); + assertEquals(pos_smi, (two_32 + pos_smi) >> 0, "2^32+possmi >> 0"); + assertEquals(pos_smi, (two_32 + pos_smi) >>> 0, "2^32+possmi >>> 0"); + assertEquals(pos_smi, (two_32 + pos_smi) << 0, "2^32+possmi << 0"); + assertEquals(neg_smi, (neg_32 + neg_smi) >> 0, "2^32+negsmi >> 0"); assertEquals(neg_smi + 0x100000000, (neg_32 + neg_smi) >>> 0); - assertEquals(neg_smi, (neg_32 + neg_smi) << 0); + assertEquals(neg_smi, (neg_32 + neg_smi) << 0, "2^32+negsmi << 0"); assertEquals(pos_non_smi / 2, (two_32 + pos_non_smi) >> 1); assertEquals(pos_non_smi / 2, (two_32 + pos_non_smi) >>> 1); @@ -419,9 +423,9 @@ function testShiftNonSmis() { assertEquals((neg_smi + 0x100000000) / 8, (neg_32 + neg_smi) >>> 3); assertEquals(0x46536000, (neg_32 + neg_smi) << 4); assertEquals(-0x73594000, (neg_32 + neg_smi) << 5); - assertEquals(neg_smi, (neg_32 + neg_smi - 0.5) >> 0); + assertEquals(neg_smi, (neg_32 + neg_smi - 0.5) >> 0, "-2^32+negsmi.5 >> 0"); assertEquals(neg_smi + 0x100000000, (neg_32 + neg_smi - 0.5) >>> 0); - assertEquals(neg_smi, (neg_32 + neg_smi - 0.5) << 0); + assertEquals(neg_smi, (neg_32 + neg_smi - 0.5) << 0, "-2^32+negsmi.5 << 0"); assertEquals(neg_smi / 2, (neg_32 + neg_smi - 0.5) >> 1); assertEquals(neg_smi / 2 + 0x100000000 / 2, (neg_32 + neg_smi - 0.5) >>> 1); assertEquals(neg_non_smi, (neg_32 + neg_smi - 0.5) << 1); @@ -447,9 +451,9 @@ function testShiftNonSmis() { assertEquals(pos_smi, (pos_smi) >> zero); assertEquals(pos_smi, (pos_smi) >>> zero); assertEquals(pos_smi, (pos_smi) << zero); - assertEquals(neg_smi, (neg_smi) >> zero); + assertEquals(neg_smi, (neg_smi) >> zero, "negsmi >> zero"); assertEquals(neg_smi + 0x100000000, (neg_smi) >>> zero); - assertEquals(neg_smi, (neg_smi) << zero); + assertEquals(neg_smi, (neg_smi) << zero, "negsmi << zero"); assertEquals(pos_non_smi / 2, (pos_non_smi) >> one); 
assertEquals(pos_non_smi / 2, (pos_non_smi) >>> one); @@ -543,9 +547,9 @@ function testShiftNonSmis() { assertEquals(pos_smi, (pos_smi) >> zero); assertEquals(pos_smi, (pos_smi) >>> zero); assertEquals(pos_smi, (pos_smi) << zero); - assertEquals(neg_smi, (neg_smi) >> zero); + assertEquals(neg_smi, (neg_smi) >> zero, "negsmi >> zero(2)"); assertEquals(neg_smi + 0x100000000, (neg_smi) >>> zero); - assertEquals(neg_smi, (neg_smi) << zero); + assertEquals(neg_smi, (neg_smi) << zero, "negsmi << zero(2)"); assertEquals(pos_non_smi / 2, (pos_non_smi) >> one); assertEquals(pos_non_smi / 2, (pos_non_smi) >>> one); @@ -609,9 +613,9 @@ function testShiftNonSmis() { assertEquals(neg_smi / 8 + 0x100000000 / 8, (neg_smi) >>> three); assertEquals(0x46536000, (neg_smi) << four); assertEquals(-0x73594000, (neg_smi) << five); - assertEquals(neg_smi, (neg_smi - 0.5) >> zero); + assertEquals(neg_smi, (neg_smi - 0.5) >> zero, "negsmi.5 >> zero"); assertEquals(neg_smi + 0x100000000, (neg_smi - 0.5) >>> zero); - assertEquals(neg_smi, (neg_smi - 0.5) << zero); + assertEquals(neg_smi, (neg_smi - 0.5) << zero, "negsmi.5 << zero"); assertEquals(neg_smi / 2, (neg_smi - 0.5) >> one); assertEquals(neg_smi / 2 + 0x100000000 / 2, (neg_smi - 0.5) >>> one); assertEquals(neg_non_smi, (neg_smi - 0.5) << one); diff --git a/V8Binding/v8/test/mjsunit/string-match.js b/V8Binding/v8/test/mjsunit/string-match.js index 202396d..202396d 100644..100755 --- a/V8Binding/v8/test/mjsunit/string-match.js +++ b/V8Binding/v8/test/mjsunit/string-match.js diff --git a/V8Binding/v8/test/mjsunit/testcfg.py b/V8Binding/v8/test/mjsunit/testcfg.py index 9c7e028..96840f5 100644 --- a/V8Binding/v8/test/mjsunit/testcfg.py +++ b/V8Binding/v8/test/mjsunit/testcfg.py @@ -29,10 +29,12 @@ import test import os from os.path import join, dirname, exists import re +import tempfile FLAGS_PATTERN = re.compile(r"//\s+Flags:(.*)") FILES_PATTERN = re.compile(r"//\s+Files:(.*)") +SELF_SCRIPT_PATTERN = re.compile(r"//\s+Env: 
TEST_FILE_NAME") class MjsunitTestCase(test.TestCase): @@ -42,6 +44,7 @@ class MjsunitTestCase(test.TestCase): self.file = file self.config = config self.mode = mode + self.self_script = False def GetLabel(self): return "%s %s" % (self.mode, self.GetName()) @@ -55,19 +58,43 @@ class MjsunitTestCase(test.TestCase): flags_match = FLAGS_PATTERN.search(source) if flags_match: result += flags_match.group(1).strip().split() - files_match = FILES_PATTERN.search(source); additional_files = [] - if files_match: - additional_files += files_match.group(1).strip().split() + files_match = FILES_PATTERN.search(source); + # Accept several lines of 'Files:' + while True: + if files_match: + additional_files += files_match.group(1).strip().split() + files_match = FILES_PATTERN.search(source, files_match.end()) + else: + break for a_file in additional_files: result.append(join(dirname(self.config.root), '..', a_file)) framework = join(dirname(self.config.root), 'mjsunit', 'mjsunit.js') + if SELF_SCRIPT_PATTERN.search(source): + result.append(self.CreateSelfScript()) result += [framework, self.file] return result def GetSource(self): return open(self.file).read() + def CreateSelfScript(self): + (fd_self_script, self_script) = tempfile.mkstemp(suffix=".js") + def MakeJsConst(name, value): + return "var %(name)s=\'%(value)s\';\n" % \ + {'name': name, \ + 'value': value.replace('\\', '\\\\').replace('\'', '\\\'') } + try: + os.write(fd_self_script, MakeJsConst('TEST_FILE_NAME', self.file)) + except IOError, e: + test.PrintError("write() " + str(e)) + os.close(fd_self_script) + self.self_script = self_script + return self_script + + def Cleanup(self): + if self.self_script: + test.CheckedUnlink(self.self_script) class MjsunitTestConfiguration(test.TestConfiguration): diff --git a/V8Binding/v8/test/mjsunit/to_number_order.js b/V8Binding/v8/test/mjsunit/to_number_order.js new file mode 100644 index 0000000..1329bad --- /dev/null +++ b/V8Binding/v8/test/mjsunit/to_number_order.js @@ -0,0 
+1,129 @@ +// Copyright 2009 the V8 project authors. All rights reserved. +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following +// disclaimer in the documentation and/or other materials provided +// with the distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived +// from this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ +var x = ""; +var v = new Object(); +var w = new Object(); +var vv = function() { x += "hest"; return 1; } +var ww = function() { x += "fisk"; return 2; } +v.valueOf = vv; +w.valueOf = ww; +assertEquals(1, Math.min(v,w)); +assertEquals("hestfisk", x, "min"); + +x = ""; +assertEquals(2, Math.max(v,w)); +assertEquals("hestfisk", x, "max"); + +x = ""; +assertEquals(Math.atan2(1, 2), Math.atan2(v, w)); +// JSC says fiskhest. +assertEquals("hestfisk", x, "atan2"); + +x = ""; +assertEquals(1, Math.pow(v, w)); +assertEquals("hestfisk", x, "pow"); + +var year = { valueOf: function() { x += 1; return 2007; } }; +var month = { valueOf: function() { x += 2; return 2; } }; +var date = { valueOf: function() { x += 3; return 4; } }; +var hours = { valueOf: function() { x += 4; return 13; } }; +var minutes = { valueOf: function() { x += 5; return 50; } }; +var seconds = { valueOf: function() { x += 6; return 0; } }; +var ms = { valueOf: function() { x += 7; return 999; } }; + +x = ""; +new Date(year, month, date, hours, minutes, seconds, ms); +// JSC fails this one: Returns 12345671234567. +assertEquals("1234567", x, "Date"); + +x = ""; +Date(year, month, date, hours, minutes, seconds, ms); +assertEquals("", x, "Date not constructor"); + +x = ""; +Date.UTC(year, month, date, hours, minutes, seconds, ms); +// JSC fails this one: Returns 12345671234567. 
+assertEquals("1234567", x, "Date.UTC"); + +x = ""; +new Date().setSeconds(seconds, ms); +assertEquals("67", x, "Date.UTC"); + +x = ""; +new Date().setSeconds(seconds, ms); +assertEquals("67", x, "Date.setSeconds"); + +x = ""; +new Date().setUTCSeconds(seconds, ms); +assertEquals("67", x, "Date.setUTCSeconds"); + +x = ""; +new Date().setMinutes(minutes, seconds, ms); +assertEquals("567", x, "Date.setMinutes"); + +x = ""; +new Date().setUTCMinutes(minutes, seconds, ms); +assertEquals("567", x, "Date.setUTCMinutes"); + +x = ""; +new Date().setHours(hours, minutes, seconds, ms); +assertEquals("4567", x, "Date.setHours"); + +x = ""; +new Date().setUTCHours(hours, minutes, seconds, ms); +assertEquals("4567", x, "Date.setUTCHours"); + +x = ""; +new Date().setDate(date, hours, minutes, seconds, ms); +assertEquals("3", x, "Date.setDate"); + +x = ""; +new Date().setUTCDate(date, hours, minutes, seconds, ms); +assertEquals("3", x, "Date.setUTCDate"); + +x = ""; +new Date().setMonth(month, date, hours, minutes, seconds, ms); +assertEquals("23", x, "Date.setMonth"); + +x = ""; +new Date().setUTCMonth(month, date, hours, minutes, seconds, ms); +assertEquals("23", x, "Date.setUTCMonth"); + +x = ""; +new Date().setFullYear(year, month, date, hours, minutes, seconds, ms); +assertEquals("123", x, "Date.setFullYear"); + +x = ""; +new Date().setUTCFullYear(year, month, date, hours, minutes, seconds, ms); +assertEquals("123", x, "Date.setUTCFullYear"); + +var a = { valueOf: function() { x += "hest"; return 97; } }; +var b = { valueOf: function() { x += "fisk"; return 98; } }; +assertEquals("ab", String.fromCharCode(a, b), "String.fromCharCode"); + +print("ok"); diff --git a/V8Binding/v8/test/mjsunit/tools/tickprocessor-test.default b/V8Binding/v8/test/mjsunit/tools/tickprocessor-test.default new file mode 100644 index 0000000..a689ea8 --- /dev/null +++ b/V8Binding/v8/test/mjsunit/tools/tickprocessor-test.default @@ -0,0 +1,60 @@ +Statistical profiling result from v8.log, (13 ticks, 2 
unaccounted, 0 excluded). + + [Unknown]: + ticks total nonlib name + 2 15.4% + + [Shared libraries]: + ticks total nonlib name + 2 15.4% 0.0% /lib32/libm-2.7.so + 1 7.7% 0.0% ffffe000-fffff000 + + [JavaScript]: + ticks total nonlib name + 1 7.7% 10.0% LazyCompile: exp native math.js:41 + + [C++]: + ticks total nonlib name + 2 15.4% 20.0% v8::internal::Runtime_Math_exp(v8::internal::Arguments) + 1 7.7% 10.0% v8::internal::JSObject::LocalLookupRealNamedProperty(v8::internal::String*, v8::internal::LookupResult*) + 1 7.7% 10.0% v8::internal::HashTable<v8::internal::StringDictionaryShape, v8::internal::String*>::FindEntry(v8::internal::String*) + 1 7.7% 10.0% fegetexcept + 1 7.7% 10.0% exp + + [GC]: + ticks total nonlib name + 0 0.0% + + [Bottom up (heavy) profile]: + Note: percentage shows a share of a particular caller in the total + amount of its parent calls. + Callers occupying less than 2.0% are not shown. + + ticks parent name + 2 15.4% v8::internal::Runtime_Math_exp(v8::internal::Arguments) + 2 100.0% LazyCompile: exp native math.js:41 + 2 100.0% Script: exp.js + + 2 15.4% /lib32/libm-2.7.so + 2 100.0% LazyCompile: exp native math.js:41 + 2 100.0% Script: exp.js + + 1 7.7% v8::internal::JSObject::LocalLookupRealNamedProperty(v8::internal::String*, v8::internal::LookupResult*) + 1 100.0% Script: exp.js + + 1 7.7% v8::internal::HashTable<v8::internal::StringDictionaryShape, v8::internal::String*>::FindEntry(v8::internal::String*) + 1 100.0% Script: exp.js + + 1 7.7% ffffe000-fffff000 + + 1 7.7% fegetexcept + 1 100.0% LazyCompile: exp native math.js:41 + 1 100.0% Script: exp.js + + 1 7.7% exp + 1 100.0% LazyCompile: exp native math.js:41 + 1 100.0% Script: exp.js + + 1 7.7% LazyCompile: exp native math.js:41 + 1 100.0% Script: exp.js + diff --git a/V8Binding/v8/test/mjsunit/tools/tickprocessor-test.gc-state b/V8Binding/v8/test/mjsunit/tools/tickprocessor-test.gc-state new file mode 100644 index 0000000..40f90db --- /dev/null +++ 
b/V8Binding/v8/test/mjsunit/tools/tickprocessor-test.gc-state @@ -0,0 +1,21 @@ +Statistical profiling result from v8.log, (13 ticks, 0 unaccounted, 13 excluded). + + [Shared libraries]: + ticks total nonlib name + + [JavaScript]: + ticks total nonlib name + + [C++]: + ticks total nonlib name + + [GC]: + ticks total nonlib name + 0 0.0% + + [Bottom up (heavy) profile]: + Note: percentage shows a share of a particular caller in the total + amount of its parent calls. + Callers occupying less than 2.0% are not shown. + + ticks parent name diff --git a/V8Binding/v8/test/mjsunit/tools/tickprocessor-test.ignore-unknown b/V8Binding/v8/test/mjsunit/tools/tickprocessor-test.ignore-unknown new file mode 100644 index 0000000..87beb08 --- /dev/null +++ b/V8Binding/v8/test/mjsunit/tools/tickprocessor-test.ignore-unknown @@ -0,0 +1,56 @@ +Statistical profiling result from v8.log, (13 ticks, 2 unaccounted, 0 excluded). + + [Shared libraries]: + ticks total nonlib name + 2 18.2% 0.0% /lib32/libm-2.7.so + 1 9.1% 0.0% ffffe000-fffff000 + + [JavaScript]: + ticks total nonlib name + 1 9.1% 12.5% LazyCompile: exp native math.js:41 + + [C++]: + ticks total nonlib name + 2 18.2% 25.0% v8::internal::Runtime_Math_exp(v8::internal::Arguments) + 1 9.1% 12.5% v8::internal::JSObject::LocalLookupRealNamedProperty(v8::internal::String*, v8::internal::LookupResult*) + 1 9.1% 12.5% v8::internal::HashTable<v8::internal::StringDictionaryShape, v8::internal::String*>::FindEntry(v8::internal::String*) + 1 9.1% 12.5% fegetexcept + 1 9.1% 12.5% exp + + [GC]: + ticks total nonlib name + 0 0.0% + + [Bottom up (heavy) profile]: + Note: percentage shows a share of a particular caller in the total + amount of its parent calls. + Callers occupying less than 2.0% are not shown. 
+ + ticks parent name + 2 18.2% v8::internal::Runtime_Math_exp(v8::internal::Arguments) + 2 100.0% LazyCompile: exp native math.js:41 + 2 100.0% Script: exp.js + + 2 18.2% /lib32/libm-2.7.so + 2 100.0% LazyCompile: exp native math.js:41 + 2 100.0% Script: exp.js + + 1 9.1% v8::internal::JSObject::LocalLookupRealNamedProperty(v8::internal::String*, v8::internal::LookupResult*) + 1 100.0% Script: exp.js + + 1 9.1% v8::internal::HashTable<v8::internal::StringDictionaryShape, v8::internal::String*>::FindEntry(v8::internal::String*) + 1 100.0% Script: exp.js + + 1 9.1% ffffe000-fffff000 + + 1 9.1% fegetexcept + 1 100.0% LazyCompile: exp native math.js:41 + 1 100.0% Script: exp.js + + 1 9.1% exp + 1 100.0% LazyCompile: exp native math.js:41 + 1 100.0% Script: exp.js + + 1 9.1% LazyCompile: exp native math.js:41 + 1 100.0% Script: exp.js + diff --git a/V8Binding/v8/test/mjsunit/tools/tickprocessor-test.log b/V8Binding/v8/test/mjsunit/tools/tickprocessor-test.log new file mode 100644 index 0000000..75daad6 --- /dev/null +++ b/V8Binding/v8/test/mjsunit/tools/tickprocessor-test.log @@ -0,0 +1,24 @@ +shared-library,"shell",0x08048000,0x081ee000 +shared-library,"/lib32/libm-2.7.so",0xf7db6000,0xf7dd9000 +shared-library,"ffffe000-fffff000",0xffffe000,0xfffff000 +profiler,"begin",1 +code-creation,Stub,0xf540a100,474,"CEntryStub" +code-creation,Script,0xf541cd80,736,"exp.js" +code-creation,Stub,0xf541d0e0,47,"RuntimeStub_Math_exp" +code-creation,LazyCompile,0xf541d120,145,"exp native math.js:41" +code-creation,LoadIC,0xf541d280,117,"j" +code-creation,LoadIC,0xf541d360,63,"i" +tick,0x80f82d1,0xffdfe880,0,0xf541ce5c +tick,0x80f89a1,0xffdfecf0,0,0xf541ce5c +tick,0x8123b5c,0xffdff1a0,0,0xf541d1a1,0xf541ceea +tick,0x8123b65,0xffdff1a0,0,0xf541d1a1,0xf541ceea +tick,0xf541d2be,0xffdff1e4,0 +tick,0xf541d320,0xffdff1dc,0 +tick,0xf541d384,0xffdff1d8,0 +tick,0xf7db94da,0xffdff0ec,0,0xf541d1a1,0xf541ceea +tick,0xf7db951c,0xffdff0f0,0,0xf541d1a1,0xf541ceea 
+tick,0xf7dbc508,0xffdff14c,0,0xf541d1a1,0xf541ceea +tick,0xf7dbff21,0xffdff198,0,0xf541d1a1,0xf541ceea +tick,0xf7edec90,0xffdff0ec,0,0xf541d1a1,0xf541ceea +tick,0xffffe402,0xffdff488,0 +profiler,"end" diff --git a/V8Binding/v8/test/mjsunit/tools/tickprocessor-test.separate-ic b/V8Binding/v8/test/mjsunit/tools/tickprocessor-test.separate-ic new file mode 100644 index 0000000..7eb3d9a --- /dev/null +++ b/V8Binding/v8/test/mjsunit/tools/tickprocessor-test.separate-ic @@ -0,0 +1,66 @@ +Statistical profiling result from v8.log, (13 ticks, 2 unaccounted, 0 excluded). + + [Unknown]: + ticks total nonlib name + 2 15.4% + + [Shared libraries]: + ticks total nonlib name + 2 15.4% 0.0% /lib32/libm-2.7.so + 1 7.7% 0.0% ffffe000-fffff000 + + [JavaScript]: + ticks total nonlib name + 1 7.7% 10.0% LoadIC: j + 1 7.7% 10.0% LoadIC: i + 1 7.7% 10.0% LazyCompile: exp native math.js:41 + + [C++]: + ticks total nonlib name + 2 15.4% 20.0% v8::internal::Runtime_Math_exp(v8::internal::Arguments) + 1 7.7% 10.0% v8::internal::JSObject::LocalLookupRealNamedProperty(v8::internal::String*, v8::internal::LookupResult*) + 1 7.7% 10.0% v8::internal::HashTable<v8::internal::StringDictionaryShape, v8::internal::String*>::FindEntry(v8::internal::String*) + 1 7.7% 10.0% fegetexcept + 1 7.7% 10.0% exp + + [GC]: + ticks total nonlib name + 0 0.0% + + [Bottom up (heavy) profile]: + Note: percentage shows a share of a particular caller in the total + amount of its parent calls. + Callers occupying less than 2.0% are not shown. 
+ + ticks parent name + 2 15.4% v8::internal::Runtime_Math_exp(v8::internal::Arguments) + 2 100.0% LazyCompile: exp native math.js:41 + 2 100.0% Script: exp.js + + 2 15.4% /lib32/libm-2.7.so + 2 100.0% LazyCompile: exp native math.js:41 + 2 100.0% Script: exp.js + + 1 7.7% v8::internal::JSObject::LocalLookupRealNamedProperty(v8::internal::String*, v8::internal::LookupResult*) + 1 100.0% Script: exp.js + + 1 7.7% v8::internal::HashTable<v8::internal::StringDictionaryShape, v8::internal::String*>::FindEntry(v8::internal::String*) + 1 100.0% Script: exp.js + + 1 7.7% ffffe000-fffff000 + + 1 7.7% fegetexcept + 1 100.0% LazyCompile: exp native math.js:41 + 1 100.0% Script: exp.js + + 1 7.7% exp + 1 100.0% LazyCompile: exp native math.js:41 + 1 100.0% Script: exp.js + + 1 7.7% LoadIC: j + + 1 7.7% LoadIC: i + + 1 7.7% LazyCompile: exp native math.js:41 + 1 100.0% Script: exp.js + diff --git a/V8Binding/v8/test/mjsunit/tools/tickprocessor.js b/V8Binding/v8/test/mjsunit/tools/tickprocessor.js new file mode 100644 index 0000000..587106a --- /dev/null +++ b/V8Binding/v8/test/mjsunit/tools/tickprocessor.js @@ -0,0 +1,268 @@ +// Copyright 2009 the V8 project authors. All rights reserved. +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following +// disclaimer in the documentation and/or other materials provided +// with the distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived +// from this software without specific prior written permission. 
+// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Load implementations from <project root>/tools. +// Files: tools/splaytree.js tools/codemap.js tools/csvparser.js +// Files: tools/consarray.js tools/profile.js tools/profile_view.js +// Files: tools/logreader.js tools/tickprocessor.js +// Env: TEST_FILE_NAME + +(function testArgumentsProcessor() { + var p_default = new ArgumentsProcessor([]); + assertTrue(p_default.parse()); + assertEquals(ArgumentsProcessor.DEFAULTS, p_default.result()); + + var p_logFile = new ArgumentsProcessor(['logfile.log']); + assertTrue(p_logFile.parse()); + assertEquals('logfile.log', p_logFile.result().logFileName); + + var p_platformAndLog = new ArgumentsProcessor(['--windows', 'winlog.log']); + assertTrue(p_platformAndLog.parse()); + assertEquals('windows', p_platformAndLog.result().platform); + assertEquals('winlog.log', p_platformAndLog.result().logFileName); + + var p_flags = new ArgumentsProcessor(['--gc', '--separate-ic']); + assertTrue(p_flags.parse()); + assertEquals(TickProcessor.VmStates.GC, p_flags.result().stateFilter); + assertTrue(p_flags.result().separateIc); + + var p_nmAndLog = new ArgumentsProcessor(['--nm=mn', 'nmlog.log']); + assertTrue(p_nmAndLog.parse()); + 
assertEquals('mn', p_nmAndLog.result().nm); + assertEquals('nmlog.log', p_nmAndLog.result().logFileName); + + var p_bad = new ArgumentsProcessor(['--unknown', 'badlog.log']); + assertFalse(p_bad.parse()); +})(); + + +(function testUnixCppEntriesProvider() { + var oldLoadSymbols = UnixCppEntriesProvider.prototype.loadSymbols; + + // shell executable + UnixCppEntriesProvider.prototype.loadSymbols = function(libName) { + this.symbols = [[ + ' U operator delete[](void*)@@GLIBCXX_3.4', + '08049790 T _init', + '08049f50 T _start', + '08139150 t v8::internal::Runtime_StringReplaceRegExpWithString(v8::internal::Arguments)', + '08139ca0 T v8::internal::Runtime::GetElementOrCharAt(v8::internal::Handle<v8::internal::Object>, unsigned int)', + '0813a0b0 t v8::internal::Runtime_DebugGetPropertyDetails(v8::internal::Arguments)', + '08181d30 W v8::internal::RegExpMacroAssemblerIrregexp::stack_limit_slack()', + ' w __gmon_start__', + '081f08a0 B stdout' + ].join('\n'), '']; + }; + + var shell_prov = new UnixCppEntriesProvider(); + var shell_syms = []; + shell_prov.parseVmSymbols('shell', 0x08048000, 0x081ee000, + function (name, start, end) { + shell_syms.push(Array.prototype.slice.apply(arguments, [0])); + }); + assertEquals( + [['_init', 0x08049790, 0x08049f50], + ['_start', 0x08049f50, 0x08139150], + ['v8::internal::Runtime_StringReplaceRegExpWithString(v8::internal::Arguments)', 0x08139150, 0x08139ca0], + ['v8::internal::Runtime::GetElementOrCharAt(v8::internal::Handle<v8::internal::Object>, unsigned int)', 0x08139ca0, 0x0813a0b0], + ['v8::internal::Runtime_DebugGetPropertyDetails(v8::internal::Arguments)', 0x0813a0b0, 0x08181d30], + ['v8::internal::RegExpMacroAssemblerIrregexp::stack_limit_slack()', 0x08181d30, 0x081ee000]], + shell_syms); + + // libc library + UnixCppEntriesProvider.prototype.loadSymbols = function(libName) { + this.symbols = [[ + '000162a0 T __libc_init_first', + '0002a5f0 T __isnan', + '0002a5f0 W isnan', + '0002aaa0 W scalblnf', + '0002aaa0 W scalbnf', + 
'0011a340 T __libc_thread_freeres', + '00128860 R _itoa_lower_digits'].join('\n'), '']; + }; + var libc_prov = new UnixCppEntriesProvider(); + var libc_syms = []; + libc_prov.parseVmSymbols('libc', 0xf7c5c000, 0xf7da5000, + function (name, start, end) { + libc_syms.push(Array.prototype.slice.apply(arguments, [0])); + }); + assertEquals( + [['__libc_init_first', 0xf7c5c000 + 0x000162a0, 0xf7c5c000 + 0x0002a5f0], + ['isnan', 0xf7c5c000 + 0x0002a5f0, 0xf7c5c000 + 0x0002aaa0], + ['scalbnf', 0xf7c5c000 + 0x0002aaa0, 0xf7c5c000 + 0x0011a340], + ['__libc_thread_freeres', 0xf7c5c000 + 0x0011a340, 0xf7da5000]], + libc_syms); + + UnixCppEntriesProvider.prototype.loadSymbols = oldLoadSymbols; +})(); + + +(function testWindowsCppEntriesProvider() { + var oldLoadSymbols = WindowsCppEntriesProvider.prototype.loadSymbols; + + WindowsCppEntriesProvider.prototype.loadSymbols = function(libName) { + this.symbols = [ + ' Start Length Name Class', + ' 0001:00000000 000ac902H .text CODE', + ' 0001:000ac910 000005e2H .text$yc CODE', + ' Address Publics by Value Rva+Base Lib:Object', + ' 0000:00000000 __except_list 00000000 <absolute>', + ' 0001:00000000 ?ReadFile@@YA?AV?$Handle@VString@v8@@@v8@@PBD@Z 00401000 f shell.obj', + ' 0001:000000a0 ?Print@@YA?AV?$Handle@VValue@v8@@@v8@@ABVArguments@2@@Z 004010a0 f shell.obj', + ' 0001:00001230 ??1UTF8Buffer@internal@v8@@QAE@XZ 00402230 f v8_snapshot:scanner.obj', + ' 0001:00001230 ??1Utf8Value@String@v8@@QAE@XZ 00402230 f v8_snapshot:api.obj', + ' 0001:000954ba __fclose_nolock 004964ba f LIBCMT:fclose.obj', + ' 0002:00000000 __imp__SetThreadPriority@8 004af000 kernel32:KERNEL32.dll', + ' 0003:00000418 ?in_use_list_@PreallocatedStorage@internal@v8@@0V123@A 00544418 v8_snapshot:allocation.obj', + ' Static symbols', + ' 0001:00000b70 ?DefaultFatalErrorHandler@v8@@YAXPBD0@Z 00401b70 f v8_snapshot:api.obj', + ' 0001:000010b0 ?EnsureInitialized@v8@@YAXPBD@Z 004020b0 f v8_snapshot:api.obj', + ' 0001:000ad17b ??__Fnomem@?5???2@YAPAXI@Z@YAXXZ 004ae17b f 
LIBCMT:new.obj' + ].join('\r\n'); + }; + var shell_prov = new WindowsCppEntriesProvider(); + var shell_syms = []; + shell_prov.parseVmSymbols('shell.exe', 0x00400000, 0x0057c000, + function (name, start, end) { + shell_syms.push(Array.prototype.slice.apply(arguments, [0])); + }); + assertEquals( + [['ReadFile', 0x00401000, 0x004010a0], + ['Print', 0x004010a0, 0x00402230], + ['v8::String::?1Utf8Value', 0x00402230, 0x004964ba], + ['v8::DefaultFatalErrorHandler', 0x00401b70, 0x004020b0], + ['v8::EnsureInitialized', 0x004020b0, 0x0057c000]], + shell_syms); + + WindowsCppEntriesProvider.prototype.loadSymbols = oldLoadSymbols; +})(); + + +function CppEntriesProviderMock() { +}; + + +CppEntriesProviderMock.prototype.parseVmSymbols = function( + name, startAddr, endAddr, symbolAdder) { + var symbols = { + 'shell': + [['v8::internal::JSObject::LocalLookupRealNamedProperty(v8::internal::String*, v8::internal::LookupResult*)', 0x080f8800, 0x080f8d90], + ['v8::internal::HashTable<v8::internal::StringDictionaryShape, v8::internal::String*>::FindEntry(v8::internal::String*)', 0x080f8210, 0x080f8800], + ['v8::internal::Runtime_Math_exp(v8::internal::Arguments)', 0x08123b20, 0x08123b80]], + '/lib32/libm-2.7.so': + [['exp', startAddr + 0x00009e80, startAddr + 0x00009f30], + ['fegetexcept', startAddr + 0x000061e0, startAddr + 0x00008b10]], + 'ffffe000-fffff000': []}; + assertTrue(name in symbols); + var syms = symbols[name]; + for (var i = 0; i < syms.length; ++i) { + symbolAdder.apply(null, syms[i]); + } +}; + + +function PrintMonitor(outputOrFileName) { + var expectedOut = typeof outputOrFileName == 'string' ? 
+ this.loadExpectedOutput(outputOrFileName) : outputOrFileName; + var outputPos = 0; + var diffs = this.diffs = []; + var realOut = this.realOut = []; + + this.oldPrint = print; + print = function(str) { + var strSplit = str.split('\n'); + for (var i = 0; i < strSplit.length; ++i) { + s = strSplit[i]; + realOut.push(s); + assertTrue(outputPos < expectedOut.length, + 'unexpected output: "' + s + '"'); + if (expectedOut[outputPos] != s) { + diffs.push('line ' + outputPos + ': expected <' + + expectedOut[outputPos] + '> found <' + s + '>\n'); + } + outputPos++; + } + }; +}; + + +PrintMonitor.prototype.loadExpectedOutput = function(fileName) { + var output = readFile(fileName); + return output.split('\n'); +}; + + +PrintMonitor.prototype.finish = function() { + print = this.oldPrint; + if (this.diffs.length > 0) { + print(this.realOut.join('\n')); + assertEquals([], this.diffs); + } +}; + + +function driveTickProcessorTest( + separateIc, ignoreUnknown, stateFilter, logInput, refOutput) { + // TEST_FILE_NAME must be provided by test runner. + assertEquals('string', typeof TEST_FILE_NAME); + var pathLen = TEST_FILE_NAME.lastIndexOf('/'); + if (pathLen == -1) { + pathLen = TEST_FILE_NAME.lastIndexOf('\\'); + } + assertTrue(pathLen != -1); + var testsPath = TEST_FILE_NAME.substr(0, pathLen + 1); + var tp = new TickProcessor( + new CppEntriesProviderMock(), separateIc, ignoreUnknown, stateFilter); + var pm = new PrintMonitor(testsPath + refOutput); + tp.processLogFile(testsPath + logInput); + // Hack file name to avoid dealing with platform specifics. 
+ tp.lastLogFileName_ = 'v8.log'; + tp.printStatistics(); + pm.finish(); +}; + + +(function testProcessing() { + var testData = { + 'Default': [ + false, false, null, + 'tickprocessor-test.log', 'tickprocessor-test.default'], + 'SeparateIc': [ + true, false, null, + 'tickprocessor-test.log', 'tickprocessor-test.separate-ic'], + 'IgnoreUnknown': [ + false, true, null, + 'tickprocessor-test.log', 'tickprocessor-test.ignore-unknown'], + 'GcState': [ + false, false, TickProcessor.VmStates.GC, + 'tickprocessor-test.log', 'tickprocessor-test.gc-state'] + }; + for (var testName in testData) { + print('=== testProcessing-' + testName + ' ==='); + driveTickProcessorTest.apply(null, testData[testName]); + } +})(); diff --git a/V8Binding/v8/tools/gyp/v8.gyp b/V8Binding/v8/tools/gyp/v8.gyp index 8815456..b11a7ff 100644 --- a/V8Binding/v8/tools/gyp/v8.gyp +++ b/V8Binding/v8/tools/gyp/v8.gyp @@ -30,244 +30,8 @@ 'chromium_code': 1, 'msvs_use_common_release': 0, 'gcc_version%': 'unknown', - 'base_source_files': [ - '../../src/arm/assembler-arm-inl.h', - '../../src/arm/assembler-arm.cc', - '../../src/arm/assembler-arm.h', - '../../src/arm/builtins-arm.cc', - '../../src/arm/codegen-arm.cc', - '../../src/arm/codegen-arm.h', - '../../src/arm/constants-arm.h', - '../../src/arm/cpu-arm.cc', - '../../src/arm/debug-arm.cc', - '../../src/arm/disasm-arm.cc', - '../../src/arm/frames-arm.cc', - '../../src/arm/frames-arm.h', - '../../src/arm/ic-arm.cc', - '../../src/arm/jump-target-arm.cc', - '../../src/arm/macro-assembler-arm.cc', - '../../src/arm/macro-assembler-arm.h', - '../../src/arm/regexp-macro-assembler-arm.cc', - '../../src/arm/regexp-macro-assembler-arm.h', - '../../src/arm/register-allocator-arm.cc', - '../../src/arm/simulator-arm.cc', - '../../src/arm/stub-cache-arm.cc', - '../../src/arm/virtual-frame-arm.cc', - '../../src/arm/virtual-frame-arm.h', - '../../src/ia32/assembler-ia32-inl.h', - '../../src/ia32/assembler-ia32.cc', - '../../src/ia32/assembler-ia32.h', - 
'../../src/ia32/builtins-ia32.cc', - '../../src/ia32/codegen-ia32.cc', - '../../src/ia32/codegen-ia32.h', - '../../src/ia32/cpu-ia32.cc', - '../../src/ia32/debug-ia32.cc', - '../../src/ia32/disasm-ia32.cc', - '../../src/ia32/frames-ia32.cc', - '../../src/ia32/frames-ia32.h', - '../../src/ia32/ic-ia32.cc', - '../../src/ia32/jump-target-ia32.cc', - '../../src/ia32/macro-assembler-ia32.cc', - '../../src/ia32/macro-assembler-ia32.h', - '../../src/ia32/regexp-macro-assembler-ia32.cc', - '../../src/ia32/regexp-macro-assembler-ia32.h', - '../../src/ia32/register-allocator-ia32.cc', - '../../src/ia32/stub-cache-ia32.cc', - '../../src/ia32/virtual-frame-ia32.cc', - '../../src/ia32/virtual-frame-ia32.h', - '../../src/third_party/dtoa/dtoa.c', - '../../src/accessors.cc', - '../../src/accessors.h', - '../../src/allocation.cc', - '../../src/allocation.h', - '../../src/api.cc', - '../../src/api.h', - '../../src/apiutils.h', - '../../src/arguments.h', - '../../src/assembler.cc', - '../../src/assembler.h', - '../../src/ast.cc', - '../../src/ast.h', - '../../src/bootstrapper.cc', - '../../src/bootstrapper.h', - '../../src/builtins.cc', - '../../src/builtins.h', - '../../src/bytecodes-irregexp.h', - '../../src/char-predicates-inl.h', - '../../src/char-predicates.h', - '../../src/checks.cc', - '../../src/checks.h', - '../../src/code-stubs.cc', - '../../src/code-stubs.h', - '../../src/code.h', - '../../src/codegen-inl.h', - '../../src/codegen.cc', - '../../src/codegen.h', - '../../src/compilation-cache.cc', - '../../src/compilation-cache.h', - '../../src/compiler.cc', - '../../src/compiler.h', - '../../src/contexts.cc', - '../../src/contexts.h', - '../../src/conversions-inl.h', - '../../src/conversions.cc', - '../../src/conversions.h', - '../../src/counters.cc', - '../../src/counters.h', - '../../src/cpu.h', - '../../src/dateparser.cc', - '../../src/dateparser.h', - '../../src/dateparser-inl.h', - '../../src/debug.cc', - '../../src/debug.h', - '../../src/debug-agent.cc', - 
'../../src/debug-agent.h', - '../../src/disasm.h', - '../../src/disassembler.cc', - '../../src/disassembler.h', - '../../src/dtoa-config.c', - '../../src/execution.cc', - '../../src/execution.h', - '../../src/factory.cc', - '../../src/factory.h', - '../../src/flag-definitions.h', - '../../src/flags.cc', - '../../src/flags.h', - '../../src/frames-inl.h', - '../../src/frames.cc', - '../../src/frames.h', - '../../src/frame-element.h', - '../../src/func-name-inferrer.cc', - '../../src/func-name-inferrer.h', - '../../src/global-handles.cc', - '../../src/global-handles.h', - '../../src/globals.h', - '../../src/handles-inl.h', - '../../src/handles.cc', - '../../src/handles.h', - '../../src/hashmap.cc', - '../../src/hashmap.h', - '../../src/heap-inl.h', - '../../src/heap.cc', - '../../src/heap.h', - '../../src/ic-inl.h', - '../../src/ic.cc', - '../../src/ic.h', - '../../src/interpreter-irregexp.cc', - '../../src/interpreter-irregexp.h', - '../../src/jump-target.cc', - '../../src/jump-target.h', - '../../src/jump-target-inl.h', - '../../src/jsregexp-inl.h', - '../../src/jsregexp.cc', - '../../src/jsregexp.h', - '../../src/list-inl.h', - '../../src/list.h', - '../../src/log.cc', - '../../src/log-inl.h', - '../../src/log.h', - '../../src/log-utils.cc', - '../../src/log-utils.h', - '../../src/macro-assembler.h', - '../../src/mark-compact.cc', - '../../src/mark-compact.h', - '../../src/memory.h', - '../../src/messages.cc', - '../../src/messages.h', - '../../src/natives.h', - '../../src/objects-debug.cc', - '../../src/objects-inl.h', - '../../src/objects.cc', - '../../src/objects.h', - '../../src/oprofile-agent.h', - '../../src/oprofile-agent.cc', - '../../src/parser.cc', - '../../src/parser.h', - '../../src/platform-freebsd.cc', - '../../src/platform-linux.cc', - '../../src/platform-macos.cc', - '../../src/platform-nullos.cc', - '../../src/platform-posix.cc', - '../../src/platform-win32.cc', - '../../src/platform.h', - '../../src/prettyprinter.cc', - 
'../../src/prettyprinter.h', - '../../src/property.cc', - '../../src/property.h', - '../../src/regexp-macro-assembler-irregexp-inl.h', - '../../src/regexp-macro-assembler-irregexp.cc', - '../../src/regexp-macro-assembler-irregexp.h', - '../../src/regexp-macro-assembler-tracer.cc', - '../../src/regexp-macro-assembler-tracer.h', - '../../src/regexp-macro-assembler.cc', - '../../src/regexp-macro-assembler.h', - '../../src/regexp-stack.cc', - '../../src/regexp-stack.h', - '../../src/register-allocator.h', - '../../src/register-allocator-inl.h', - '../../src/register-allocator.cc', - '../../src/rewriter.cc', - '../../src/rewriter.h', - '../../src/runtime.cc', - '../../src/runtime.h', - '../../src/scanner.cc', - '../../src/scanner.h', - '../../src/scopeinfo.cc', - '../../src/scopeinfo.h', - '../../src/scopes.cc', - '../../src/scopes.h', - '../../src/serialize.cc', - '../../src/serialize.h', - '../../src/shell.h', - '../../src/smart-pointer.h', - '../../src/snapshot-common.cc', - '../../src/snapshot.h', - '../../src/spaces-inl.h', - '../../src/spaces.cc', - '../../src/spaces.h', - '../../src/string-stream.cc', - '../../src/string-stream.h', - '../../src/stub-cache.cc', - '../../src/stub-cache.h', - '../../src/token.cc', - '../../src/token.h', - '../../src/top.cc', - '../../src/top.h', - '../../src/unicode-inl.h', - '../../src/unicode.cc', - '../../src/unicode.h', - '../../src/usage-analyzer.cc', - '../../src/usage-analyzer.h', - '../../src/utils.cc', - '../../src/utils.h', - '../../src/v8-counters.cc', - '../../src/v8-counters.h', - '../../src/v8.cc', - '../../src/v8.h', - '../../src/v8threads.cc', - '../../src/v8threads.h', - '../../src/variables.cc', - '../../src/variables.h', - '../../src/version.cc', - '../../src/version.h', - '../../src/virtual-frame.h', - '../../src/virtual-frame.cc', - '../../src/zone-inl.h', - '../../src/zone.cc', - '../../src/zone.h', - ], - 'not_base_source_files': [ - # These files are #included by others and are not meant to be compiled - # 
directly. - '../../src/third_party/dtoa/dtoa.c', - ], - 'd8_source_files': [ - '../../src/d8-debug.cc', - '../../src/d8-posix.cc', - '../../src/d8-readline.cc', - '../../src/d8-windows.cc', - '../../src/d8.cc', - ], + 'target_arch%': 'ia32', + 'v8_use_snapshot%': 'true', }, 'includes': [ '../../../build/common.gypi', @@ -276,6 +40,19 @@ 'defines': [ 'ENABLE_LOGGING_AND_PROFILING', ], + 'conditions': [ + ['target_arch=="arm"', { + 'defines': [ + 'V8_TARGET_ARCH_ARM', + ], + }], + ['target_arch=="ia32"', { + 'defines': [ + 'V8_TARGET_ARCH_IA32', + 'V8_NATIVE_REGEXP', + ], + }], + ], 'configurations': { 'Debug': { 'defines': [ @@ -306,10 +83,10 @@ 'conditions': [ [ 'gcc_version=="44"', { 'cflags': [ - # Avoid gcc 4.4 strict aliasing issues in dtoa.c - '-fno-strict-aliasing', - # Avoid crashes with gcc 4.4 in the v8 test suite. - '-fno-tree-vrp', + # Avoid gcc 4.4 strict aliasing issues in dtoa.c + '-fno-strict-aliasing', + # Avoid crashes with gcc 4.4 in the v8 test suite. + '-fno-tree-vrp', ], }], ], @@ -350,117 +127,328 @@ }, }, 'targets': [ - # Targets that apply to any architecture. 
{ - 'target_name': 'js2c', + 'target_name': 'v8', 'type': 'none', - 'variables': { - 'library_files': [ - '../../src/runtime.js', - '../../src/v8natives.js', - '../../src/array.js', - '../../src/string.js', - '../../src/uri.js', - '../../src/math.js', - '../../src/messages.js', - '../../src/apinatives.js', - '../../src/debug-delay.js', - '../../src/mirror-delay.js', - '../../src/date-delay.js', - '../../src/json-delay.js', - '../../src/regexp-delay.js', - '../../src/macros.py', - ], - }, - 'actions': [ - { - 'action_name': 'js2c', - 'inputs': [ - '../../tools/js2c.py', - '<@(library_files)', - ], - 'outputs': [ - '<(SHARED_INTERMEDIATE_DIR)/libraries.cc', - '<(SHARED_INTERMEDIATE_DIR)/libraries-empty.cc', - ], - 'action': ['python', '../../tools/js2c.py', '<@(_outputs)', 'CORE', '<@(library_files)'], + 'conditions': [ + ['v8_use_snapshot=="true"', { + 'dependencies': ['v8_snapshot'], }, + { + 'dependencies': ['v8_nosnapshot'], + }], ], - }, - { - 'target_name': 'd8_js2c', - 'type': 'none', - 'variables': { - 'library_files': [ - '../../src/d8.js', - '../../src/macros.py', + 'direct_dependent_settings': { + 'include_dirs': [ + '../../include', ], }, + }, + { + 'target_name': 'v8_snapshot', + 'type': '<(library)', + 'dependencies': [ + 'mksnapshot', + 'js2c', + 'v8_base', + ], + 'include_dirs+': [ + '../../src', + ], + 'sources': [ + '<(SHARED_INTERMEDIATE_DIR)/libraries-empty.cc', + '<(INTERMEDIATE_DIR)/snapshot.cc', + ], 'actions': [ { - 'action_name': 'js2c', + 'action_name': 'run_mksnapshot', 'inputs': [ - '../../tools/js2c.py', - '<@(library_files)', - ], - 'extra_inputs': [ + '<(PRODUCT_DIR)/<(EXECUTABLE_PREFIX)mksnapshot<(EXECUTABLE_SUFFIX)', ], 'outputs': [ - '<(SHARED_INTERMEDIATE_DIR)/d8-js.cc', - '<(SHARED_INTERMEDIATE_DIR)/d8-js-empty.cc', + '<(INTERMEDIATE_DIR)/snapshot.cc', ], - 'action': ['python', '../../tools/js2c.py', '<@(_outputs)', 'D8', '<@(library_files)'], + 'action': ['<@(_inputs)', '<@(_outputs)'], }, ], }, - - # Targets to build v8 for the 
native architecture (ia32). { - 'target_name': 'v8_base', + 'target_name': 'v8_nosnapshot', 'type': '<(library)', - 'defines': [ - 'V8_TARGET_ARCH_IA32' + 'dependencies': [ + 'js2c', + 'v8_base', ], 'include_dirs+': [ '../../src', - '../../src/ia32', ], - 'msvs_guid': 'EC8B7909-62AF-470D-A75D-E1D89C837142', 'sources': [ - '<@(base_source_files)', + '<(SHARED_INTERMEDIATE_DIR)/libraries.cc', + '../../src/snapshot-empty.cc', ], - 'sources!': [ - '<@(not_base_source_files)', + }, + { + 'target_name': 'v8_base', + 'type': '<(library)', + 'include_dirs+': [ + '../../src', ], - 'sources/': [ - ['exclude', '-arm\\.cc$'], - ['exclude', 'src/platform-.*\\.cc$' ], + 'sources': [ + '../../src/accessors.cc', + '../../src/accessors.h', + '../../src/allocation.cc', + '../../src/allocation.h', + '../../src/api.cc', + '../../src/api.h', + '../../src/apiutils.h', + '../../src/arguments.h', + '../../src/assembler.cc', + '../../src/assembler.h', + '../../src/ast.cc', + '../../src/ast.h', + '../../src/bootstrapper.cc', + '../../src/bootstrapper.h', + '../../src/builtins.cc', + '../../src/builtins.h', + '../../src/bytecodes-irregexp.h', + '../../src/char-predicates-inl.h', + '../../src/char-predicates.h', + '../../src/checks.cc', + '../../src/checks.h', + '../../src/code-stubs.cc', + '../../src/code-stubs.h', + '../../src/code.h', + '../../src/codegen-inl.h', + '../../src/codegen.cc', + '../../src/codegen.h', + '../../src/compilation-cache.cc', + '../../src/compilation-cache.h', + '../../src/compiler.cc', + '../../src/compiler.h', + '../../src/contexts.cc', + '../../src/contexts.h', + '../../src/conversions-inl.h', + '../../src/conversions.cc', + '../../src/conversions.h', + '../../src/counters.cc', + '../../src/counters.h', + '../../src/cpu.h', + '../../src/dateparser.cc', + '../../src/dateparser.h', + '../../src/dateparser-inl.h', + '../../src/debug.cc', + '../../src/debug.h', + '../../src/debug-agent.cc', + '../../src/debug-agent.h', + '../../src/disasm.h', + 
'../../src/disassembler.cc', + '../../src/disassembler.h', + '../../src/dtoa-config.c', + '../../src/execution.cc', + '../../src/execution.h', + '../../src/factory.cc', + '../../src/factory.h', + '../../src/flag-definitions.h', + '../../src/flags.cc', + '../../src/flags.h', + '../../src/frames-inl.h', + '../../src/frames.cc', + '../../src/frames.h', + '../../src/frame-element.h', + '../../src/func-name-inferrer.cc', + '../../src/func-name-inferrer.h', + '../../src/global-handles.cc', + '../../src/global-handles.h', + '../../src/globals.h', + '../../src/handles-inl.h', + '../../src/handles.cc', + '../../src/handles.h', + '../../src/hashmap.cc', + '../../src/hashmap.h', + '../../src/heap-inl.h', + '../../src/heap.cc', + '../../src/heap.h', + '../../src/ic-inl.h', + '../../src/ic.cc', + '../../src/ic.h', + '../../src/interpreter-irregexp.cc', + '../../src/interpreter-irregexp.h', + '../../src/jump-target.cc', + '../../src/jump-target.h', + '../../src/jump-target-inl.h', + '../../src/jsregexp-inl.h', + '../../src/jsregexp.cc', + '../../src/jsregexp.h', + '../../src/list-inl.h', + '../../src/list.h', + '../../src/log.cc', + '../../src/log-inl.h', + '../../src/log.h', + '../../src/log-utils.cc', + '../../src/log-utils.h', + '../../src/macro-assembler.h', + '../../src/mark-compact.cc', + '../../src/mark-compact.h', + '../../src/memory.h', + '../../src/messages.cc', + '../../src/messages.h', + '../../src/natives.h', + '../../src/objects-debug.cc', + '../../src/objects-inl.h', + '../../src/objects.cc', + '../../src/objects.h', + '../../src/oprofile-agent.h', + '../../src/oprofile-agent.cc', + '../../src/parser.cc', + '../../src/parser.h', + '../../src/platform.h', + '../../src/prettyprinter.cc', + '../../src/prettyprinter.h', + '../../src/property.cc', + '../../src/property.h', + '../../src/regexp-macro-assembler-irregexp-inl.h', + '../../src/regexp-macro-assembler-irregexp.cc', + '../../src/regexp-macro-assembler-irregexp.h', + '../../src/regexp-macro-assembler-tracer.cc', 
+ '../../src/regexp-macro-assembler-tracer.h', + '../../src/regexp-macro-assembler.cc', + '../../src/regexp-macro-assembler.h', + '../../src/regexp-stack.cc', + '../../src/regexp-stack.h', + '../../src/register-allocator.h', + '../../src/register-allocator-inl.h', + '../../src/register-allocator.cc', + '../../src/rewriter.cc', + '../../src/rewriter.h', + '../../src/runtime.cc', + '../../src/runtime.h', + '../../src/scanner.cc', + '../../src/scanner.h', + '../../src/scopeinfo.cc', + '../../src/scopeinfo.h', + '../../src/scopes.cc', + '../../src/scopes.h', + '../../src/serialize.cc', + '../../src/serialize.h', + '../../src/shell.h', + '../../src/smart-pointer.h', + '../../src/snapshot-common.cc', + '../../src/snapshot.h', + '../../src/spaces-inl.h', + '../../src/spaces.cc', + '../../src/spaces.h', + '../../src/string-stream.cc', + '../../src/string-stream.h', + '../../src/stub-cache.cc', + '../../src/stub-cache.h', + '../../src/token.cc', + '../../src/token.h', + '../../src/top.cc', + '../../src/top.h', + '../../src/unicode-inl.h', + '../../src/unicode.cc', + '../../src/unicode.h', + '../../src/usage-analyzer.cc', + '../../src/usage-analyzer.h', + '../../src/utils.cc', + '../../src/utils.h', + '../../src/v8-counters.cc', + '../../src/v8-counters.h', + '../../src/v8.cc', + '../../src/v8.h', + '../../src/v8threads.cc', + '../../src/v8threads.h', + '../../src/variables.cc', + '../../src/variables.h', + '../../src/version.cc', + '../../src/version.h', + '../../src/virtual-frame.h', + '../../src/virtual-frame.cc', + '../../src/zone-inl.h', + '../../src/zone.cc', + '../../src/zone.h', ], 'conditions': [ - ['OS=="linux"', - { + ['target_arch=="arm"', { + 'include_dirs+': [ + '../../src/arm', + ], + 'sources': [ + '../../src/arm/assembler-arm-inl.h', + '../../src/arm/assembler-arm.cc', + '../../src/arm/assembler-arm.h', + '../../src/arm/builtins-arm.cc', + '../../src/arm/codegen-arm.cc', + '../../src/arm/codegen-arm.h', + '../../src/arm/constants-arm.h', + 
'../../src/arm/cpu-arm.cc', + '../../src/arm/debug-arm.cc', + '../../src/arm/disasm-arm.cc', + '../../src/arm/frames-arm.cc', + '../../src/arm/frames-arm.h', + '../../src/arm/ic-arm.cc', + '../../src/arm/jump-target-arm.cc', + '../../src/arm/macro-assembler-arm.cc', + '../../src/arm/macro-assembler-arm.h', + '../../src/arm/regexp-macro-assembler-arm.cc', + '../../src/arm/regexp-macro-assembler-arm.h', + '../../src/arm/register-allocator-arm.cc', + '../../src/arm/simulator-arm.cc', + '../../src/arm/stub-cache-arm.cc', + '../../src/arm/virtual-frame-arm.cc', + '../../src/arm/virtual-frame-arm.h', + ], + }], + ['target_arch=="ia32"', { + 'include_dirs+': [ + '../../src/ia32', + ], + 'sources': [ + '../../src/ia32/assembler-ia32-inl.h', + '../../src/ia32/assembler-ia32.cc', + '../../src/ia32/assembler-ia32.h', + '../../src/ia32/builtins-ia32.cc', + '../../src/ia32/codegen-ia32.cc', + '../../src/ia32/codegen-ia32.h', + '../../src/ia32/cpu-ia32.cc', + '../../src/ia32/debug-ia32.cc', + '../../src/ia32/disasm-ia32.cc', + '../../src/ia32/frames-ia32.cc', + '../../src/ia32/frames-ia32.h', + '../../src/ia32/ic-ia32.cc', + '../../src/ia32/jump-target-ia32.cc', + '../../src/ia32/macro-assembler-ia32.cc', + '../../src/ia32/macro-assembler-ia32.h', + '../../src/ia32/regexp-macro-assembler-ia32.cc', + '../../src/ia32/regexp-macro-assembler-ia32.h', + '../../src/ia32/register-allocator-ia32.cc', + '../../src/ia32/stub-cache-ia32.cc', + '../../src/ia32/virtual-frame-ia32.cc', + '../../src/ia32/virtual-frame-ia32.h', + ], + }], + ['OS=="linux"', { 'link_settings': { 'libraries': [ # Needed for clock_gettime() used by src/platform-linux.cc. 
'-lrt', - ], - }, - 'sources/': [ - ['include', 'src/platform-linux\\.cc$'], - ['include', 'src/platform-posix\\.cc$'] - ] + ]}, + 'sources': [ + '../../src/platform-linux.cc', + '../../src/platform-posix.cc' + ], } ], - ['OS=="mac"', - { - 'sources/': [ - ['include', 'src/platform-macos\\.cc$'], - ['include', 'src/platform-posix\\.cc$'] - ] - } + ['OS=="mac"', { + 'sources': [ + '../../src/platform-macos.cc', + '../../src/platform-posix.cc' + ]}, ], ['OS=="win"', { - 'sources/': [['include', 'src/platform-win32\\.cc$']], + 'sources': [ + '../../src/platform-win32.cc', + ], # 4355, 4800 came from common.vsprops # 4018, 4244 were a per file config on dtoa-config.c # TODO: It's probably possible and desirable to stop disabling the @@ -474,209 +462,65 @@ ], }, { - 'target_name': 'v8_nosnapshot', - 'type': '<(library)', - 'defines': [ - 'V8_TARGET_ARCH_IA32' - ], - 'dependencies': [ - 'js2c', - 'v8_base', - ], - 'include_dirs': [ - '../../src', - ], - 'sources': [ - '<(SHARED_INTERMEDIATE_DIR)/libraries.cc', - '../../src/snapshot-empty.cc', - ], - 'export_dependent_settings': [ - 'v8_base', - ], - }, - { - 'target_name': 'mksnapshot', - 'type': 'executable', - 'dependencies': [ - 'v8_nosnapshot', - ], - 'msvs_guid': '865575D0-37E2-405E-8CBA-5F6C485B5A26', - 'sources': [ - '../../src/mksnapshot.cc', - ], - }, - { - 'target_name': 'v8', - 'type': '<(library)', - 'defines': [ - 'V8_TARGET_ARCH_IA32' - ], - 'dependencies': [ - 'js2c', - 'mksnapshot', - 'v8_base', - ], - 'msvs_guid': '21E22961-22BF-4493-BD3A-868F93DA5179', + 'target_name': 'js2c', + 'type': 'none', + 'variables': { + 'library_files': [ + '../../src/runtime.js', + '../../src/v8natives.js', + '../../src/array.js', + '../../src/string.js', + '../../src/uri.js', + '../../src/math.js', + '../../src/messages.js', + '../../src/apinatives.js', + '../../src/debug-delay.js', + '../../src/mirror-delay.js', + '../../src/date-delay.js', + '../../src/json-delay.js', + '../../src/regexp-delay.js', + '../../src/macros.py', 
+ ], + }, 'actions': [ { - 'action_name': 'mksnapshot', + 'action_name': 'js2c', 'inputs': [ - '<(PRODUCT_DIR)/<(EXECUTABLE_PREFIX)mksnapshot<(EXECUTABLE_SUFFIX)', + '../../tools/js2c.py', + '<@(library_files)', ], 'outputs': [ - '<(INTERMEDIATE_DIR)/snapshot.cc', + '<(SHARED_INTERMEDIATE_DIR)/libraries.cc', + '<(SHARED_INTERMEDIATE_DIR)/libraries-empty.cc', + ], + 'action': [ + 'python', + '../../tools/js2c.py', + '<@(_outputs)', + 'CORE', + '<@(library_files)' ], - 'action': ['<@(_inputs)', '<@(_outputs)'], }, ], - 'include_dirs': [ - '../../src', - ], - 'sources': [ - '<(SHARED_INTERMEDIATE_DIR)/libraries-empty.cc', - '<(INTERMEDIATE_DIR)/snapshot.cc', - ], - 'direct_dependent_settings': { - 'include_dirs': [ - '../../include', - ], - }, - 'export_dependent_settings': [ - 'v8_base', - ], }, { - 'target_name': 'v8_shell', - 'type': 'executable', - 'defines': [ - 'V8_TARGET_ARCH_IA32' - ], - 'dependencies': [ - 'v8', - ], - 'sources': [ - '../../samples/shell.cc', - ], - 'conditions': [ - [ 'OS=="win"', { - # This could be gotten by not setting chromium_code, if that's OK. - 'defines': ['_CRT_SECURE_NO_WARNINGS'], - }], - ], - }, - ], - - 'conditions': [ ['OS=="mac"', { 'targets': [ - # TODO(bradnelson): temporarily disable 'd8' target on Windows while - # we work fix the performance regressions. - # TODO(sgk): temporarily disable 'd8' target on Linux while - # we work out getting the readline library on all the systems. 
- { - 'target_name': 'd8', + 'target_name': 'mksnapshot', 'type': 'executable', 'dependencies': [ - 'd8_js2c', - 'v8', - ], - 'defines': [ - 'V8_TARGET_ARCH_IA32' - ], - 'include_dirs': [ - '../../src', - ], - 'sources': [ - '<(SHARED_INTERMEDIATE_DIR)/d8-js.cc', - '<@(d8_source_files)', - ], - 'conditions': [ - [ 'OS=="linux"', { - 'sources!': [ '../../src/d8-windows.cc' ], - 'link_settings': { 'libraries': [ '-lreadline' ] }, - }], - [ 'OS=="mac"', { - 'sources!': [ '../../src/d8-windows.cc' ], - 'link_settings': { 'libraries': [ - '$(SDKROOT)/usr/lib/libreadline.dylib' - ]}, - }], - [ 'OS=="win"', { - 'sources!': [ '../../src/d8-readline.cc', '../../src/d8-posix.cc' ], - }], - ], - }, - # TODO(sgk): temporarily disable the arm targets on Linux while - # we work out how to refactor the generator and/or add configuration - # settings to the .gyp file to handle building both variants in - # the same output directory. - # - # ARM targets, to test ARM code generation. These use an ARM simulator - # (src/simulator-arm.cc). The ARM targets are not snapshot-enabled. 
- { - 'target_name': 'v8_arm', - 'type': '<(library)', - 'dependencies': [ - 'js2c', - ], - 'defines': [ - 'V8_TARGET_ARCH_ARM', + 'v8_nosnapshot', ], 'include_dirs+': [ '../../src', - '../../src/arm', ], 'sources': [ - '<@(base_source_files)', - '<(SHARED_INTERMEDIATE_DIR)/libraries.cc', - '../../src/snapshot-empty.cc', - ], - 'sources!': [ - '<@(not_base_source_files)', - ], - 'sources/': [ - ['exclude', '-ia32\\.cc$'], - ['exclude', 'src/platform-.*\\.cc$' ], - ], - 'direct_dependent_settings': { - 'include_dirs': [ - '../../include', - ], - }, - 'conditions': [ - ['OS=="linux"', - { - 'sources/': [ - ['include', 'src/platform-linux\\.cc$'], - ['include', 'src/platform-posix\\.cc$'] - ] - } - ], - ['OS=="mac"', - { - 'sources/': [ - ['include', 'src/platform-macos\\.cc$'], - ['include', 'src/platform-posix\\.cc$'] - ] - } - ], - ['OS=="win"', { - 'sources/': [['include', 'src/platform-win32\\.cc$']], - # 4355, 4800 came from common.vsprops - # 4018, 4244 were a per file config on dtoa-config.c - # TODO: It's probably possible and desirable to stop disabling the - # dtoa-specific warnings by modifying dtoa as was done in Chromium - # r9255. Refer to that revision for details. 
- 'msvs_disabled_warnings': [4355, 4800, 4018, 4244], - }], + '../../src/mksnapshot.cc', ], }, { - 'target_name': 'v8_shell_arm', + 'target_name': 'v8_shell', 'type': 'executable', 'dependencies': [ - 'v8_arm', - ], - 'defines': [ - 'V8_TARGET_ARCH_ARM', + 'v8' ], 'sources': [ '../../samples/shell.cc', @@ -688,55 +532,5 @@ }], ], }, - { - 'target_name': 'd8_arm', - 'type': 'executable', - 'dependencies': [ - 'd8_js2c', - 'v8_arm', - ], - 'defines': [ - 'V8_TARGET_ARCH_ARM', - ], - 'include_dirs': [ - '../../src', - ], - 'sources': [ - '<(SHARED_INTERMEDIATE_DIR)/d8-js.cc', - '<@(d8_source_files)', - ], - 'conditions': [ - [ 'OS=="linux"', { - 'sources!': [ '../../src/d8-windows.cc' ], - 'link_settings': { 'libraries': [ '-lreadline' ] }, - }], - [ 'OS=="mac"', { - 'sources!': [ '../../src/d8-windows.cc' ], - 'link_settings': { 'libraries': [ - '$(SDKROOT)/usr/lib/libreadline.dylib' - ]}, - }], - [ 'OS=="win"', { - 'sources!': [ '../../src/d8-readline.cc', '../../src/d8-posix.cc' ], - }], - ], - }, - ]}], # OS != "linux" (temporary, TODO(sgk)) - - - ['OS=="win"', { - 'target_defaults': { - 'defines': [ - '_USE_32BIT_TIME_T', - '_CRT_SECURE_NO_DEPRECATE', - '_CRT_NONSTDC_NO_DEPRECATE', - ], - 'msvs_settings': { - 'VCLinkerTool': { - 'AdditionalOptions': '/IGNORE:4221 /NXCOMPAT', - }, - }, - }, - }], ], } diff --git a/V8Binding/v8/tools/linux-tick-processor b/V8Binding/v8/tools/linux-tick-processor index c5130ff..ca1c721 100644..100755 --- a/V8Binding/v8/tools/linux-tick-processor +++ b/V8Binding/v8/tools/linux-tick-processor @@ -20,4 +20,5 @@ fi $d8_exec $tools_path/splaytree.js $tools_path/codemap.js \ $tools_path/csvparser.js $tools_path/consarray.js \ $tools_path/profile.js $tools_path/profile_view.js \ - $tools_path/logreader.js $tools_path/tickprocessor.js -- $@ 2>/dev/null + $tools_path/logreader.js $tools_path/tickprocessor.js \ + $tools_path/tickprocessor-driver.js -- $@ 2>/dev/null diff --git a/V8Binding/v8/tools/oprofile/annotate 
b/V8Binding/v8/tools/oprofile/annotate index a6a8545..a6a8545 100644..100755 --- a/V8Binding/v8/tools/oprofile/annotate +++ b/V8Binding/v8/tools/oprofile/annotate diff --git a/V8Binding/v8/tools/oprofile/common b/V8Binding/v8/tools/oprofile/common index fd00207..fd00207 100644..100755 --- a/V8Binding/v8/tools/oprofile/common +++ b/V8Binding/v8/tools/oprofile/common diff --git a/V8Binding/v8/tools/oprofile/dump b/V8Binding/v8/tools/oprofile/dump index 17bb0a1..17bb0a1 100644..100755 --- a/V8Binding/v8/tools/oprofile/dump +++ b/V8Binding/v8/tools/oprofile/dump diff --git a/V8Binding/v8/tools/oprofile/report b/V8Binding/v8/tools/oprofile/report index b7f28b9..b7f28b9 100644..100755 --- a/V8Binding/v8/tools/oprofile/report +++ b/V8Binding/v8/tools/oprofile/report diff --git a/V8Binding/v8/tools/oprofile/reset b/V8Binding/v8/tools/oprofile/reset index edb7071..edb7071 100644..100755 --- a/V8Binding/v8/tools/oprofile/reset +++ b/V8Binding/v8/tools/oprofile/reset diff --git a/V8Binding/v8/tools/oprofile/run b/V8Binding/v8/tools/oprofile/run index 0a92470..0a92470 100644..100755 --- a/V8Binding/v8/tools/oprofile/run +++ b/V8Binding/v8/tools/oprofile/run diff --git a/V8Binding/v8/tools/oprofile/shutdown b/V8Binding/v8/tools/oprofile/shutdown index 8ebb72f..8ebb72f 100644..100755 --- a/V8Binding/v8/tools/oprofile/shutdown +++ b/V8Binding/v8/tools/oprofile/shutdown diff --git a/V8Binding/v8/tools/oprofile/start b/V8Binding/v8/tools/oprofile/start index 059e4b8..059e4b8 100644..100755 --- a/V8Binding/v8/tools/oprofile/start +++ b/V8Binding/v8/tools/oprofile/start diff --git a/V8Binding/v8/tools/test.py b/V8Binding/v8/tools/test.py index f701ceb..05eb9fd 100755 --- a/V8Binding/v8/tools/test.py +++ b/V8Binding/v8/tools/test.py @@ -356,11 +356,15 @@ class TestCase(object): def RunCommand(self, command): full_command = self.context.processor(command) output = Execute(full_command, self.context, self.context.timeout) + self.Cleanup() return TestOutput(self, full_command, output) 
def Run(self): return self.RunCommand(self.GetCommand()) + def Cleanup(self): + return + class TestOutput(object): @@ -473,6 +477,13 @@ def PrintError(str): sys.stderr.write('\n') +def CheckedUnlink(name): + try: + os.unlink(name) + except OSError, e: + PrintError("os.unlink() " + str(e)) + + def Execute(args, context, timeout=None): (fd_out, outname) = tempfile.mkstemp() (fd_err, errname) = tempfile.mkstemp() @@ -487,11 +498,6 @@ def Execute(args, context, timeout=None): os.close(fd_err) output = file(outname).read() errors = file(errname).read() - def CheckedUnlink(name): - try: - os.unlink(name) - except OSError, e: - PrintError("os.unlink() " + str(e)) CheckedUnlink(outname) CheckedUnlink(errname) return CommandOutput(exit_code, timed_out, output, errors) diff --git a/V8Binding/v8/tools/tickprocessor-driver.js b/V8Binding/v8/tools/tickprocessor-driver.js new file mode 100644 index 0000000..f7cfd13 --- /dev/null +++ b/V8Binding/v8/tools/tickprocessor-driver.js @@ -0,0 +1,49 @@ +// Copyright 2009 the V8 project authors. All rights reserved. +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following +// disclaimer in the documentation and/or other materials provided +// with the distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived +// from this software without specific prior written permission. 
+// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +// Tick Processor's code flow. + +function processArguments(args) { + var processor = new ArgumentsProcessor(args); + if (processor.parse()) { + return processor.result(); + } else { + processor.printUsageAndExit(); + } +} + + +var params = processArguments(arguments); +var tickProcessor = new TickProcessor( + params.platform == 'unix' ? 
new UnixCppEntriesProvider(params.nm) : + new WindowsCppEntriesProvider(), + params.separateIc, + params.ignoreUnknown, + params.stateFilter); +tickProcessor.processLogFile(params.logFileName); +tickProcessor.printStatistics(); diff --git a/V8Binding/v8/tools/tickprocessor.js b/V8Binding/v8/tools/tickprocessor.js index 4afc69f..c95a4e6 100644 --- a/V8Binding/v8/tools/tickprocessor.js +++ b/V8Binding/v8/tools/tickprocessor.js @@ -288,7 +288,11 @@ TickProcessor.prototype.printStatistics = function() { function padLeft(s, len) { s = s.toString(); if (s.length < len) { - s = (new Array(len - s.length + 1).join(' ')) + s; + var padLength = len - s.length; + if (!(padLength in padLeft)) { + padLeft[padLength] = new Array(padLength + 1).join(' '); + } + s = padLeft[padLength] + s; } return s; }; @@ -511,25 +515,11 @@ WindowsCppEntriesProvider.prototype.unmangleName = function(name) { }; -function padRight(s, len) { - s = s.toString(); - if (s.length < len) { - s = s + (new Array(len - s.length + 1).join(' ')); - } - return s; -}; - +function ArgumentsProcessor(args) { + this.args_ = args; + this.result_ = ArgumentsProcessor.DEFAULTS; -function processArguments(args) { - var result = { - logFileName: 'v8.log', - platform: 'unix', - stateFilter: null, - ignoreUnknown: false, - separateIc: false, - nm: 'nm' - }; - var argsDispatch = { + this.argsDispatch_ = { '-j': ['stateFilter', TickProcessor.VmStates.JS, 'Show only ticks from JS VM state'], '-g': ['stateFilter', TickProcessor.VmStates.GC, @@ -551,63 +541,82 @@ function processArguments(args) { '--nm': ['nm', 'nm', 'Specify the \'nm\' executable to use (e.g. 
--nm=/my_dir/nm)'] }; - argsDispatch['--js'] = argsDispatch['-j']; - argsDispatch['--gc'] = argsDispatch['-g']; - argsDispatch['--compiler'] = argsDispatch['-c']; - argsDispatch['--other'] = argsDispatch['-o']; - argsDispatch['--external'] = argsDispatch['-e']; - - function printUsageAndExit() { - print('Cmdline args: [options] [log-file-name]\n' + - 'Default log file name is "v8.log".\n'); - print('Options:'); - for (var arg in argsDispatch) { - var synonims = [arg]; - var dispatch = argsDispatch[arg]; - for (var synArg in argsDispatch) { - if (arg !== synArg && dispatch === argsDispatch[synArg]) { - synonims.push(synArg); - delete argsDispatch[synArg]; - } - } - print(' ' + padRight(synonims.join(', '), 20) + dispatch[2]); - } - quit(2); - } + this.argsDispatch_['--js'] = this.argsDispatch_['-j']; + this.argsDispatch_['--gc'] = this.argsDispatch_['-g']; + this.argsDispatch_['--compiler'] = this.argsDispatch_['-c']; + this.argsDispatch_['--other'] = this.argsDispatch_['-o']; + this.argsDispatch_['--external'] = this.argsDispatch_['-e']; +}; + - while (args.length) { - var arg = args[0]; +ArgumentsProcessor.DEFAULTS = { + logFileName: 'v8.log', + platform: 'unix', + stateFilter: null, + ignoreUnknown: false, + separateIc: false, + nm: 'nm' +}; + + +ArgumentsProcessor.prototype.parse = function() { + while (this.args_.length) { + var arg = this.args_[0]; if (arg.charAt(0) != '-') { break; } - args.shift(); + this.args_.shift(); var userValue = null; var eqPos = arg.indexOf('='); if (eqPos != -1) { userValue = arg.substr(eqPos + 1); arg = arg.substr(0, eqPos); } - if (arg in argsDispatch) { - var dispatch = argsDispatch[arg]; - result[dispatch[0]] = userValue == null ? dispatch[1] : userValue; + if (arg in this.argsDispatch_) { + var dispatch = this.argsDispatch_[arg]; + this.result_[dispatch[0]] = userValue == null ? 
dispatch[1] : userValue; } else { - printUsageAndExit(); + return false; } } - if (args.length >= 1) { - result.logFileName = args.shift(); + if (this.args_.length >= 1) { + this.result_.logFileName = this.args_.shift(); } - return result; + return true; }; -var params = processArguments(arguments); -var tickProcessor = new TickProcessor( - params.platform == 'unix' ? new UnixCppEntriesProvider(params.nm) : - new WindowsCppEntriesProvider(), - params.separateIc, - params.ignoreUnknown, - params.stateFilter); -tickProcessor.processLogFile(params.logFileName); -tickProcessor.printStatistics(); +ArgumentsProcessor.prototype.result = function() { + return this.result_; +}; + + +ArgumentsProcessor.prototype.printUsageAndExit = function() { + + function padRight(s, len) { + s = s.toString(); + if (s.length < len) { + s = s + (new Array(len - s.length + 1).join(' ')); + } + return s; + } + + print('Cmdline args: [options] [log-file-name]\n' + + 'Default log file name is "' + + ArgumentsProcessor.DEFAULTS.logFileName + '".\n'); + print('Options:'); + for (var arg in this.argsDispatch_) { + var synonims = [arg]; + var dispatch = this.argsDispatch_[arg]; + for (var synArg in this.argsDispatch_) { + if (arg !== synArg && dispatch === this.argsDispatch_[synArg]) { + synonims.push(synArg); + delete this.argsDispatch_[synArg]; + } + } + print(' ' + padRight(synonims.join(', '), 20) + dispatch[2]); + } + quit(2); +}; + diff --git a/V8Binding/v8/tools/v8.xcodeproj/project.pbxproj b/V8Binding/v8/tools/v8.xcodeproj/project.pbxproj index 6e3d276..368ba3f 100755..100644 --- a/V8Binding/v8/tools/v8.xcodeproj/project.pbxproj +++ b/V8Binding/v8/tools/v8.xcodeproj/project.pbxproj @@ -1449,6 +1449,7 @@ GCC_PREPROCESSOR_DEFINITIONS = ( "$(GCC_PREPROCESSOR_DEFINITIONS)", V8_TARGET_ARCH_IA32, + V8_NATIVE_REGEXP, DEBUG, ); HEADER_SEARCH_PATHS = ../src; @@ -1462,6 +1463,7 @@ GCC_PREPROCESSOR_DEFINITIONS = ( "$(GCC_PREPROCESSOR_DEFINITIONS)", V8_TARGET_ARCH_IA32, + V8_NATIVE_REGEXP, NDEBUG, ); 
HEADER_SEARCH_PATHS = ../src; @@ -1477,6 +1479,7 @@ "$(GCC_PREPROCESSOR_DEFINITIONS)", ENABLE_DISASSEMBLER, V8_TARGET_ARCH_IA32, + V8_NATIVE_REGEXP, ENABLE_LOGGING_AND_PROFILING, ); HEADER_SEARCH_PATHS = ../src; @@ -1492,6 +1495,7 @@ GCC_PREPROCESSOR_DEFINITIONS = ( "$(GCC_PREPROCESSOR_DEFINITIONS)", V8_TARGET_ARCH_IA32, + V8_NATIVE_REGEXP, NDEBUG, ); HEADER_SEARCH_PATHS = ../src; diff --git a/V8Binding/v8/tools/visual_studio/ia32.vsprops b/V8Binding/v8/tools/visual_studio/ia32.vsprops index fda6c32..aff0871 100644 --- a/V8Binding/v8/tools/visual_studio/ia32.vsprops +++ b/V8Binding/v8/tools/visual_studio/ia32.vsprops @@ -6,6 +6,6 @@ > <Tool Name="VCCLCompilerTool" - PreprocessorDefinitions="V8_TARGET_ARCH_IA32" + PreprocessorDefinitions="V8_TARGET_ARCH_IA32;V8_NATIVE_REGEXP" /> </VisualStudioPropertySheet> diff --git a/V8Binding/v8/tools/windows-tick-processor.bat b/V8Binding/v8/tools/windows-tick-processor.bat index 67cbe98..6743f68 100644..100755 --- a/V8Binding/v8/tools/windows-tick-processor.bat +++ b/V8Binding/v8/tools/windows-tick-processor.bat @@ -2,4 +2,4 @@ SET tools_dir=%~dp0 -%tools_dir%..\d8 %tools_dir%splaytree.js %tools_dir%codemap.js %tools_dir%csvparser.js %tools_dir%consarray.js %tools_dir%profile.js %tools_dir%profile_view.js %tools_dir%logreader.js %tools_dir%tickprocessor.js -- --windows %* +%tools_dir%..\d8 %tools_dir%splaytree.js %tools_dir%codemap.js %tools_dir%csvparser.js %tools_dir%consarray.js %tools_dir%profile.js %tools_dir%profile_view.js %tools_dir%logreader.js %tools_dir%tickprocessor.js %tools_dir%tickprocessor-driver.js -- --windows %* diff --git a/WEBKIT_MERGE_REVISION b/WEBKIT_MERGE_REVISION index a8ad1af..e66dc02 100644 --- a/WEBKIT_MERGE_REVISION +++ b/WEBKIT_MERGE_REVISION @@ -2,4 +2,4 @@ We sync with Chromium release revision, which has both webkit revision and V8 re http://src.chromium.org/svn/branches/187/src@18043 http://svn.webkit.org/repository/webkit/trunk@44544 - http://v8.googlecode.com/svn/branches/trunk@2361 + 
http://v8.googlecode.com/svn/branches/bleeding_edge@2450 |