
Upgrade V8 to 2.3.0

v0.7.4-release
Ryan Dahl, 15 years ago
parent commit dcd41ca864
  1. deps/v8/ChangeLog (8)
  2. deps/v8/include/v8-debug.h (16)
  3. deps/v8/src/accessors.cc (4)
  4. deps/v8/src/api.cc (6)
  5. deps/v8/src/arm/codegen-arm.cc (183)
  6. deps/v8/src/arm/full-codegen-arm.cc (9)
  7. deps/v8/src/arm/stub-cache-arm.cc (7)
  8. deps/v8/src/codegen.h (27)
  9. deps/v8/src/compiler.cc (10)
  10. deps/v8/src/contexts.cc (22)
  11. deps/v8/src/debug.cc (116)
  12. deps/v8/src/debug.h (29)
  13. deps/v8/src/factory.cc (2)
  14. deps/v8/src/factory.h (2)
  15. deps/v8/src/frames.cc (4)
  16. deps/v8/src/globals.h (1)
  17. deps/v8/src/heap.cc (11)
  18. deps/v8/src/ia32/codegen-ia32.cc (8)
  19. deps/v8/src/ia32/stub-cache-ia32.cc (6)
  20. deps/v8/src/objects-inl.h (14)
  21. deps/v8/src/objects.h (2)
  22. deps/v8/src/parser.cc (6)
  23. deps/v8/src/profile-generator.cc (7)
  24. deps/v8/src/runtime.cc (53)
  25. deps/v8/src/scopeinfo.cc (275)
  26. deps/v8/src/scopeinfo.h (81)
  27. deps/v8/src/v8natives.js (44)
  28. deps/v8/src/version.cc (4)
  29. deps/v8/src/x64/codegen-x64.cc (73)
  30. deps/v8/src/x64/stub-cache-x64.cc (6)
  31. deps/v8/test/cctest/test-debug.cc (65)
  32. deps/v8/test/es5conform/es5conform.status (11)
  33. deps/v8/test/mjsunit/call-stub.js (15)
  34. deps/v8/test/mjsunit/object-freeze.js (19)
  35. deps/v8/test/mjsunit/object-seal.js (195)
  36. deps/v8/test/mjsunit/regress/regress-r4998.js (94)

8
deps/v8/ChangeLog

@ -1,3 +1,11 @@
2010-07-15: Version 2.3.0
Added ES5 Object.seal and Object.isSealed.
Added debugger API for scheduling debugger commands from a
separate thread.
2010-07-14: Version 2.2.24
Added API for capturing stack traces for uncaught exceptions.

16
deps/v8/include/v8-debug.h

@ -76,7 +76,8 @@ enum DebugEvent {
NewFunction = 3,
BeforeCompile = 4,
AfterCompile = 5,
ScriptCollected = 6
ScriptCollected = 6,
BreakForCommand = 7
};
@ -172,6 +173,13 @@ class EXPORT Debug {
*/
virtual Handle<Value> GetCallbackData() const = 0;
/**
* Client data passed to DebugBreakForCommand function. The
* debugger takes ownership of the data and will delete it even if
* there is no message handler.
*/
virtual ClientData* GetClientData() const = 0;
virtual ~EventDetails() {}
};
@ -248,6 +256,12 @@ class EXPORT Debug {
// Break execution of JavaScript.
static void DebugBreak();
// Break execution of JavaScript (this method can be invoked from a
// non-VM thread) for further client command execution on a VM
// thread. Client data is then passed in EventDetails to
// EventCallback at the moment when the VM actually stops.
static void DebugBreakForCommand(ClientData* data = NULL);
// Message based interface. The message protocol is JSON. NOTE: the message
// handler thread is not supported any more; the parameter must be false.
static void SetMessageHandler(MessageHandler handler,
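
For orientation, here is a minimal sketch (not part of the patch) of how an embedder might use the new DebugBreakForCommand API; the MyCommand class, the OnDebugEvent callback, and its registration via SetDebugEventListener2 are assumed names used for illustration only.

// Sketch: request a break from a worker thread and receive the ClientData
// back on the VM thread once execution has actually stopped.
#include <v8.h>
#include <v8-debug.h>

// Hypothetical payload. Per the comment above, the debugger takes ownership
// and deletes it, so the embedder must not delete it in the callback.
class MyCommand : public v8::Debug::ClientData {
 public:
  explicit MyCommand(int id) : id_(id) {}
  int id() const { return id_; }
 private:
  int id_;
};

// Assumed to have been registered with v8::Debug::SetDebugEventListener2.
static void OnDebugEvent(const v8::Debug::EventDetails& details) {
  if (details.GetEvent() == v8::BreakForCommand) {
    // Runs on the VM thread at the moment the VM actually stops.
    MyCommand* cmd = static_cast<MyCommand*>(details.GetClientData());
    if (cmd != NULL) {
      // ... run the client command, e.g. keyed by cmd->id(), against
      // details.GetExecutionState() ...
    }
  }
}

// May be called from a non-VM thread; the VM breaks at its next opportunity.
void RequestBreakForCommand() {
  v8::Debug::DebugBreakForCommand(new MyCommand(42));
}

DebugBreakForCommand itself only enqueues the request and sets the debug-command break flag; the BreakForCommand callback fires later on the VM thread (see the debug.cc changes below).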

4
deps/v8/src/accessors.cc

@ -549,8 +549,8 @@ Object* Accessors::FunctionGetArguments(Object* object, void*) {
if (frame->function() != *function) continue;
// If there is an arguments variable in the stack, we return that.
int index = ScopeInfo<>::StackSlotIndex(function->shared()->scope_info(),
Heap::arguments_symbol());
int index = function->shared()->scope_info()->
StackSlotIndex(Heap::arguments_symbol());
if (index >= 0) {
Handle<Object> arguments = Handle<Object>(frame->GetExpression(index));
if (!arguments->IsTheHole()) return *arguments;

6
deps/v8/src/api.cc

@ -4213,6 +4213,12 @@ void Debug::DebugBreak() {
}
void Debug::DebugBreakForCommand(ClientData* data) {
if (!i::V8::IsRunning()) return;
i::Debugger::EnqueueDebugCommand(data);
}
static v8::Debug::MessageHandler message_handler = NULL;
static void MessageHandlerWrapper(const v8::Debug::Message& message) {

183
deps/v8/src/arm/codegen-arm.cc

@ -54,11 +54,15 @@ static void EmitIdenticalObjectComparison(MacroAssembler* masm,
Condition cc,
bool never_nan_nan);
static void EmitSmiNonsmiComparison(MacroAssembler* masm,
Register lhs,
Register rhs,
Label* lhs_not_nan,
Label* slow,
bool strict);
static void EmitTwoNonNanDoubleComparison(MacroAssembler* masm, Condition cc);
static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm);
static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm,
Register lhs,
Register rhs);
static void MultiplyByKnownInt(MacroAssembler* masm,
Register source,
Register destination,
@ -1404,11 +1408,7 @@ void CodeGenerator::Comparison(Condition cc,
// Perform non-smi comparison by stub.
// CompareStub takes arguments in r0 and r1, returns <0, >0 or 0 in r0.
// We call with 0 args because there are 0 on the stack.
if (!rhs.is(r0)) {
__ Swap(rhs, lhs, ip);
}
CompareStub stub(cc, strict);
CompareStub stub(cc, strict, kBothCouldBeNaN, true, lhs, rhs);
frame_->CallStub(&stub, 0);
__ cmp(r0, Operand(0));
exit.Jump();
@ -6968,7 +6968,7 @@ static void EmitIdenticalObjectComparison(MacroAssembler* masm,
// undefined >= undefined should fail.
__ mov(r0, Operand(LESS));
}
__ mov(pc, Operand(lr)); // Return.
__ Ret();
}
}
}
@ -6982,7 +6982,7 @@ static void EmitIdenticalObjectComparison(MacroAssembler* masm,
} else {
__ mov(r0, Operand(EQUAL)); // Things are <=, >=, ==, === themselves.
}
__ mov(pc, Operand(lr)); // Return.
__ Ret();
if (cc != eq || !never_nan_nan) {
// For less and greater we don't have to check for NaN since the result of
@ -7014,14 +7014,14 @@ static void EmitIdenticalObjectComparison(MacroAssembler* masm,
// value if it's a NaN.
if (cc != eq) {
// All-zero means Infinity means equal.
__ mov(pc, Operand(lr), LeaveCC, eq); // Return equal
__ Ret(eq);
if (cc == le) {
__ mov(r0, Operand(GREATER)); // NaN <= NaN should fail.
} else {
__ mov(r0, Operand(LESS)); // NaN >= NaN should fail.
}
}
__ mov(pc, Operand(lr)); // Return.
__ Ret();
}
// No fall through here.
}
@ -7032,43 +7032,50 @@ static void EmitIdenticalObjectComparison(MacroAssembler* masm,
// See comment at call site.
static void EmitSmiNonsmiComparison(MacroAssembler* masm,
Register lhs,
Register rhs,
Label* lhs_not_nan,
Label* slow,
bool strict) {
ASSERT((lhs.is(r0) && rhs.is(r1)) ||
(lhs.is(r1) && rhs.is(r0)));
Label rhs_is_smi;
__ tst(r0, Operand(kSmiTagMask));
__ tst(rhs, Operand(kSmiTagMask));
__ b(eq, &rhs_is_smi);
// Lhs is a Smi. Check whether the rhs is a heap number.
__ CompareObjectType(r0, r4, r4, HEAP_NUMBER_TYPE);
__ CompareObjectType(rhs, r4, r4, HEAP_NUMBER_TYPE);
if (strict) {
// If rhs is not a number and lhs is a Smi then strict equality cannot
// succeed. Return non-equal (r0 is already not zero)
__ mov(pc, Operand(lr), LeaveCC, ne); // Return.
// succeed. Return non-equal
// If rhs is r0 then there is already a non zero value in it.
if (!rhs.is(r0)) {
__ mov(r0, Operand(NOT_EQUAL), LeaveCC, ne);
}
__ Ret(ne);
} else {
// Smi compared non-strictly with a non-Smi non-heap-number. Call
// the runtime.
__ b(ne, slow);
}
// Lhs (r1) is a smi, rhs (r0) is a number.
// Lhs is a smi, rhs is a number.
if (CpuFeatures::IsSupported(VFP3)) {
// Convert lhs to a double in d7 .
// Convert lhs to a double in d7.
CpuFeatures::Scope scope(VFP3);
__ mov(r7, Operand(r1, ASR, kSmiTagSize));
__ vmov(s15, r7);
__ vcvt_f64_s32(d7, s15);
__ SmiToDoubleVFPRegister(lhs, d7, r7, s15);
// Load the double from rhs, tagged HeapNumber r0, to d6.
__ sub(r7, r0, Operand(kHeapObjectTag));
__ sub(r7, rhs, Operand(kHeapObjectTag));
__ vldr(d6, r7, HeapNumber::kValueOffset);
} else {
__ push(lr);
// Convert lhs to a double in r2, r3.
__ mov(r7, Operand(r1));
__ mov(r7, Operand(lhs));
ConvertToDoubleStub stub1(r3, r2, r7, r6);
__ Call(stub1.GetCode(), RelocInfo::CODE_TARGET);
// Load rhs to a double in r0, r1.
__ Ldrd(r0, r1, FieldMemOperand(r0, HeapNumber::kValueOffset));
__ Ldrd(r0, r1, FieldMemOperand(rhs, HeapNumber::kValueOffset));
__ pop(lr);
}
@ -7078,34 +7085,35 @@ static void EmitSmiNonsmiComparison(MacroAssembler* masm,
__ bind(&rhs_is_smi);
// Rhs is a smi. Check whether the non-smi lhs is a heap number.
__ CompareObjectType(r1, r4, r4, HEAP_NUMBER_TYPE);
__ CompareObjectType(lhs, r4, r4, HEAP_NUMBER_TYPE);
if (strict) {
// If lhs is not a number and rhs is a smi then strict equality cannot
// succeed. Return non-equal.
__ mov(r0, Operand(1), LeaveCC, ne); // Non-zero indicates not equal.
__ mov(pc, Operand(lr), LeaveCC, ne); // Return.
// If lhs is r0 then there is already a non zero value in it.
if (!lhs.is(r0)) {
__ mov(r0, Operand(NOT_EQUAL), LeaveCC, ne);
}
__ Ret(ne);
} else {
// Smi compared non-strictly with a non-smi non-heap-number. Call
// the runtime.
__ b(ne, slow);
}
// Rhs (r0) is a smi, lhs (r1) is a heap number.
// Rhs is a smi, lhs is a heap number.
if (CpuFeatures::IsSupported(VFP3)) {
// Convert rhs to a double in d6 .
CpuFeatures::Scope scope(VFP3);
// Load the double from lhs, tagged HeapNumber r1, to d7.
__ sub(r7, r1, Operand(kHeapObjectTag));
__ sub(r7, lhs, Operand(kHeapObjectTag));
__ vldr(d7, r7, HeapNumber::kValueOffset);
__ mov(r7, Operand(r0, ASR, kSmiTagSize));
__ vmov(s13, r7);
__ vcvt_f64_s32(d6, s13);
// Convert rhs to a double in d6 .
__ SmiToDoubleVFPRegister(rhs, d6, r7, s13);
} else {
__ push(lr);
// Load lhs to a double in r2, r3.
__ Ldrd(r2, r3, FieldMemOperand(r1, HeapNumber::kValueOffset));
__ Ldrd(r2, r3, FieldMemOperand(lhs, HeapNumber::kValueOffset));
// Convert rhs to a double in r0, r1.
__ mov(r7, Operand(r0));
__ mov(r7, Operand(rhs));
ConvertToDoubleStub stub2(r1, r0, r7, r6);
__ Call(stub2.GetCode(), RelocInfo::CODE_TARGET);
__ pop(lr);
@ -7159,7 +7167,7 @@ void EmitNanCheck(MacroAssembler* masm, Label* lhs_not_nan, Condition cc) {
} else {
__ mov(r0, Operand(LESS));
}
__ mov(pc, Operand(lr)); // Return.
__ Ret();
__ bind(&neither_is_nan);
}
@ -7180,11 +7188,11 @@ static void EmitTwoNonNanDoubleComparison(MacroAssembler* masm, Condition cc) {
__ cmp(rhs_mantissa, Operand(lhs_mantissa));
__ orr(r0, rhs_mantissa, Operand(lhs_mantissa), LeaveCC, ne);
// Return non-zero if the numbers are unequal.
__ mov(pc, Operand(lr), LeaveCC, ne);
__ Ret(ne);
__ sub(r0, rhs_exponent, Operand(lhs_exponent), SetCC);
// If exponents are equal then return 0.
__ mov(pc, Operand(lr), LeaveCC, eq);
__ Ret(eq);
// Exponents are unequal. The only way we can return that the numbers
// are equal is if one is -0 and the other is 0. We already dealt
@ -7194,11 +7202,11 @@ static void EmitTwoNonNanDoubleComparison(MacroAssembler* masm, Condition cc) {
// equal.
__ orr(r4, lhs_mantissa, Operand(lhs_exponent, LSL, kSmiTagSize), SetCC);
__ mov(r0, Operand(r4), LeaveCC, ne);
__ mov(pc, Operand(lr), LeaveCC, ne); // Return conditionally.
__ Ret(ne);
// Now they are equal if and only if the lhs exponent is zero in its
// low 31 bits.
__ mov(r0, Operand(rhs_exponent, LSL, kSmiTagSize));
__ mov(pc, Operand(lr));
__ Ret();
} else {
// Call a native function to do a comparison between two non-NaNs.
// Call C routine that may not cause GC or other trouble.
@ -7211,7 +7219,12 @@ static void EmitTwoNonNanDoubleComparison(MacroAssembler* masm, Condition cc) {
// See comment at call site.
static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm) {
static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm,
Register lhs,
Register rhs) {
ASSERT((lhs.is(r0) && rhs.is(r1)) ||
(lhs.is(r1) && rhs.is(r0)));
// If either operand is a JSObject or an oddball value, then they are
// not equal since their pointers are different.
// There is no test for undetectability in strict equality.
@ -7219,20 +7232,20 @@ static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm) {
Label first_non_object;
// Get the type of the first operand into r2 and compare it with
// FIRST_JS_OBJECT_TYPE.
__ CompareObjectType(r0, r2, r2, FIRST_JS_OBJECT_TYPE);
__ CompareObjectType(rhs, r2, r2, FIRST_JS_OBJECT_TYPE);
__ b(lt, &first_non_object);
// Return non-zero (r0 is not zero)
Label return_not_equal;
__ bind(&return_not_equal);
__ mov(pc, Operand(lr)); // Return.
__ Ret();
__ bind(&first_non_object);
// Check for oddballs: true, false, null, undefined.
__ cmp(r2, Operand(ODDBALL_TYPE));
__ b(eq, &return_not_equal);
__ CompareObjectType(r1, r3, r3, FIRST_JS_OBJECT_TYPE);
__ CompareObjectType(lhs, r3, r3, FIRST_JS_OBJECT_TYPE);
__ b(ge, &return_not_equal);
// Check for oddballs: true, false, null, undefined.
@ -7251,12 +7264,17 @@ static void EmitStrictTwoHeapObjectCompare(MacroAssembler* masm) {
// See comment at call site.
static void EmitCheckForTwoHeapNumbers(MacroAssembler* masm,
Register lhs,
Register rhs,
Label* both_loaded_as_doubles,
Label* not_heap_numbers,
Label* slow) {
__ CompareObjectType(r0, r3, r2, HEAP_NUMBER_TYPE);
ASSERT((lhs.is(r0) && rhs.is(r1)) ||
(lhs.is(r1) && rhs.is(r0)));
__ CompareObjectType(rhs, r3, r2, HEAP_NUMBER_TYPE);
__ b(ne, not_heap_numbers);
__ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
__ ldr(r2, FieldMemOperand(lhs, HeapObject::kMapOffset));
__ cmp(r2, r3);
__ b(ne, slow); // First was a heap number, second wasn't. Go slow case.
@ -7264,13 +7282,13 @@ static void EmitCheckForTwoHeapNumbers(MacroAssembler* masm,
// for that.
if (CpuFeatures::IsSupported(VFP3)) {
CpuFeatures::Scope scope(VFP3);
__ sub(r7, r0, Operand(kHeapObjectTag));
__ sub(r7, rhs, Operand(kHeapObjectTag));
__ vldr(d6, r7, HeapNumber::kValueOffset);
__ sub(r7, r1, Operand(kHeapObjectTag));
__ sub(r7, lhs, Operand(kHeapObjectTag));
__ vldr(d7, r7, HeapNumber::kValueOffset);
} else {
__ Ldrd(r2, r3, FieldMemOperand(r1, HeapNumber::kValueOffset));
__ Ldrd(r0, r1, FieldMemOperand(r0, HeapNumber::kValueOffset));
__ Ldrd(r2, r3, FieldMemOperand(lhs, HeapNumber::kValueOffset));
__ Ldrd(r0, r1, FieldMemOperand(rhs, HeapNumber::kValueOffset));
}
__ jmp(both_loaded_as_doubles);
}
@ -7278,9 +7296,14 @@ static void EmitCheckForTwoHeapNumbers(MacroAssembler* masm,
// Fast negative check for symbol-to-symbol equality.
static void EmitCheckForSymbolsOrObjects(MacroAssembler* masm,
Register lhs,
Register rhs,
Label* possible_strings,
Label* not_both_strings) {
// r2 is object type of r0.
ASSERT((lhs.is(r0) && rhs.is(r1)) ||
(lhs.is(r1) && rhs.is(r0)));
// r2 is object type of rhs.
// Ensure that no non-strings have the symbol bit set.
Label object_test;
ASSERT(kSymbolTag != 0);
@ -7288,31 +7311,31 @@ static void EmitCheckForSymbolsOrObjects(MacroAssembler* masm,
__ b(ne, &object_test);
__ tst(r2, Operand(kIsSymbolMask));
__ b(eq, possible_strings);
__ CompareObjectType(r1, r3, r3, FIRST_NONSTRING_TYPE);
__ CompareObjectType(lhs, r3, r3, FIRST_NONSTRING_TYPE);
__ b(ge, not_both_strings);
__ tst(r3, Operand(kIsSymbolMask));
__ b(eq, possible_strings);
// Both are symbols. We already checked they weren't the same pointer
// so they are not equal.
__ mov(r0, Operand(1)); // Non-zero indicates not equal.
__ mov(pc, Operand(lr)); // Return.
__ mov(r0, Operand(NOT_EQUAL));
__ Ret();
__ bind(&object_test);
__ cmp(r2, Operand(FIRST_JS_OBJECT_TYPE));
__ b(lt, not_both_strings);
__ CompareObjectType(r1, r2, r3, FIRST_JS_OBJECT_TYPE);
__ CompareObjectType(lhs, r2, r3, FIRST_JS_OBJECT_TYPE);
__ b(lt, not_both_strings);
// If both objects are undetectable, they are equal. Otherwise, they
// If both objects are undetectable, they are equal. Otherwise, they
// are not equal, since they are different objects and an object is not
// equal to undefined.
__ ldr(r3, FieldMemOperand(r0, HeapObject::kMapOffset));
__ ldr(r3, FieldMemOperand(rhs, HeapObject::kMapOffset));
__ ldrb(r2, FieldMemOperand(r2, Map::kBitFieldOffset));
__ ldrb(r3, FieldMemOperand(r3, Map::kBitFieldOffset));
__ and_(r0, r2, Operand(r3));
__ and_(r0, r0, Operand(1 << Map::kIsUndetectable));
__ eor(r0, r0, Operand(1 << Map::kIsUndetectable));
__ mov(pc, Operand(lr)); // Return.
__ Ret();
}
@ -7434,10 +7457,13 @@ void RecordWriteStub::Generate(MacroAssembler* masm) {
}
// On entry r0 (rhs) and r1 (lhs) are the values to be compared.
// On entry lhs_ and rhs_ are the values to be compared.
// On exit r0 is 0, positive or negative to indicate the result of
// the comparison.
void CompareStub::Generate(MacroAssembler* masm) {
ASSERT((lhs_.is(r0) && rhs_.is(r1)) ||
(lhs_.is(r1) && rhs_.is(r0)));
Label slow; // Call builtin.
Label not_smis, both_loaded_as_doubles, lhs_not_nan;
@ -7452,7 +7478,7 @@ void CompareStub::Generate(MacroAssembler* masm) {
// be strictly equal if the other is a HeapNumber.
ASSERT_EQ(0, kSmiTag);
ASSERT_EQ(0, Smi::FromInt(0));
__ and_(r2, r0, Operand(r1));
__ and_(r2, lhs_, Operand(rhs_));
__ tst(r2, Operand(kSmiTagMask));
__ b(ne, &not_smis);
// One operand is a smi. EmitSmiNonsmiComparison generates code that can:
@ -7464,7 +7490,7 @@ void CompareStub::Generate(MacroAssembler* masm) {
// comparison. If VFP3 is supported the double values of the numbers have
// been loaded into d7 and d6. Otherwise, the double values have been loaded
// into r0, r1, r2, and r3.
EmitSmiNonsmiComparison(masm, &lhs_not_nan, &slow, strict_);
EmitSmiNonsmiComparison(masm, lhs_, rhs_, &lhs_not_nan, &slow, strict_);
__ bind(&both_loaded_as_doubles);
// The arguments have been converted to doubles and stored in d6 and d7, if
@ -7481,7 +7507,7 @@ void CompareStub::Generate(MacroAssembler* masm) {
__ mov(r0, Operand(EQUAL), LeaveCC, eq);
__ mov(r0, Operand(LESS), LeaveCC, lt);
__ mov(r0, Operand(GREATER), LeaveCC, gt);
__ mov(pc, Operand(lr));
__ Ret();
__ bind(&nan);
// If one of the sides was a NaN then the v flag is set. Load r0 with
@ -7492,7 +7518,7 @@ void CompareStub::Generate(MacroAssembler* masm) {
} else {
__ mov(r0, Operand(LESS));
}
__ mov(pc, Operand(lr));
__ Ret();
} else {
// Checks for NaN in the doubles we have loaded. Can return the answer or
// fall through if neither is a NaN. Also binds lhs_not_nan.
@ -7504,11 +7530,11 @@ void CompareStub::Generate(MacroAssembler* masm) {
__ bind(&not_smis);
// At this point we know we are dealing with two different objects,
// and neither of them is a Smi. The objects are in r0 and r1.
// and neither of them is a Smi. The objects are in rhs_ and lhs_.
if (strict_) {
// This returns non-equal for some object types, or falls through if it
// was not lucky.
EmitStrictTwoHeapObjectCompare(masm);
EmitStrictTwoHeapObjectCompare(masm, lhs_, rhs_);
}
Label check_for_symbols;
@ -7516,8 +7542,10 @@ void CompareStub::Generate(MacroAssembler* masm) {
// Check for heap-number-heap-number comparison. Can jump to slow case,
// or load both doubles into r0, r1, r2, r3 and jump to the code that handles
// that case. If the inputs are not doubles then jumps to check_for_symbols.
// In this case r2 will contain the type of r0. Never falls through.
// In this case r2 will contain the type of rhs_. Never falls through.
EmitCheckForTwoHeapNumbers(masm,
lhs_,
rhs_,
&both_loaded_as_doubles,
&check_for_symbols,
&flat_string_check);
@ -7528,20 +7556,20 @@ void CompareStub::Generate(MacroAssembler* masm) {
if (cc_ == eq && !strict_) {
// Returns an answer for two symbols or two detectable objects.
// Otherwise jumps to string case or not both strings case.
// Assumes that r2 is the type of r0 on entry.
EmitCheckForSymbolsOrObjects(masm, &flat_string_check, &slow);
// Assumes that r2 is the type of rhs_ on entry.
EmitCheckForSymbolsOrObjects(masm, lhs_, rhs_, &flat_string_check, &slow);
}
// Check for both being sequential ASCII strings, and inline if that is the
// case.
__ bind(&flat_string_check);
__ JumpIfNonSmisNotBothSequentialAsciiStrings(r0, r1, r2, r3, &slow);
__ JumpIfNonSmisNotBothSequentialAsciiStrings(lhs_, rhs_, r2, r3, &slow);
__ IncrementCounter(&Counters::string_compare_native, 1, r2, r3);
StringCompareStub::GenerateCompareFlatAsciiStrings(masm,
r1,
r0,
lhs_,
rhs_,
r2,
r3,
r4,
@ -7550,7 +7578,7 @@ void CompareStub::Generate(MacroAssembler* masm) {
__ bind(&slow);
__ Push(r1, r0);
__ Push(lhs_, rhs_);
// Figure out which native to call and setup the arguments.
Builtins::JavaScript native;
if (cc_ == eq) {
@ -10051,6 +10079,9 @@ void CallFunctionStub::Generate(MacroAssembler* masm) {
// Unfortunately you have to run without snapshots to see most of these
// names in the profile since most compare stubs end up in the snapshot.
const char* CompareStub::GetName() {
ASSERT((lhs_.is(r0) && rhs_.is(r1)) ||
(lhs_.is(r1) && rhs_.is(r0)));
if (name_ != NULL) return name_;
const int kMaxNameLength = 100;
name_ = Bootstrapper::AllocateAutoDeletedArray(kMaxNameLength);
@ -10067,6 +10098,9 @@ const char* CompareStub::GetName() {
default: cc_name = "UnknownCondition"; break;
}
const char* lhs_name = lhs_.is(r0) ? "_r0" : "_r1";
const char* rhs_name = rhs_.is(r0) ? "_r0" : "_r1";
const char* strict_name = "";
if (strict_ && (cc_ == eq || cc_ == ne)) {
strict_name = "_STRICT";
@ -10083,8 +10117,10 @@ const char* CompareStub::GetName() {
}
OS::SNPrintF(Vector<char>(name_, kMaxNameLength),
"CompareStub_%s%s%s%s",
"CompareStub_%s%s%s%s%s%s",
cc_name,
lhs_name,
rhs_name,
strict_name,
never_nan_nan_name,
include_number_compare_name);
@ -10096,8 +10132,11 @@ int CompareStub::MinorKey() {
// Encode the three parameters in a unique 16 bit value. To avoid duplicate
// stubs the never NaN NaN condition is only taken into account if the
// condition is equals.
ASSERT((static_cast<unsigned>(cc_) >> 28) < (1 << 13));
ASSERT((static_cast<unsigned>(cc_) >> 28) < (1 << 12));
ASSERT((lhs_.is(r0) && rhs_.is(r1)) ||
(lhs_.is(r1) && rhs_.is(r0)));
return ConditionField::encode(static_cast<unsigned>(cc_) >> 28)
| RegisterField::encode(lhs_.is(r0))
| StrictField::encode(strict_)
| NeverNanNanField::encode(cc_ == eq ? never_nan_nan_ : false)
| IncludeNumberCompareField::encode(include_number_compare_);

9
deps/v8/src/arm/full-codegen-arm.cc

@ -822,8 +822,7 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
// the smi vs. smi case to be handled before it is called.
Label slow_case;
__ ldr(r1, MemOperand(sp, 0)); // Switch value.
__ mov(r2, r1);
__ orr(r2, r2, r0);
__ orr(r2, r1, r0);
__ tst(r2, Operand(kSmiTagMask));
__ b(ne, &slow_case);
__ cmp(r1, r0);
@ -832,9 +831,9 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
__ b(clause->body_target()->entry_label());
__ bind(&slow_case);
CompareStub stub(eq, true);
CompareStub stub(eq, true, kBothCouldBeNaN, true, r1, r0);
__ CallStub(&stub);
__ tst(r0, r0);
__ cmp(r0, Operand(0));
__ b(ne, &next_test);
__ Drop(1); // Switch value is no longer needed.
__ b(clause->body_target()->entry_label());
@ -3088,7 +3087,7 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
__ jmp(if_false);
__ bind(&slow_case);
CompareStub stub(cc, strict);
CompareStub stub(cc, strict, kBothCouldBeNaN, true, r1, r0);
__ CallStub(&stub);
__ cmp(r0, Operand(0));
__ b(cc, if_true);

7
deps/v8/src/arm/stub-cache-arm.cc

@ -176,6 +176,13 @@ static void GenerateDictionaryNegativeLookup(MacroAssembler* masm,
__ cmp(entity_name, Operand(Handle<String>(name)));
__ b(eq, miss_label);
// Check if the entry name is not a symbol.
__ ldr(entity_name, FieldMemOperand(entity_name, HeapObject::kMapOffset));
__ ldrb(entity_name,
FieldMemOperand(entity_name, Map::kInstanceTypeOffset));
__ tst(entity_name, Operand(kIsSymbolMask));
__ b(eq, miss_label);
// Restore the properties.
__ ldr(properties,
FieldMemOperand(receiver, JSObject::kPropertiesOffset));

27
deps/v8/src/codegen.h

@ -461,11 +461,15 @@ class CompareStub: public CodeStub {
CompareStub(Condition cc,
bool strict,
NaNInformation nan_info = kBothCouldBeNaN,
bool include_number_compare = true) :
bool include_number_compare = true,
Register lhs = no_reg,
Register rhs = no_reg) :
cc_(cc),
strict_(strict),
never_nan_nan_(nan_info == kCantBothBeNaN),
include_number_compare_(include_number_compare),
lhs_(lhs),
rhs_(rhs),
name_(NULL) { }
void Generate(MacroAssembler* masm);
@ -483,12 +487,19 @@ class CompareStub: public CodeStub {
// comparison code is used when the number comparison has been inlined, and
// the stub will be called if one of the operands is not a number.
bool include_number_compare_;
// Encoding of the minor key CCCCCCCCCCCCCCNS.
// Register holding the left hand side of the comparison if the stub gives
// a choice, no_reg otherwise.
Register lhs_;
// Register holding the right hand side of the comparison if the stub gives
// a choice, no_reg otherwise.
Register rhs_;
// Encoding of the minor key CCCCCCCCCCCCRCNS.
class StrictField: public BitField<bool, 0, 1> {};
class NeverNanNanField: public BitField<bool, 1, 1> {};
class IncludeNumberCompareField: public BitField<bool, 2, 1> {};
class ConditionField: public BitField<int, 3, 13> {};
class RegisterField: public BitField<bool, 3, 1> {};
class ConditionField: public BitField<int, 4, 12> {};
Major MajorKey() { return Compare; }
@ -507,11 +518,17 @@ class CompareStub: public CodeStub {
#ifdef DEBUG
void Print() {
PrintF("CompareStub (cc %d), (strict %s), "
"(never_nan_nan %s), (number_compare %s)\n",
"(never_nan_nan %s), (number_compare %s) ",
static_cast<int>(cc_),
strict_ ? "true" : "false",
never_nan_nan_ ? "true" : "false",
include_number_compare_ ? "included" : "not included");
if (!lhs_.is(no_reg) && !rhs_.is(no_reg)) {
PrintF("(lhs r%d), (rhs r%d)\n", lhs_.code(), rhs_.code());
} else {
PrintF("\n");
}
}
#endif
};
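
As a side note, the following standalone sketch (not from the patch) spells out the new CCCCCCCCCCCCRCNS minor-key layout with plain shifts instead of the BitField helpers; the function and variable names are made up for illustration.

// Bit layout mirroring the fields above: S = strict (bit 0),
// N = never-NaN-NaN (bit 1), C = include-number-compare (bit 2),
// R = lhs-is-r0 (bit 3), and the condition in bits 4..15.
#include <cassert>
#include <cstdio>

unsigned EncodeCompareMinorKey(unsigned condition, bool lhs_is_r0,
                               bool include_number_compare,
                               bool never_nan_nan, bool strict) {
  assert(condition < (1u << 12));  // the condition now gets 12 bits, not 13
  return (condition << 4) |
         (static_cast<unsigned>(lhs_is_r0) << 3) |
         (static_cast<unsigned>(include_number_compare) << 2) |
         (static_cast<unsigned>(never_nan_nan) << 1) |
         static_cast<unsigned>(strict);
}

int main() {
  // Condition 0x1, lhs in r0, number compare inlined, non-strict gives
  // 0x001c; with strict set the low bit is added as well.
  std::printf("0x%04x\n", EncodeCompareMinorKey(0x1, true, true, false, true));
  return 0;  // prints 0x001d
}

The extra register bit is why ConditionField shrinks from 13 bits to 12, and why the ARM MinorKey below asserts that the shifted condition fits in 12 bits.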

10
deps/v8/src/compiler.cc

@ -160,7 +160,7 @@ Handle<Code> MakeCodeForLiveEdit(CompilationInfo* info) {
Handle<Code> code = MakeCode(context, info);
if (!info->shared_info().is_null()) {
info->shared_info()->set_scope_info(
*ScopeInfo<>::CreateHeapObject(info->scope()));
*SerializedScopeInfo::Create(info->scope()));
}
return code;
}
@ -262,7 +262,7 @@ static Handle<SharedFunctionInfo> MakeFunctionInfo(bool is_global,
lit->name(),
lit->materialized_literal_count(),
code,
ScopeInfo<>::CreateHeapObject(info.scope()));
SerializedScopeInfo::Create(info.scope()));
ASSERT_EQ(RelocInfo::kNoPosition, lit->function_token_position());
Compiler::SetFunctionInfo(result, lit, true, script);
@ -450,7 +450,7 @@ bool Compiler::CompileLazy(CompilationInfo* info) {
// Update the shared function info with the compiled code and the scope info.
shared->set_code(*code);
shared->set_scope_info(*ScopeInfo<>::CreateHeapObject(info->scope()));
shared->set_scope_info(*SerializedScopeInfo::Create(info->scope()));
// Set the expected number of properties for instances.
SetExpectedNofPropertiesFromEstimate(shared, lit->expected_property_count());
@ -485,7 +485,7 @@ Handle<SharedFunctionInfo> Compiler::BuildFunctionInfo(FunctionLiteral* literal,
bool allow_lazy = literal->AllowsLazyCompilation() &&
!LiveEditFunctionTracker::IsActive();
Handle<Object> scope_info(ScopeInfo<>::EmptyHeapObject());
Handle<SerializedScopeInfo> scope_info(SerializedScopeInfo::Empty());
// Generate code
Handle<Code> code;
@ -568,7 +568,7 @@ Handle<SharedFunctionInfo> Compiler::BuildFunctionInfo(FunctionLiteral* literal,
literal->start_position(),
script,
code);
scope_info = ScopeInfo<>::CreateHeapObject(info.scope());
scope_info = SerializedScopeInfo::Create(info.scope());
}
// Create a shared function info object.

22
deps/v8/src/contexts.cc

@ -120,9 +120,10 @@ Handle<Object> Context::Lookup(Handle<String> name, ContextLookupFlags flags,
// we have context-local slots
// check non-parameter locals in context
Handle<Object> scope_info(context->closure()->shared()->scope_info());
Handle<SerializedScopeInfo> scope_info(
context->closure()->shared()->scope_info());
Variable::Mode mode;
int index = ScopeInfo<>::ContextSlotIndex(*scope_info, *name, &mode);
int index = scope_info->ContextSlotIndex(*name, &mode);
ASSERT(index < 0 || index >= MIN_CONTEXT_SLOTS);
if (index >= 0) {
// slot found
@ -150,13 +151,11 @@ Handle<Object> Context::Lookup(Handle<String> name, ContextLookupFlags flags,
}
// check parameter locals in context
int param_index = ScopeInfo<>::ParameterIndex(*scope_info, *name);
int param_index = scope_info->ParameterIndex(*name);
if (param_index >= 0) {
// slot found.
int index =
ScopeInfo<>::ContextSlotIndex(*scope_info,
Heap::arguments_shadow_symbol(),
NULL);
scope_info->ContextSlotIndex(Heap::arguments_shadow_symbol(), NULL);
ASSERT(index >= 0); // arguments must exist and be in the heap context
Handle<JSObject> arguments(JSObject::cast(context->get(index)));
ASSERT(arguments->HasLocalProperty(Heap::length_symbol()));
@ -170,7 +169,7 @@ Handle<Object> Context::Lookup(Handle<String> name, ContextLookupFlags flags,
// check intermediate context (holding only the function name variable)
if (follow_context_chain) {
int index = ScopeInfo<>::FunctionContextSlotIndex(*scope_info, *name);
int index = scope_info->FunctionContextSlotIndex(*name);
if (index >= 0) {
// slot found
if (FLAG_trace_contexts) {
@ -216,18 +215,19 @@ bool Context::GlobalIfNotShadowedByEval(Handle<String> name) {
ASSERT(context->is_function_context());
// Check non-parameter locals.
Handle<Object> scope_info(context->closure()->shared()->scope_info());
Handle<SerializedScopeInfo> scope_info(
context->closure()->shared()->scope_info());
Variable::Mode mode;
int index = ScopeInfo<>::ContextSlotIndex(*scope_info, *name, &mode);
int index = scope_info->ContextSlotIndex(*name, &mode);
ASSERT(index < 0 || index >= MIN_CONTEXT_SLOTS);
if (index >= 0) return false;
// Check parameter locals.
int param_index = ScopeInfo<>::ParameterIndex(*scope_info, *name);
int param_index = scope_info->ParameterIndex(*name);
if (param_index >= 0) return false;
// Check context only holding the function name variable.
index = ScopeInfo<>::FunctionContextSlotIndex(*scope_info, *name);
index = scope_info->FunctionContextSlotIndex(*name);
if (index >= 0) return false;
context = Context::cast(context->closure()->context());
}

116
deps/v8/src/debug.cc

@ -1882,6 +1882,7 @@ int Debugger::host_dispatch_micros_ = 100 * 1000;
DebuggerAgent* Debugger::agent_ = NULL;
LockingCommandMessageQueue Debugger::command_queue_(kQueueInitialSize);
Semaphore* Debugger::command_received_ = OS::CreateSemaphore(0);
LockingCommandMessageQueue Debugger::event_command_queue_(kQueueInitialSize);
Handle<Object> Debugger::MakeJSObject(Vector<const char> constructor_name,
@ -2207,39 +2208,75 @@ void Debugger::ProcessDebugEvent(v8::DebugEvent event,
event_data,
auto_continue);
}
// Notify registered debug event listener. This can be either a C or a
// JavaScript function.
if (!event_listener_.is_null()) {
if (event_listener_->IsProxy()) {
// C debug event listener.
Handle<Proxy> callback_obj(Handle<Proxy>::cast(event_listener_));
v8::Debug::EventCallback2 callback =
FUNCTION_CAST<v8::Debug::EventCallback2>(callback_obj->proxy());
EventDetailsImpl event_details(
event,
Handle<JSObject>::cast(exec_state),
event_data,
event_listener_data_);
callback(event_details);
} else {
// JavaScript debug event listener.
ASSERT(event_listener_->IsJSFunction());
Handle<JSFunction> fun(Handle<JSFunction>::cast(event_listener_));
// Invoke the JavaScript debug event listener.
const int argc = 4;
Object** argv[argc] = { Handle<Object>(Smi::FromInt(event)).location(),
exec_state.location(),
Handle<Object>::cast(event_data).location(),
event_listener_data_.location() };
Handle<Object> result = Execution::TryCall(fun, Top::global(),
argc, argv, &caught_exception);
// Silently ignore exceptions from debug event listeners.
// Notify registered debug event listener. This can be either a C or
// a JavaScript function. Don't call event listener for v8::Break
// here, if it's only a debug command -- they will be processed later.
if ((event != v8::Break || !auto_continue) && !event_listener_.is_null()) {
CallEventCallback(event, exec_state, event_data, NULL);
}
// Process pending debug commands.
if (event == v8::Break) {
while (!event_command_queue_.IsEmpty()) {
CommandMessage command = event_command_queue_.Get();
if (!event_listener_.is_null()) {
CallEventCallback(v8::BreakForCommand,
exec_state,
event_data,
command.client_data());
}
command.Dispose();
}
}
}
void Debugger::CallEventCallback(v8::DebugEvent event,
Handle<Object> exec_state,
Handle<Object> event_data,
v8::Debug::ClientData* client_data) {
if (event_listener_->IsProxy()) {
CallCEventCallback(event, exec_state, event_data, client_data);
} else {
CallJSEventCallback(event, exec_state, event_data);
}
}
void Debugger::CallCEventCallback(v8::DebugEvent event,
Handle<Object> exec_state,
Handle<Object> event_data,
v8::Debug::ClientData* client_data) {
Handle<Proxy> callback_obj(Handle<Proxy>::cast(event_listener_));
v8::Debug::EventCallback2 callback =
FUNCTION_CAST<v8::Debug::EventCallback2>(callback_obj->proxy());
EventDetailsImpl event_details(
event,
Handle<JSObject>::cast(exec_state),
Handle<JSObject>::cast(event_data),
event_listener_data_,
client_data);
callback(event_details);
}
void Debugger::CallJSEventCallback(v8::DebugEvent event,
Handle<Object> exec_state,
Handle<Object> event_data) {
ASSERT(event_listener_->IsJSFunction());
Handle<JSFunction> fun(Handle<JSFunction>::cast(event_listener_));
// Invoke the JavaScript debug event listener.
const int argc = 4;
Object** argv[argc] = { Handle<Object>(Smi::FromInt(event)).location(),
exec_state.location(),
Handle<Object>::cast(event_data).location(),
event_listener_data_.location() };
bool caught_exception = false;
Execution::TryCall(fun, Top::global(), argc, argv, &caught_exception);
// Silently ignore exceptions from debug event listeners.
}
Handle<Context> Debugger::GetDebugContext() {
never_unload_debugger_ = true;
EnterDebugger debugger;
@ -2273,6 +2310,7 @@ void Debugger::NotifyMessageHandler(v8::DebugEvent event,
bool sendEventMessage = false;
switch (event) {
case v8::Break:
case v8::BreakForCommand:
sendEventMessage = !auto_continue;
break;
case v8::Exception:
@ -2560,6 +2598,17 @@ bool Debugger::HasCommands() {
}
void Debugger::EnqueueDebugCommand(v8::Debug::ClientData* client_data) {
CommandMessage message = CommandMessage::New(Vector<uint16_t>(), client_data);
event_command_queue_.Put(message);
// Set the debug command break flag to have the command processed.
if (!Debug::InDebugger()) {
StackGuard::DebugCommand();
}
}
bool Debugger::IsDebuggerActive() {
ScopedLock with(debugger_access_);
@ -2761,11 +2810,13 @@ v8::Debug::ClientData* MessageImpl::GetClientData() const {
EventDetailsImpl::EventDetailsImpl(DebugEvent event,
Handle<JSObject> exec_state,
Handle<JSObject> event_data,
Handle<Object> callback_data)
Handle<Object> callback_data,
v8::Debug::ClientData* client_data)
: event_(event),
exec_state_(exec_state),
event_data_(event_data),
callback_data_(callback_data) {}
callback_data_(callback_data),
client_data_(client_data) {}
DebugEvent EventDetailsImpl::GetEvent() const {
@ -2793,6 +2844,11 @@ v8::Handle<v8::Value> EventDetailsImpl::GetCallbackData() const {
}
v8::Debug::ClientData* EventDetailsImpl::GetClientData() const {
return client_data_;
}
CommandMessage::CommandMessage() : text_(Vector<uint16_t>::empty()),
client_data_(NULL) {
}

29
deps/v8/src/debug.h

@ -566,18 +566,21 @@ class EventDetailsImpl : public v8::Debug::EventDetails {
EventDetailsImpl(DebugEvent event,
Handle<JSObject> exec_state,
Handle<JSObject> event_data,
Handle<Object> callback_data);
Handle<Object> callback_data,
v8::Debug::ClientData* client_data);
virtual DebugEvent GetEvent() const;
virtual v8::Handle<v8::Object> GetExecutionState() const;
virtual v8::Handle<v8::Object> GetEventData() const;
virtual v8::Handle<v8::Context> GetEventContext() const;
virtual v8::Handle<v8::Value> GetCallbackData() const;
virtual v8::Debug::ClientData* GetClientData() const;
private:
DebugEvent event_; // Debug event causing the break.
Handle<JSObject> exec_state_; // Current execution state.
Handle<JSObject> event_data_; // Data associated with the event.
Handle<Object> callback_data_; // User data passed with the callback when
// it was registered.
Handle<JSObject> exec_state_; // Current execution state.
Handle<JSObject> event_data_; // Data associated with the event.
Handle<Object> callback_data_; // User data passed with the callback
// when it was registered.
v8::Debug::ClientData* client_data_; // Data passed to DebugBreakForCommand.
};
@ -706,6 +709,9 @@ class Debugger {
// Check whether there are commands in the command queue.
static bool HasCommands();
// Enqueue a debugger command to the command queue for event listeners.
static void EnqueueDebugCommand(v8::Debug::ClientData* client_data = NULL);
static Handle<Object> Call(Handle<JSFunction> fun,
Handle<Object> data,
bool* pending_exception);
@ -753,6 +759,17 @@ class Debugger {
static bool IsDebuggerActive();
private:
static void CallEventCallback(v8::DebugEvent event,
Handle<Object> exec_state,
Handle<Object> event_data,
v8::Debug::ClientData* client_data);
static void CallCEventCallback(v8::DebugEvent event,
Handle<Object> exec_state,
Handle<Object> event_data,
v8::Debug::ClientData* client_data);
static void CallJSEventCallback(v8::DebugEvent event,
Handle<Object> exec_state,
Handle<Object> event_data);
static void ListenersChanged();
static Mutex* debugger_access_; // Mutex guarding debugger variables.
@ -775,6 +792,8 @@ class Debugger {
static LockingCommandMessageQueue command_queue_;
static Semaphore* command_received_; // Signaled for each command received.
static LockingCommandMessageQueue event_command_queue_;
friend class EnterDebugger;
};

2
deps/v8/src/factory.cc

@ -684,7 +684,7 @@ Handle<SharedFunctionInfo> Factory::NewSharedFunctionInfo(
Handle<String> name,
int number_of_literals,
Handle<Code> code,
Handle<Object> scope_info) {
Handle<SerializedScopeInfo> scope_info) {
Handle<SharedFunctionInfo> shared = NewSharedFunctionInfo(name);
shared->set_code(*code);
shared->set_scope_info(*scope_info);

2
deps/v8/src/factory.h

@ -351,7 +351,7 @@ class Factory : public AllStatic {
Handle<String> name,
int number_of_literals,
Handle<Code> code,
Handle<Object> scope_info);
Handle<SerializedScopeInfo> scope_info);
static Handle<SharedFunctionInfo> NewSharedFunctionInfo(Handle<String> name);
static Handle<NumberDictionary> DictionaryAtNumberPut(

4
deps/v8/src/frames.cc

@ -532,11 +532,11 @@ void JavaScriptFrame::Print(StringStream* accumulator,
if (IsConstructor()) accumulator->Add("new ");
accumulator->PrintFunction(function, receiver, &code);
Handle<Object> scope_info(ScopeInfo<>::EmptyHeapObject());
Handle<SerializedScopeInfo> scope_info(SerializedScopeInfo::Empty());
if (function->IsJSFunction()) {
Handle<SharedFunctionInfo> shared(JSFunction::cast(function)->shared());
scope_info = Handle<Object>(shared->scope_info());
scope_info = Handle<SerializedScopeInfo>(shared->scope_info());
Object* script_obj = shared->script();
if (script_obj->IsScript()) {
Handle<Script> script(Script::cast(script_obj));

1
deps/v8/src/globals.h

@ -326,6 +326,7 @@ class RegExpCompiler;
class RegExpVisitor;
class Scope;
template<class Allocator = FreeStoreAllocationPolicy> class ScopeInfo;
class SerializedScopeInfo;
class Script;
class Slot;
class Smi;

11
deps/v8/src/heap.cc

@ -2055,7 +2055,7 @@ Object* Heap::AllocateSharedFunctionInfo(Object* name) {
share->set_name(name);
Code* illegal = Builtins::builtin(Builtins::Illegal);
share->set_code(illegal);
share->set_scope_info(ScopeInfo<>::EmptyHeapObject());
share->set_scope_info(SerializedScopeInfo::Empty());
Code* construct_stub = Builtins::builtin(Builtins::JSConstructStubGeneric);
share->set_construct_stub(construct_stub);
share->set_expected_nof_properties(0);
@ -2480,16 +2480,9 @@ static void FlushCodeForFunction(SharedFunctionInfo* function_info) {
ThreadManager::IterateArchivedThreads(&threadvisitor);
if (threadvisitor.FoundCode()) return;
// Check that there are heap allocated locals in the scopeinfo. If
// there is, we are potentially using eval and need the scopeinfo
// for variable resolution.
if (ScopeInfo<>::HasHeapAllocatedLocals(function_info->scope_info()))
return;
// Compute the lazy compilable version of the code.
HandleScope scope;
// Compute the lazy compilable version of the code, clear the scope info.
function_info->set_code(*ComputeLazyCompile(function_info->length()));
function_info->set_scope_info(ScopeInfo<>::EmptyHeapObject());
}

8
deps/v8/src/ia32/codegen-ia32.cc

@ -11635,6 +11635,8 @@ static int NegativeComparisonResult(Condition cc) {
void CompareStub::Generate(MacroAssembler* masm) {
ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg));
Label check_unequal_objects, done;
// NOTICE! This code is only reached after a smi-fast-case check, so
@ -12528,8 +12530,10 @@ int CompareStub::MinorKey() {
// Encode the three parameters in a unique 16 bit value. To avoid duplicate
// stubs the never NaN NaN condition is only taken into account if the
// condition is equals.
ASSERT(static_cast<unsigned>(cc_) < (1 << 13));
ASSERT(static_cast<unsigned>(cc_) < (1 << 12));
ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg));
return ConditionField::encode(static_cast<unsigned>(cc_))
| RegisterField::encode(false) // lhs_ and rhs_ are not used
| StrictField::encode(strict_)
| NeverNanNanField::encode(cc_ == equal ? never_nan_nan_ : false)
| IncludeNumberCompareField::encode(include_number_compare_);
@ -12539,6 +12543,8 @@ int CompareStub::MinorKey() {
// Unfortunately you have to run without snapshots to see most of these
// names in the profile since most compare stubs end up in the snapshot.
const char* CompareStub::GetName() {
ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg));
if (name_ != NULL) return name_;
const int kMaxNameLength = 100;
name_ = Bootstrapper::AllocateAutoDeletedArray(kMaxNameLength);

6
deps/v8/src/ia32/stub-cache-ia32.cc

@ -184,6 +184,12 @@ static void GenerateDictionaryNegativeLookup(MacroAssembler* masm,
// Stop if found the property.
__ cmp(entity_name, Handle<String>(name));
__ j(equal, miss_label, not_taken);
// Check if the entry name is not a symbol.
__ mov(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset));
__ test_b(FieldOperand(entity_name, Map::kInstanceTypeOffset),
kIsSymbolMask);
__ j(zero, miss_label, not_taken);
} else {
// Give up probing if still not found the undefined value.
__ j(not_equal, miss_label, not_taken);

14
deps/v8/src/objects-inl.h

@ -2511,7 +2511,6 @@ ACCESSORS(BreakPointInfo, break_point_objects, Object, kBreakPointObjectsIndex)
#endif
ACCESSORS(SharedFunctionInfo, name, Object, kNameOffset)
ACCESSORS(SharedFunctionInfo, scope_info, Object, kScopeInfoOffset)
ACCESSORS(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset)
ACCESSORS(SharedFunctionInfo, instance_class_name, Object,
kInstanceClassNameOffset)
@ -2648,6 +2647,19 @@ void SharedFunctionInfo::set_code(Code* value, WriteBarrierMode mode) {
}
SerializedScopeInfo* SharedFunctionInfo::scope_info() {
return reinterpret_cast<SerializedScopeInfo*>(
READ_FIELD(this, kScopeInfoOffset));
}
void SharedFunctionInfo::set_scope_info(SerializedScopeInfo* value,
WriteBarrierMode mode) {
WRITE_FIELD(this, kScopeInfoOffset, reinterpret_cast<Object*>(value));
CONDITIONAL_WRITE_BARRIER(this, kScopeInfoOffset, mode);
}
bool SharedFunctionInfo::is_compiled() {
// TODO(1242782): Create a code kind for uncompiled code.
return code()->kind() != Code::STUB;

2
deps/v8/src/objects.h

@ -3273,7 +3273,7 @@ class SharedFunctionInfo: public HeapObject {
DECL_ACCESSORS(code, Code)
// [scope_info]: Scope info.
DECL_ACCESSORS(scope_info, Object)
DECL_ACCESSORS(scope_info, SerializedScopeInfo)
// [construct stub]: Code stub for constructing instances of this function.
DECL_ACCESSORS(construct_stub, Code)

6
deps/v8/src/parser.cc

@ -36,6 +36,7 @@
#include "parser.h"
#include "platform.h"
#include "runtime.h"
#include "scopeinfo.h"
#include "scopes.h"
#include "string-stream.h"
@ -1968,8 +1969,9 @@ Statement* Parser::ParseNativeDeclaration(bool* ok) {
const int literals = fun->NumberOfLiterals();
Handle<Code> code = Handle<Code>(fun->shared()->code());
Handle<Code> construct_stub = Handle<Code>(fun->shared()->construct_stub());
Handle<SharedFunctionInfo> shared = Factory::NewSharedFunctionInfo(
name, literals, code, Handle<Object>(fun->shared()->scope_info()));
Handle<SharedFunctionInfo> shared =
Factory::NewSharedFunctionInfo(name, literals, code,
Handle<SerializedScopeInfo>(fun->shared()->scope_info()));
shared->set_construct_stub(*construct_stub);
// Copy the function data to the shared function info.

7
deps/v8/src/profile-generator.cc

@ -1555,12 +1555,13 @@ void HeapSnapshotGenerator::ExtractClosureReferences(JSObject* js_obj,
JSFunction* func = JSFunction::cast(js_obj);
Context* context = func->context();
ZoneScope zscope(DELETE_ON_EXIT);
Object* scope_info = context->closure()->shared()->scope_info();
ScopeInfo<ZoneListAllocationPolicy> zone_scope_info(scope_info);
SerializedScopeInfo* serialized_scope_info =
context->closure()->shared()->scope_info();
ScopeInfo<ZoneListAllocationPolicy> zone_scope_info(serialized_scope_info);
int locals_number = zone_scope_info.NumberOfLocals();
for (int i = 0; i < locals_number; ++i) {
String* local_name = *zone_scope_info.LocalName(i);
int idx = ScopeInfo<>::ContextSlotIndex(scope_info, local_name, NULL);
int idx = serialized_scope_info->ContextSlotIndex(local_name, NULL);
if (idx >= 0 && idx < context->length()) {
snapshot_->SetClosureReference(entry, local_name, context->get(idx));
}

53
deps/v8/src/runtime.cc

@ -6869,8 +6869,7 @@ static Object* Runtime_NewContext(Arguments args) {
ASSERT(args.length() == 1);
CONVERT_CHECKED(JSFunction, function, args[0]);
int length =
ScopeInfo<>::NumberOfContextSlots(function->shared()->scope_info());
int length = function->shared()->scope_info()->NumberOfContextSlots();
Object* result = Heap::AllocateFunctionContext(length, function);
if (result->IsFailure()) return result;
@ -8492,7 +8491,7 @@ static Object* Runtime_GetFrameDetails(Arguments args) {
// Get scope info and read from it for local variable information.
Handle<JSFunction> function(JSFunction::cast(it.frame()->function()));
Handle<Object> scope_info(function->shared()->scope_info());
Handle<SerializedScopeInfo> scope_info(function->shared()->scope_info());
ScopeInfo<> info(*scope_info);
// Get the context.
@ -8521,9 +8520,7 @@ static Object* Runtime_GetFrameDetails(Arguments args) {
}
ASSERT(context->is_function_context());
locals->set(i * 2 + 1,
context->get(ScopeInfo<>::ContextSlotIndex(*scope_info,
*name,
NULL)));
context->get(scope_info->ContextSlotIndex(*name, NULL)));
}
}
@ -8663,18 +8660,17 @@ static Object* Runtime_GetFrameDetails(Arguments args) {
// Copy all the context locals into an object used to materialize a scope.
static void CopyContextLocalsToScopeObject(Handle<SharedFunctionInfo> shared,
ScopeInfo<>& scope_info,
Handle<Context> context,
Handle<JSObject> scope_object) {
static void CopyContextLocalsToScopeObject(
Handle<SerializedScopeInfo> serialized_scope_info,
ScopeInfo<>& scope_info,
Handle<Context> context,
Handle<JSObject> scope_object) {
// Fill all context locals to the context extension.
for (int i = Context::MIN_CONTEXT_SLOTS;
i < scope_info.number_of_context_slots();
i++) {
int context_index =
ScopeInfo<>::ContextSlotIndex(shared->scope_info(),
*scope_info.context_slot_name(i),
NULL);
int context_index = serialized_scope_info->ContextSlotIndex(
*scope_info.context_slot_name(i), NULL);
// Don't include the arguments shadow (.arguments) context variable.
if (*scope_info.context_slot_name(i) != Heap::arguments_shadow_symbol()) {
@ -8691,7 +8687,8 @@ static void CopyContextLocalsToScopeObject(Handle<SharedFunctionInfo> shared,
static Handle<JSObject> MaterializeLocalScope(JavaScriptFrame* frame) {
Handle<JSFunction> function(JSFunction::cast(frame->function()));
Handle<SharedFunctionInfo> shared(function->shared());
ScopeInfo<> scope_info(shared->scope_info());
Handle<SerializedScopeInfo> serialized_scope_info(shared->scope_info());
ScopeInfo<> scope_info(*serialized_scope_info);
// Allocate and initialize a JSObject with all the arguments, stack locals
// heap locals and extension properties of the debugged function.
@ -8714,7 +8711,7 @@ static Handle<JSObject> MaterializeLocalScope(JavaScriptFrame* frame) {
// Third fill all context locals.
Handle<Context> frame_context(Context::cast(frame->context()));
Handle<Context> function_context(frame_context->fcontext());
CopyContextLocalsToScopeObject(shared, scope_info,
CopyContextLocalsToScopeObject(serialized_scope_info, scope_info,
function_context, local_scope);
// Finally copy any properties from the function context extension. This will
@ -8742,7 +8739,8 @@ static Handle<JSObject> MaterializeClosure(Handle<Context> context) {
ASSERT(context->is_function_context());
Handle<SharedFunctionInfo> shared(context->closure()->shared());
ScopeInfo<> scope_info(shared->scope_info());
Handle<SerializedScopeInfo> serialized_scope_info(shared->scope_info());
ScopeInfo<> scope_info(*serialized_scope_info);
// Allocate and initialize a JSObject with all the content of this function
// closure.
@ -8750,9 +8748,8 @@ static Handle<JSObject> MaterializeClosure(Handle<Context> context) {
// Check whether the arguments shadow object exists.
int arguments_shadow_index =
ScopeInfo<>::ContextSlotIndex(shared->scope_info(),
Heap::arguments_shadow_symbol(),
NULL);
shared->scope_info()->ContextSlotIndex(Heap::arguments_shadow_symbol(),
NULL);
if (arguments_shadow_index >= 0) {
// In this case all the arguments are available in the arguments shadow
// object.
@ -8766,7 +8763,8 @@ static Handle<JSObject> MaterializeClosure(Handle<Context> context) {
}
// Fill all context locals to the context extension.
CopyContextLocalsToScopeObject(shared, scope_info, context, closure_scope);
CopyContextLocalsToScopeObject(serialized_scope_info, scope_info,
context, closure_scope);
// Finally copy any properties from the function context extension. This will
// be variables introduced by eval.
@ -8815,8 +8813,8 @@ class ScopeIterator {
// created for evaluating top level code and it is not a real local scope.
// Checking for the existence of .result seems fragile, but the scope info
// saved with the code object does not otherwise have that information.
int index = ScopeInfo<>::StackSlotIndex(function_->shared()->scope_info(),
Heap::result_symbol());
int index = function_->shared()->scope_info()->
StackSlotIndex(Heap::result_symbol());
at_local_ = index < 0;
} else if (context_->is_function_context()) {
at_local_ = true;
@ -9454,7 +9452,7 @@ static Handle<Context> CopyWithContextChain(Handle<Context> context_chain,
// Runtime_DebugEvaluate.
static Handle<Object> GetArgumentsObject(JavaScriptFrame* frame,
Handle<JSFunction> function,
Handle<Object> scope_info,
Handle<SerializedScopeInfo> scope_info,
const ScopeInfo<>* sinfo,
Handle<Context> function_context) {
// Try to find the value of 'arguments' to pass as parameter. If it is not
@ -9462,15 +9460,14 @@ static Handle<Object> GetArgumentsObject(JavaScriptFrame* frame,
// does not support eval) then create an 'arguments' object.
int index;
if (sinfo->number_of_stack_slots() > 0) {
index = ScopeInfo<>::StackSlotIndex(*scope_info, Heap::arguments_symbol());
index = scope_info->StackSlotIndex(Heap::arguments_symbol());
if (index != -1) {
return Handle<Object>(frame->GetExpression(index));
}
}
if (sinfo->number_of_context_slots() > Context::MIN_CONTEXT_SLOTS) {
index = ScopeInfo<>::ContextSlotIndex(*scope_info, Heap::arguments_symbol(),
NULL);
index = scope_info->ContextSlotIndex(Heap::arguments_symbol(), NULL);
if (index != -1) {
return Handle<Object>(function_context->get(index));
}
@ -9521,7 +9518,7 @@ static Object* Runtime_DebugEvaluate(Arguments args) {
JavaScriptFrameIterator it(id);
JavaScriptFrame* frame = it.frame();
Handle<JSFunction> function(JSFunction::cast(frame->function()));
Handle<Object> scope_info(function->shared()->scope_info());
Handle<SerializedScopeInfo> scope_info(function->shared()->scope_info());
ScopeInfo<> sinfo(*scope_info);
// Traverse the saved contexts chain to find the active context for the

275
deps/v8/src/scopeinfo.cc

@ -204,12 +204,6 @@ static inline Object** ReadSymbol(Object** p, Handle<String>* s) {
}
static inline Object** ReadSentinel(Object** p) {
ASSERT(*p == NULL);
return p + 1;
}
template <class Allocator>
static Object** ReadList(Object** p, List<Handle<String>, Allocator >* list) {
ASSERT(list->is_empty());
@ -220,7 +214,7 @@ static Object** ReadList(Object** p, List<Handle<String>, Allocator >* list) {
p = ReadSymbol(p, &s);
list->Add(s);
}
return ReadSentinel(p);
return p;
}
@ -239,42 +233,19 @@ static Object** ReadList(Object** p,
list->Add(s);
modes->Add(static_cast<Variable::Mode>(m));
}
return ReadSentinel(p);
}
template<class Allocator>
Handle<Object> ScopeInfo<Allocator>::CreateHeapObject(Scope* scope) {
ScopeInfo<ZoneListAllocationPolicy> sinfo(scope);
return sinfo.Serialize();
}
template<class Allocator>
Object* ScopeInfo<Allocator>::EmptyHeapObject() {
return Heap::empty_fixed_array();
}
inline bool IsNotEmpty(Object* data) {
return FixedArray::cast(data)->length() != 0;
}
inline Object** GetDataStart(Object* data) {
return FixedArray::cast(data)->data_start();
return p;
}
template<class Allocator>
ScopeInfo<Allocator>::ScopeInfo(Object* data)
ScopeInfo<Allocator>::ScopeInfo(SerializedScopeInfo* data)
: function_name_(Factory::empty_symbol()),
parameters_(4),
stack_slots_(8),
context_slots_(8),
context_modes_(8) {
if (IsNotEmpty(data)) {
Object** p0 = GetDataStart(data);
if (data->length() > 0) {
Object** p0 = data->data_start();
Object** p = p0;
p = ReadSymbol(p, &function_name_);
p = ReadBool(p, &calls_eval_);
@ -304,12 +275,6 @@ static inline Object** WriteSymbol(Object** p, Handle<String> s) {
}
static inline Object** WriteSentinel(Object** p) {
*p++ = NULL;
return p;
}
template <class Allocator>
static Object** WriteList(Object** p, List<Handle<String>, Allocator >* list) {
const int n = list->length();
@ -317,7 +282,7 @@ static Object** WriteList(Object** p, List<Handle<String>, Allocator >* list) {
for (int i = 0; i < n; i++) {
p = WriteSymbol(p, list->at(i));
}
return WriteSentinel(p);
return p;
}
@ -331,23 +296,24 @@ static Object** WriteList(Object** p,
p = WriteSymbol(p, list->at(i));
p = WriteInt(p, modes->at(i));
}
return WriteSentinel(p);
return p;
}
template<class Allocator>
Handle<Object> ScopeInfo<Allocator>::Serialize() {
// function name, calls eval, length & sentinel for 3 tables:
const int extra_slots = 1 + 1 + 2 * 3;
Handle<SerializedScopeInfo> ScopeInfo<Allocator>::Serialize() {
// function name, calls eval, length for 3 tables:
const int extra_slots = 1 + 1 + 3;
int length = extra_slots +
context_slots_.length() * 2 +
parameters_.length() +
stack_slots_.length();
Handle<Object> data(Factory::NewFixedArray(length, TENURED));
Handle<SerializedScopeInfo> data(
SerializedScopeInfo::cast(*Factory::NewFixedArray(length, TENURED)));
AssertNoAllocation nogc;
Object** p0 = GetDataStart(*data);
Object** p0 = data->data_start();
Object** p = p0;
p = WriteSymbol(p, function_name_);
p = WriteBool(p, calls_eval_);
@ -360,36 +326,69 @@ Handle<Object> ScopeInfo<Allocator>::Serialize() {
}
static Object** ContextEntriesAddr(Object* data) {
ASSERT(IsNotEmpty(data));
// +2 for function name and calls eval:
return GetDataStart(data) + 2;
template<class Allocator>
Handle<String> ScopeInfo<Allocator>::LocalName(int i) const {
// A local variable can be allocated either on the stack or in the context.
// For variables allocated in the context they are always preceded by
// Context::MIN_CONTEXT_SLOTS of fixed allocated slots in the context.
if (i < number_of_stack_slots()) {
return stack_slot_name(i);
} else {
return context_slot_name(i - number_of_stack_slots() +
Context::MIN_CONTEXT_SLOTS);
}
}
static Object** ParameterEntriesAddr(Object* data) {
ASSERT(IsNotEmpty(data));
Object** p = ContextEntriesAddr(data);
int n; // number of context slots;
p = ReadInt(p, &n);
return p + n*2 + 1; // *2 for pairs, +1 for sentinel
template<class Allocator>
int ScopeInfo<Allocator>::NumberOfLocals() const {
int number_of_locals = number_of_stack_slots();
if (number_of_context_slots() > 0) {
ASSERT(number_of_context_slots() >= Context::MIN_CONTEXT_SLOTS);
number_of_locals += number_of_context_slots() - Context::MIN_CONTEXT_SLOTS;
}
return number_of_locals;
}
static Object** StackSlotEntriesAddr(Object* data) {
ASSERT(IsNotEmpty(data));
Object** p = ParameterEntriesAddr(data);
int n; // number of parameter slots;
p = ReadInt(p, &n);
return p + n + 1; // +1 for sentinel
Handle<SerializedScopeInfo> SerializedScopeInfo::Create(Scope* scope) {
ScopeInfo<ZoneListAllocationPolicy> sinfo(scope);
return sinfo.Serialize();
}
template<class Allocator>
bool ScopeInfo<Allocator>::CallsEval(Object* data) {
if (IsNotEmpty(data)) {
// +1 for function name:
Object** p = GetDataStart(data) + 1;
SerializedScopeInfo* SerializedScopeInfo::Empty() {
return reinterpret_cast<SerializedScopeInfo*>(Heap::empty_fixed_array());
}
Object** SerializedScopeInfo::ContextEntriesAddr() {
ASSERT(length() > 0);
return data_start() + 2; // +2 for function name and calls eval.
}
Object** SerializedScopeInfo::ParameterEntriesAddr() {
ASSERT(length() > 0);
Object** p = ContextEntriesAddr();
int number_of_context_slots;
p = ReadInt(p, &number_of_context_slots);
return p + number_of_context_slots*2; // *2 for pairs
}
Object** SerializedScopeInfo::StackSlotEntriesAddr() {
ASSERT(length() > 0);
Object** p = ParameterEntriesAddr();
int number_of_parameter_slots;
p = ReadInt(p, &number_of_parameter_slots);
return p + number_of_parameter_slots;
}
bool SerializedScopeInfo::CallsEval() {
if (length() > 0) {
Object** p = data_start() + 1; // +1 for function name.
bool calls_eval;
p = ReadBool(p, &calls_eval);
return calls_eval;
@ -398,53 +397,49 @@ bool ScopeInfo<Allocator>::CallsEval(Object* data) {
}
template<class Allocator>
int ScopeInfo<Allocator>::NumberOfStackSlots(Object* data) {
if (IsNotEmpty(data)) {
Object** p = StackSlotEntriesAddr(data);
int n; // number of stack slots;
ReadInt(p, &n);
return n;
int SerializedScopeInfo::NumberOfStackSlots() {
if (length() > 0) {
Object** p = StackSlotEntriesAddr();
int number_of_stack_slots;
ReadInt(p, &number_of_stack_slots);
return number_of_stack_slots;
}
return 0;
}
template<class Allocator>
int ScopeInfo<Allocator>::NumberOfContextSlots(Object* data) {
if (IsNotEmpty(data)) {
Object** p = ContextEntriesAddr(data);
int n; // number of context slots;
ReadInt(p, &n);
return n + Context::MIN_CONTEXT_SLOTS;
int SerializedScopeInfo::NumberOfContextSlots() {
if (length() > 0) {
Object** p = ContextEntriesAddr();
int number_of_context_slots;
ReadInt(p, &number_of_context_slots);
return number_of_context_slots + Context::MIN_CONTEXT_SLOTS;
}
return 0;
}
template<class Allocator>
bool ScopeInfo<Allocator>::HasHeapAllocatedLocals(Object* data) {
if (IsNotEmpty(data)) {
Object** p = ContextEntriesAddr(data);
int n; // number of context slots;
ReadInt(p, &n);
return n > 0;
bool SerializedScopeInfo::HasHeapAllocatedLocals() {
if (length() > 0) {
Object** p = ContextEntriesAddr();
int number_of_context_slots;
ReadInt(p, &number_of_context_slots);
return number_of_context_slots > 0;
}
return false;
}
template<class Allocator>
int ScopeInfo<Allocator>::StackSlotIndex(Object* data, String* name) {
int SerializedScopeInfo::StackSlotIndex(String* name) {
ASSERT(name->IsSymbol());
if (IsNotEmpty(data)) {
// Loop below depends on the NULL sentinel after the stack slot names.
ASSERT(NumberOfStackSlots(data) > 0 ||
*(StackSlotEntriesAddr(data) + 1) == NULL);
// slots start after length entry
Object** p0 = StackSlotEntriesAddr(data) + 1;
if (length() > 0) {
// Slots start after length entry.
Object** p0 = StackSlotEntriesAddr();
int number_of_stack_slots;
p0 = ReadInt(p0, &number_of_stack_slots);
Object** p = p0;
while (*p != NULL) {
Object** end = p0 + number_of_stack_slots;
while (p != end) {
if (*p == name) return static_cast<int>(p - p0);
p++;
}
@ -452,24 +447,18 @@ int ScopeInfo<Allocator>::StackSlotIndex(Object* data, String* name) {
return -1;
}
template<class Allocator>
int ScopeInfo<Allocator>::ContextSlotIndex(Object* data,
String* name,
Variable::Mode* mode) {
int SerializedScopeInfo::ContextSlotIndex(String* name, Variable::Mode* mode) {
ASSERT(name->IsSymbol());
int result = ContextSlotCache::Lookup(data, name, mode);
int result = ContextSlotCache::Lookup(this, name, mode);
if (result != ContextSlotCache::kNotFound) return result;
if (IsNotEmpty(data)) {
// Loop below depends on the NULL sentinel after the context slot names.
ASSERT(NumberOfContextSlots(data) >= Context::MIN_CONTEXT_SLOTS ||
*(ContextEntriesAddr(data) + 1) == NULL);
// slots start after length entry
Object** p0 = ContextEntriesAddr(data) + 1;
if (length() > 0) {
// Slots start after length entry.
Object** p0 = ContextEntriesAddr();
int number_of_context_slots;
p0 = ReadInt(p0, &number_of_context_slots);
Object** p = p0;
// contexts may have no variable slots (in the presence of eval()).
while (*p != NULL) {
Object** end = p0 + number_of_context_slots * 2;
while (p != end) {
if (*p == name) {
ASSERT(((p - p0) & 1) == 0);
int v;
@ -477,21 +466,20 @@ int ScopeInfo<Allocator>::ContextSlotIndex(Object* data,
Variable::Mode mode_value = static_cast<Variable::Mode>(v);
if (mode != NULL) *mode = mode_value;
result = static_cast<int>((p - p0) >> 1) + Context::MIN_CONTEXT_SLOTS;
ContextSlotCache::Update(data, name, mode_value, result);
ContextSlotCache::Update(this, name, mode_value, result);
return result;
}
p += 2;
}
}
ContextSlotCache::Update(data, name, Variable::INTERNAL, -1);
ContextSlotCache::Update(this, name, Variable::INTERNAL, -1);
return -1;
}
template<class Allocator>
int ScopeInfo<Allocator>::ParameterIndex(Object* data, String* name) {
int SerializedScopeInfo::ParameterIndex(String* name) {
ASSERT(name->IsSymbol());
if (IsNotEmpty(data)) {
if (length() > 0) {
// We must read parameters from the end since for
// multiply declared parameters the value of the
// last declaration of that parameter is used
@ -502,10 +490,10 @@ int ScopeInfo<Allocator>::ParameterIndex(Object* data, String* name) {
// once, with corresponding index. This requires a new
// implementation of the ScopeInfo code. See also other
// comments in this file regarding this.
Object** p = ParameterEntriesAddr(data);
int n; // number of parameters
Object** p0 = ReadInt(p, &n);
p = p0 + n;
Object** p = ParameterEntriesAddr();
int number_of_parameter_slots;
Object** p0 = ReadInt(p, &number_of_parameter_slots);
p = p0 + number_of_parameter_slots;
while (p > p0) {
p--;
if (*p == name) return static_cast<int>(p - p0);
@ -515,50 +503,23 @@ int ScopeInfo<Allocator>::ParameterIndex(Object* data, String* name) {
}
template<class Allocator>
int ScopeInfo<Allocator>::FunctionContextSlotIndex(Object* data, String* name) {
int SerializedScopeInfo::FunctionContextSlotIndex(String* name) {
ASSERT(name->IsSymbol());
if (IsNotEmpty(data)) {
Object** p = GetDataStart(data);
if (length() > 0) {
Object** p = data_start();
if (*p == name) {
p = ContextEntriesAddr(data);
int n; // number of context slots
ReadInt(p, &n);
ASSERT(n != 0);
p = ContextEntriesAddr();
int number_of_context_slots;
ReadInt(p, &number_of_context_slots);
ASSERT(number_of_context_slots != 0);
// The function context slot is the last entry.
return n + Context::MIN_CONTEXT_SLOTS - 1;
return number_of_context_slots + Context::MIN_CONTEXT_SLOTS - 1;
}
}
return -1;
}
template<class Allocator>
Handle<String> ScopeInfo<Allocator>::LocalName(int i) const {
// A local variable can be allocated either on the stack or in the context.
// For variables allocated in the context they are always preceded by the
// number Context::MIN_CONTEXT_SLOTS number of fixed allocated slots in the
// context.
if (i < number_of_stack_slots()) {
return stack_slot_name(i);
} else {
return context_slot_name(i - number_of_stack_slots() +
Context::MIN_CONTEXT_SLOTS);
}
}
template<class Allocator>
int ScopeInfo<Allocator>::NumberOfLocals() const {
int number_of_locals = number_of_stack_slots();
if (number_of_context_slots() > 0) {
ASSERT(number_of_context_slots() >= Context::MIN_CONTEXT_SLOTS);
number_of_locals += number_of_context_slots() - Context::MIN_CONTEXT_SLOTS;
}
return number_of_locals;
}
int ContextSlotCache::Hash(Object* data, String* name) {
// Uses only lower 32 bits if pointers are larger.
uintptr_t addr_hash =

81
deps/v8/src/scopeinfo.h

@ -54,16 +54,11 @@ class ScopeInfo BASE_EMBEDDED {
// Create a ScopeInfo instance from a scope.
explicit ScopeInfo(Scope* scope);
// Create a ScopeInfo instance from an Object holding the serialized data.
explicit ScopeInfo(Object* data);
// Create a ScopeInfo instance from SerializedScopeInfo.
explicit ScopeInfo(SerializedScopeInfo* data);
// Creates a heap object holding the serialized scope info.
Handle<Object> Serialize();
static Handle<Object> CreateHeapObject(Scope* scope);
// Serializes empty scope info.
static Object* EmptyHeapObject();
// Creates a SerializedScopeInfo holding the serialized scope info.
Handle<SerializedScopeInfo> Serialize();
// --------------------------------------------------------------------------
// Lookup
@ -88,64 +83,80 @@ class ScopeInfo BASE_EMBEDDED {
int NumberOfLocals() const;
// --------------------------------------------------------------------------
// The following functions provide quick access to scope info details
// for runtime routines w/o the need to explicitly create a ScopeInfo
// object.
//
// ScopeInfo is the only class which should have to know about the
// encoding of its information in a FixedArray object, which is why these
// functions are in this class.
// Debugging support
#ifdef DEBUG
void Print();
#endif
private:
Handle<String> function_name_;
bool calls_eval_;
List<Handle<String>, Allocator > parameters_;
List<Handle<String>, Allocator > stack_slots_;
List<Handle<String>, Allocator > context_slots_;
List<Variable::Mode, Allocator > context_modes_;
};
// This object provides quick access to scope info details for runtime
// routines w/o the need to explicitly create a ScopeInfo object.
class SerializedScopeInfo : public FixedArray {
public :
static SerializedScopeInfo* cast(Object* object) {
ASSERT(object->IsFixedArray());
return reinterpret_cast<SerializedScopeInfo*>(object);
}
// Does this scope call eval.
static bool CallsEval(Object* data);
bool CallsEval();
// Return the number of stack slots for code.
static int NumberOfStackSlots(Object* data);
int NumberOfStackSlots();
// Return the number of context slots for code.
static int NumberOfContextSlots(Object* data);
int NumberOfContextSlots();
// Return true if this has context slots besides MIN_CONTEXT_SLOTS.
static bool HasHeapAllocatedLocals(Object* data);
bool HasHeapAllocatedLocals();
// Lookup support for serialized scope info. Returns the
// stack slot index for a given slot name if the slot is
// present; otherwise returns a value < 0. The name must be a symbol
// (canonicalized).
static int StackSlotIndex(Object* data, String* name);
int StackSlotIndex(String* name);
// Lookup support for serialized scope info. Returns the
// context slot index for a given slot name if the slot is present; otherwise
// returns a value < 0. The name must be a symbol (canonicalized).
// If the slot is present and mode != NULL, sets *mode to the corresponding
// mode for that variable.
static int ContextSlotIndex(Object* data, String* name, Variable::Mode* mode);
int ContextSlotIndex(String* name, Variable::Mode* mode);
// Lookup support for serialized scope info. Returns the
// parameter index for a given parameter name if the parameter is present;
// otherwise returns a value < 0. The name must be a symbol (canonicalized).
static int ParameterIndex(Object* data, String* name);
int ParameterIndex(String* name);
// Lookup support for serialized scope info. Returns the
// function context slot index if the function name is present (named
// function expressions, only), otherwise returns a value < 0. The name
// must be a symbol (canonicalized).
static int FunctionContextSlotIndex(Object* data, String* name);
int FunctionContextSlotIndex(String* name);
// --------------------------------------------------------------------------
// Debugging support
static Handle<SerializedScopeInfo> Create(Scope* scope);
#ifdef DEBUG
void Print();
#endif
// Serializes empty scope info.
static SerializedScopeInfo* Empty();
private:
Handle<String> function_name_;
bool calls_eval_;
List<Handle<String>, Allocator > parameters_;
List<Handle<String>, Allocator > stack_slots_;
List<Handle<String>, Allocator > context_slots_;
List<Variable::Mode, Allocator > context_modes_;
inline Object** ContextEntriesAddr();
inline Object** ParameterEntriesAddr();
inline Object** StackSlotEntriesAddr();
};
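For orientation, the layout SerializedScopeInfo reads is exactly what ScopeInfo::Serialize() writes above: the function name, the calls_eval flag, then three length-prefixed tables (context slot name/mode pairs, parameter names, stack slot names), with the old NULL sentinels gone. A minimal sketch of the resulting FixedArray length, mirroring the arithmetic in Serialize(); the helper name is hypothetical and only illustrates the slot accounting:
// Hypothetical helper mirroring Serialize(): one slot for the function name,
// one for the calls_eval flag, three table-length entries, then the table
// bodies. Context slots take two entries each (name + mode).
function serializedScopeInfoLength(contextSlots, parameters, stackSlots) {
  var extraSlots = 1 + 1 + 3;
  return extraSlots + contextSlots * 2 + parameters + stackSlots;
}
// serializedScopeInfoLength(2, 3, 4) === 16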

44
deps/v8/src/v8natives.js

@ -745,6 +745,23 @@ function ObjectDefineProperties(obj, properties) {
}
// ES5 section 15.2.3.8.
function ObjectSeal(obj) {
if ((!IS_SPEC_OBJECT_OR_NULL(obj) || IS_NULL_OR_UNDEFINED(obj)) &&
!IS_UNDETECTABLE(obj)) {
throw MakeTypeError("obj_ctor_property_non_object", ["seal"]);
}
var names = ObjectGetOwnPropertyNames(obj);
for (var key in names) {
var name = names[key];
var desc = GetOwnProperty(obj, name);
if (desc.isConfigurable()) desc.setConfigurable(false);
DefineOwnProperty(obj, name, desc, true);
}
ObjectPreventExtension(obj);
}
// ES5 section 15.2.3.9.
function ObjectFreeze(obj) {
if ((!IS_SPEC_OBJECT_OR_NULL(obj) || IS_NULL_OR_UNDEFINED(obj)) &&
@ -774,6 +791,25 @@ function ObjectPreventExtension(obj) {
}
// ES5 section 15.2.3.11
function ObjectIsSealed(obj) {
if ((!IS_SPEC_OBJECT_OR_NULL(obj) || IS_NULL_OR_UNDEFINED(obj)) &&
!IS_UNDETECTABLE(obj)) {
throw MakeTypeError("obj_ctor_property_non_object", ["isSealed"]);
}
var names = ObjectGetOwnPropertyNames(obj);
for (var key in names) {
var name = names[key];
var desc = GetOwnProperty(obj, name);
if (desc.isConfigurable()) return false;
}
if (!ObjectIsExtensible(obj)) {
return true;
}
return false;
}
// ES5 section 15.2.3.12
function ObjectIsFrozen(obj) {
if ((!IS_SPEC_OBJECT_OR_NULL(obj) || IS_NULL_OR_UNDEFINED(obj)) &&
@ -784,8 +820,8 @@ function ObjectIsFrozen(obj) {
for (var key in names) {
var name = names[key];
var desc = GetOwnProperty(obj, name);
if (IsDataDescriptor(desc) && desc.writable) return false;
if (desc.configurable) return false;
if (IsDataDescriptor(desc) && desc.isWritable()) return false;
if (desc.isConfigurable()) return false;
}
if (!ObjectIsExtensible(obj)) {
return true;
@ -843,7 +879,9 @@ function SetupObject() {
"getOwnPropertyNames", ObjectGetOwnPropertyNames,
"isExtensible", ObjectIsExtensible,
"isFrozen", ObjectIsFrozen,
"preventExtensions", ObjectPreventExtension
"isSealed", ObjectIsSealed,
"preventExtensions", ObjectPreventExtension,
"seal", ObjectSeal
));
}
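The observable ES5 semantics of the two new builtins: seal makes every own property non-configurable and the object non-extensible, but, unlike freeze, it leaves data properties writable. A short usage sketch (plain JavaScript, nothing V8-specific assumed):
// Sealing: properties become non-configurable and the object non-extensible,
// but data properties remain writable, so the object is sealed, not frozen.
var point = { x: 1, y: 2 };
Object.seal(point);

point.x = 10;      // still works: seal does not touch [[Writable]]
delete point.y;    // rejected: y is no longer configurable
point.z = 3;       // rejected: the object is no longer extensible

// Object.isSealed(point)  -> true
// Object.isFrozen(point)  -> false (x and y are still writable)
// Object.freeze(point) would additionally clear [[Writable]].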

4
deps/v8/src/version.cc

@ -33,8 +33,8 @@
// NOTE these macros are used by the SCons build script so their names
// cannot be changed without changing the SCons build script.
#define MAJOR_VERSION 2
#define MINOR_VERSION 2
#define BUILD_NUMBER 24
#define MINOR_VERSION 3
#define BUILD_NUMBER 0
#define PATCH_LEVEL 0
#define CANDIDATE_VERSION false

73
deps/v8/src/x64/codegen-x64.cc

@ -3428,49 +3428,56 @@ void CodeGenerator::GenerateFastSmiLoop(ForStatement* node) {
CodeForStatementPosition(node);
Slot* loop_var_slot = loop_var->slot();
if (loop_var_slot->type() == Slot::LOCAL) {
frame_->PushLocalAt(loop_var_slot->index());
frame_->TakeLocalAt(loop_var_slot->index());
} else {
ASSERT(loop_var_slot->type() == Slot::PARAMETER);
frame_->PushParameterAt(loop_var_slot->index());
frame_->TakeParameterAt(loop_var_slot->index());
}
Result loop_var_result = frame_->Pop();
if (!loop_var_result.is_register()) {
loop_var_result.ToRegister();
}
Register loop_var_reg = loop_var_result.reg();
frame_->Spill(loop_var_reg);
if (increments) {
__ SmiAddConstant(loop_var_result.reg(),
loop_var_result.reg(),
__ SmiAddConstant(loop_var_reg,
loop_var_reg,
Smi::FromInt(1));
} else {
__ SmiSubConstant(loop_var_result.reg(),
loop_var_result.reg(),
__ SmiSubConstant(loop_var_reg,
loop_var_reg,
Smi::FromInt(1));
}
{
__ SmiCompare(loop_var_result.reg(), limit_value);
Condition condition;
switch (compare_op) {
case Token::LT:
condition = less;
break;
case Token::LTE:
condition = less_equal;
break;
case Token::GT:
condition = greater;
break;
case Token::GTE:
condition = greater_equal;
break;
default:
condition = never;
UNREACHABLE();
}
loop.Branch(condition);
frame_->Push(&loop_var_result);
if (loop_var_slot->type() == Slot::LOCAL) {
frame_->StoreToLocalAt(loop_var_slot->index());
} else {
ASSERT(loop_var_slot->type() == Slot::PARAMETER);
frame_->StoreToParameterAt(loop_var_slot->index());
}
loop_var_result.Unuse();
frame_->Drop();
__ SmiCompare(loop_var_reg, limit_value);
Condition condition;
switch (compare_op) {
case Token::LT:
condition = less;
break;
case Token::LTE:
condition = less_equal;
break;
case Token::GT:
condition = greater;
break;
case Token::GTE:
condition = greater_equal;
break;
default:
condition = never;
UNREACHABLE();
}
loop.Branch(condition);
}
if (node->break_target()->is_linked()) {
node->break_target()->Bind();
@ -10094,6 +10101,8 @@ static int NegativeComparisonResult(Condition cc) {
void CompareStub::Generate(MacroAssembler* masm) {
ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg));
Label check_unequal_objects, done;
// The compare stub returns a positive, negative, or zero 64-bit integer
// value in rax, corresponding to the result of comparing the two inputs.
@ -10942,8 +10951,10 @@ int CompareStub::MinorKey() {
// Encode the three parameters in a unique 16 bit value. To avoid duplicate
// stubs the never NaN NaN condition is only taken into account if the
// condition is equals.
ASSERT(static_cast<unsigned>(cc_) < (1 << 13));
ASSERT(static_cast<unsigned>(cc_) < (1 << 12));
ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg));
return ConditionField::encode(static_cast<unsigned>(cc_))
| RegisterField::encode(false) // lhs_ and rhs_ are not used
| StrictField::encode(strict_)
| NeverNanNanField::encode(cc_ == equal ? never_nan_nan_ : false)
| IncludeNumberCompareField::encode(include_number_compare_);
@ -10953,6 +10964,8 @@ int CompareStub::MinorKey() {
// Unfortunately you have to run without snapshots to see most of these
// names in the profile since most compare stubs end up in the snapshot.
const char* CompareStub::GetName() {
ASSERT(lhs_.is(no_reg) && rhs_.is(no_reg));
if (name_ != NULL) return name_;
const int kMaxNameLength = 100;
name_ = Bootstrapper::AllocateAutoDeletedArray(kMaxNameLength);
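The reworked GenerateFastSmiLoop above takes the loop variable into a spilled register and keeps it there across the increment and compare, instead of popping and re-storing it through the virtual frame each iteration. The kind of source loop this code generator targets is a plain counting loop over smis; an illustrative example only:
// A counting loop of the shape GenerateFastSmiLoop handles: a local smi loop
// variable, incremented by one each iteration, compared against a smi limit.
function sumTo(limit) {
  var sum = 0;
  for (var i = 0; i < limit; i++) {
    sum += i;
  }
  return sum;
}
// sumTo(10) === 45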

6
deps/v8/src/x64/stub-cache-x64.cc

@ -164,6 +164,12 @@ static void GenerateDictionaryNegativeLookup(MacroAssembler* masm,
// Stop if found the property.
__ Cmp(entity_name, Handle<String>(name));
__ j(equal, miss_label);
// Check if the entry name is not a symbol.
__ movq(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset));
__ testb(FieldOperand(entity_name, Map::kInstanceTypeOffset),
Immediate(kIsSymbolMask));
__ j(zero, miss_label);
} else {
// Give up probing if still not found the undefined value.
__ j(not_equal, miss_label);

65
deps/v8/test/cctest/test-debug.cc

@ -6650,4 +6650,69 @@ TEST(DebugEventContext) {
CheckDebuggerUnloaded();
}
static void* expected_break_data;
static bool was_debug_break_called;
static bool was_debug_event_called;
static void DebugEventBreakDataChecker(const v8::Debug::EventDetails& details) {
if (details.GetEvent() == v8::BreakForCommand) {
CHECK_EQ(expected_break_data, details.GetClientData());
was_debug_event_called = true;
} else if (details.GetEvent() == v8::Break) {
was_debug_break_called = true;
}
}
// Check that event details contain the client data passed to DebugBreakForCommand.
TEST(DebugEventBreakData) {
v8::HandleScope scope;
DebugLocalContext env;
v8::Debug::SetDebugEventListener2(DebugEventBreakDataChecker);
TestClientData::constructor_call_counter = 0;
TestClientData::destructor_call_counter = 0;
expected_break_data = NULL;
was_debug_event_called = false;
was_debug_break_called = false;
v8::Debug::DebugBreakForCommand();
v8::Script::Compile(v8::String::New("(function(x){return x;})(1);"))->Run();
CHECK(was_debug_event_called);
CHECK(!was_debug_break_called);
TestClientData* data1 = new TestClientData();
expected_break_data = data1;
was_debug_event_called = false;
was_debug_break_called = false;
v8::Debug::DebugBreakForCommand(data1);
v8::Script::Compile(v8::String::New("(function(x){return x+1;})(1);"))->Run();
CHECK(was_debug_event_called);
CHECK(!was_debug_break_called);
expected_break_data = NULL;
was_debug_event_called = false;
was_debug_break_called = false;
v8::Debug::DebugBreak();
v8::Script::Compile(v8::String::New("(function(x){return x+2;})(1);"))->Run();
CHECK(!was_debug_event_called);
CHECK(was_debug_break_called);
TestClientData* data2 = new TestClientData();
expected_break_data = data2;
was_debug_event_called = false;
was_debug_break_called = false;
v8::Debug::DebugBreak();
v8::Debug::DebugBreakForCommand(data2);
v8::Script::Compile(v8::String::New("(function(x){return x+3;})(1);"))->Run();
CHECK(was_debug_event_called);
CHECK(was_debug_break_called);
CHECK_EQ(2, TestClientData::constructor_call_counter);
CHECK_EQ(TestClientData::constructor_call_counter,
TestClientData::destructor_call_counter);
v8::Debug::SetDebugEventListener(NULL);
CheckDebuggerUnloaded();
}
#endif // ENABLE_DEBUGGER_SUPPORT

11
deps/v8/test/es5conform/es5conform.status

@ -47,17 +47,6 @@ chapter11/11.4/11.4.1//11.4.1-4.a-7: FAIL
# We do not have a global object called 'global' as required by tests.
chapter15/15.1: FAIL_OK
# NOT IMPLEMENTED: seal
chapter15/15.2/15.2.3/15.2.3.8: UNIMPLEMENTED
# NOT IMPLEMENTED: isSealed
chapter15/15.2/15.2.3/15.2.3.11: UNIMPLEMENTED
# NOT IMPLEMENTED: seal
chapter15/15.2/15.2.3/15.2.3.3/15.2.3.3-4-20: UNIMPLEMENTED
# NOT IMPLEMENTED: isSealed
chapter15/15.2/15.2.3/15.2.3.3/15.2.3.3-4-23: UNIMPLEMENTED
# NOT IMPLEMENTED: bind
chapter15/15.2/15.2.3/15.2.3.3/15.2.3.3-4-38: UNIMPLEMENTED

15
deps/v8/test/mjsunit/call-stub.js

@ -49,3 +49,18 @@ for (var i = 1; i < 100; i++) {
}
assertEquals(i < 50 || i >= 70 ? 1 : 2, h.m());
}
var nonsymbol = 'wwwww '.split(' ')[0];
Hash.prototype.wwwww = Hash.prototype.m;
for (var i = 1; i < 100; i++) {
if (i == 50) {
h[nonsymbol] = function() {
return 2;
};
} else if (i == 70) {
delete h[nonsymbol];
}
assertEquals(i < 50 || i >= 70 ? 1 : 2, h.wwwww());
}

19
deps/v8/test/mjsunit/object-freeze.js

@ -172,3 +172,22 @@ Object.defineProperty(obj3, 'y', {configurable: false, writable: false});
Object.preventExtensions(obj3);
assertTrue(Object.isFrozen(obj3));
// Make sure that an object whose properties are all non-configurable, but
// where one property is writable, is not classified as frozen.
var obj4 = {};
Object.defineProperty(obj4, 'x', {configurable: false, writable: true});
Object.defineProperty(obj4, 'y', {configurable: false, writable: false});
Object.preventExtensions(obj4);
assertFalse(Object.isFrozen(obj4));
// Make sure that an object whose properties are all non-writable, but
// where one property is configurable, is not classified as frozen.
var obj5 = {};
Object.defineProperty(obj5, 'x', {configurable: true, writable: false});
Object.defineProperty(obj5, 'y', {configurable: false, writable: false});
Object.preventExtensions(obj5);
assertFalse(Object.isFrozen(obj5));

195
deps/v8/test/mjsunit/object-seal.js

@ -0,0 +1,195 @@
// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Tests the Object.seal and Object.isSealed methods - ES 15.2.3.8 and
// ES 15.2.3.11
// Test that we throw an error if an object is not passed as argument.
var non_objects = new Array(undefined, null, 1, -1, 0, 42.43);
for (var key in non_objects) {
try {
Object.seal(non_objects[key]);
assertUnreachable();
} catch(e) {
assertTrue(/Object.seal called on non-object/.test(e));
}
}
for (var key in non_objects) {
try {
Object.isSealed(non_objects[key]);
assertUnreachable();
} catch(e) {
assertTrue(/Object.isSealed called on non-object/.test(e));
}
}
// Test normal data properties.
var obj = { x: 42, z: 'foobar' };
var desc = Object.getOwnPropertyDescriptor(obj, 'x');
assertTrue(desc.writable);
assertTrue(desc.configurable);
assertEquals(42, desc.value);
desc = Object.getOwnPropertyDescriptor(obj, 'z');
assertTrue(desc.writable);
assertTrue(desc.configurable);
assertEquals('foobar', desc.value);
assertTrue(Object.isExtensible(obj));
assertFalse(Object.isSealed(obj));
Object.seal(obj);
// Make sure we are no longer extensible.
assertFalse(Object.isExtensible(obj));
assertTrue(Object.isSealed(obj));
// We should not be frozen, since we are still able to
// update values.
assertFalse(Object.isFrozen(obj));
// We should not allow new properties to be added.
try {
obj.foo = 42;
assertUnreachable();
} catch(e) {
assertTrue(/object is not extensible/.test(e));
}
desc = Object.getOwnPropertyDescriptor(obj, 'x');
assertTrue(desc.writable);
assertFalse(desc.configurable);
assertEquals(42, desc.value);
desc = Object.getOwnPropertyDescriptor(obj, 'z');
assertTrue(desc.writable);
assertFalse(desc.configurable);
assertEquals("foobar", desc.value);
// Since writable is not affected by seal we should still be able to
// update the values.
obj.x = "43";
assertEquals(43, obj.x);
// Test on accessors.
var obj2 = {};
function get() { return 43; };
function set() {};
Object.defineProperty(obj2, 'x', { get: get, set: set, configurable: true });
desc = Object.getOwnPropertyDescriptor(obj2, 'x');
assertTrue(desc.configurable);
assertEquals(undefined, desc.value);
assertEquals(set, desc.set);
assertEquals(get, desc.get);
assertTrue(Object.isExtensible(obj2));
assertFalse(Object.isSealed(obj2));
Object.seal(obj2);
// Since this is an accessor property, the object is now effectively both
// sealed and frozen (accessors have no writable attribute).
assertTrue(Object.isFrozen(obj2));
assertFalse(Object.isExtensible(obj2));
assertTrue(Object.isSealed(obj2));
desc = Object.getOwnPropertyDescriptor(obj2, 'x');
assertFalse(desc.configurable);
assertEquals(undefined, desc.value);
assertEquals(set, desc.set);
assertEquals(get, desc.get);
try {
obj2.foo = 42;
assertUnreachable();
} catch(e) {
assertTrue(/object is not extensible/.test(e));
}
// Test seal on arrays.
var arr = new Array(42,43);
desc = Object.getOwnPropertyDescriptor(arr, '0');
assertTrue(desc.configurable);
assertTrue(desc.writable);
assertEquals(42, desc.value);
desc = Object.getOwnPropertyDescriptor(arr, '1');
assertTrue(desc.configurable);
assertTrue(desc.writable);
assertEquals(43, desc.value);
assertTrue(Object.isExtensible(arr));
assertFalse(Object.isSealed(arr));
Object.seal(arr);
assertTrue(Object.isSealed(arr));
assertFalse(Object.isExtensible(arr));
// Since the values in the array are still writable this object
// is not frozen.
assertFalse(Object.isFrozen(arr));
desc = Object.getOwnPropertyDescriptor(arr, '0');
assertFalse(desc.configurable);
assertTrue(desc.writable);
assertEquals(42, desc.value);
desc = Object.getOwnPropertyDescriptor(arr, '1');
assertFalse(desc.configurable);
assertTrue(desc.writable);
assertEquals(43, desc.value);
arr[0] = 'foo';
// We should be able to overwrite the existing value.
assertEquals('foo', arr[0]);
// Test that isSealed returns the correct value even if configurable
// has been set to false on all properties manually and the extensible
// flag has also been set to false manually.
var obj3 = { x: 42, y: 'foo' };
assertFalse(Object.isFrozen(obj3));
Object.defineProperty(obj3, 'x', {configurable: false, writable: true});
Object.defineProperty(obj3, 'y', {configurable: false, writable: false});
Object.preventExtensions(obj3);
assertTrue(Object.isSealed(obj3));
// Make sure that an object that has a configurable property
// is not classified as sealed.
var obj4 = {};
Object.defineProperty(obj4, 'x', {configurable: true, writable: false});
Object.defineProperty(obj4, 'y', {configurable: false, writable: false});
Object.preventExtensions(obj4);
assertFalse(Object.isSealed(obj4));

94
deps/v8/test/mjsunit/regress/regress-r4998.js

@ -0,0 +1,94 @@
// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Test for a broken fast-smi-loop that does not save the incremented value
// of the loop index. If this test fails, it loops forever, and times out.
// Flags: --nofull-compiler
// Calling foo() spills the virtual frame.
function foo() {
return;
}
function bar() {
var x1 = 3;
var x2 = 3;
var x3 = 3;
var x4 = 3;
var x5 = 3;
var x6 = 3;
var x7 = 3;
var x8 = 3;
var x9 = 3;
var x10 = 3;
var x11 = 3;
var x12 = 3;
var x13 = 3;
foo();
x1 = 257;
x2 = 258;
x3 = 259;
x4 = 260;
x5 = 261;
x6 = 262;
x7 = 263;
x8 = 264;
x9 = 265;
x10 = 266;
x11 = 267;
x12 = 268;
x13 = 269;
// The loop variable x7 is initialized to 3,
// and then MakeMergeable is called on the virtual frame.
// MakeMergeable has forced the loop variable x7 to be spilled,
// so it is marked as synced
// The back edge then merges its virtual frame, which incorrectly
// claims that x7 is synced, and does not save the modified
// value.
for (x7 = 3; x7 < 10; ++x7) {
foo();
}
}
bar();
function aliasing() {
var x = 3;
var j;
for (j = 7; j < 11; ++j) {
x = j;
}
assertEquals(10, x);
assertEquals(11, j);
}
aliasing();