Upgrade V8 to 2.3.6

Tag: v0.7.4-release
Author: Ryan Dahl
Parent commit: a5be730906
deps/v8/AUTHORS | 1
deps/v8/ChangeLog | 10
deps/v8/SConstruct | 1
deps/v8/src/api.cc | 12
deps/v8/src/arm/assembler-arm-inl.h | 18
deps/v8/src/arm/codegen-arm.cc | 72
deps/v8/src/arm/codegen-arm.h | 2
deps/v8/src/arm/full-codegen-arm.cc | 60
deps/v8/src/arm/ic-arm.cc | 17
deps/v8/src/arm/macro-assembler-arm.cc | 28
deps/v8/src/arm/macro-assembler-arm.h | 2
deps/v8/src/arm/stub-cache-arm.cc | 26
deps/v8/src/arm/virtual-frame-arm.h | 2
deps/v8/src/assembler.h | 2
deps/v8/src/codegen.h | 39
deps/v8/src/full-codegen.cc | 2
deps/v8/src/full-codegen.h | 1
deps/v8/src/globals.h | 3
deps/v8/src/heap-inl.h | 6
deps/v8/src/heap.cc | 19
deps/v8/src/heap.h | 58
deps/v8/src/ia32/assembler-ia32-inl.h | 19
deps/v8/src/ia32/assembler-ia32.cc | 3
deps/v8/src/ia32/codegen-ia32.cc | 85
deps/v8/src/ia32/codegen-ia32.h | 5
deps/v8/src/ia32/full-codegen-ia32.cc | 84
deps/v8/src/ia32/ic-ia32.cc | 14
deps/v8/src/ia32/macro-assembler-ia32.cc | 27
deps/v8/src/ia32/stub-cache-ia32.cc | 35
deps/v8/src/objects.cc | 2
deps/v8/src/objects.h | 1
deps/v8/src/parser.cc | 93
deps/v8/src/regexp.js | 6
deps/v8/src/runtime.cc | 20
deps/v8/src/runtime.h | 1
deps/v8/src/string.js | 13
deps/v8/src/stub-cache.cc | 7
deps/v8/src/stub-cache.h | 8
deps/v8/src/v8.h | 3
deps/v8/src/v8natives.js | 4
deps/v8/src/version.cc | 4
deps/v8/src/x64/assembler-x64-inl.h | 19
deps/v8/src/x64/assembler-x64.cc | 3
deps/v8/src/x64/codegen-x64.cc | 85
deps/v8/src/x64/codegen-x64.h | 2
deps/v8/src/x64/full-codegen-x64.cc | 84
deps/v8/src/x64/ic-x64.cc | 15
deps/v8/src/x64/macro-assembler-x64.cc | 16
deps/v8/src/x64/macro-assembler-x64.h | 4
deps/v8/src/x64/stub-cache-x64.cc | 28
deps/v8/test/cctest/test-debug.cc | 68
deps/v8/test/mjsunit/object-freeze.js | 4
deps/v8/test/mjsunit/object-literal.js | 4
deps/v8/test/mjsunit/object-seal.js | 4
54 files changed

deps/v8/AUTHORS (1 line changed)

@@ -28,4 +28,5 @@ Rene Rebe <rene@exactcode.de>
Rodolph Perfetta <rodolph.perfetta@arm.com>
Ryan Dahl <coldredlemur@gmail.com>
Subrato K De <subratokde@codeaurora.org>
Burcu Dogan <burcujdogan@gmail.com>

deps/v8/ChangeLog (10 lines changed)

@@ -1,3 +1,13 @@
2010-08-09: Version 2.3.6
RegExp literals create a new object every time they are evaluated
(issue 704).
Object.seal and Object.freeze return the modified object (issue 809).
Fix building using GCC 4.4.4.
2010-08-04: Version 2.3.5
Added support for ES5 property names. Object initialisers and
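
The first two ChangeLog items are directly observable from JavaScript. A minimal sketch of the new behaviour (assuming a V8 2.3.6 shell; expected values are shown as comments):

function makeRegExp() { return /ab/g; }
var r1 = makeRegExp();
var r2 = makeRegExp();
// Issue 704: each evaluation of the literal now yields a fresh object,
// so r1 !== r2 and mutating one no longer leaks into the other:
r1.lastIndex = 5;
// r2.lastIndex is still 0.

// Issue 809: Object.seal and Object.freeze now return the object they
// modified, so they can be used in expression position:
var frozen = Object.freeze({ x: 1 });
// frozen.x === 1 and Object.isFrozen(frozen) === true.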

deps/v8/SConstruct (1 line changed)

@@ -300,6 +300,7 @@ V8_EXTRA_FLAGS = {
'gcc': {
'all': {
'WARNINGFLAGS': ['-Wall',
'-Werror',
'-W',
'-Wno-unused-parameter',
'-Wnon-virtual-dtor']

deps/v8/src/api.cc (12 lines changed)

@@ -126,7 +126,7 @@ static FatalErrorCallback& GetFatalErrorHandler() {
// When V8 cannot allocate memory, FatalProcessOutOfMemory is called.
// The default fatal error handler is called and execution is stopped.
void i::V8::FatalProcessOutOfMemory(const char* location) {
void i::V8::FatalProcessOutOfMemory(const char* location, bool take_snapshot) {
i::HeapStats heap_stats;
int start_marker;
heap_stats.start_marker = &start_marker;
@@ -166,9 +166,17 @@ void i::V8::FatalProcessOutOfMemory(const char* location) {
heap_stats.near_death_global_handle_count = &near_death_global_handle_count;
int destroyed_global_handle_count;
heap_stats.destroyed_global_handle_count = &destroyed_global_handle_count;
int memory_allocator_size;
heap_stats.memory_allocator_size = &memory_allocator_size;
int memory_allocator_capacity;
heap_stats.memory_allocator_capacity = &memory_allocator_capacity;
int objects_per_type[LAST_TYPE + 1] = {0};
heap_stats.objects_per_type = objects_per_type;
int size_per_type[LAST_TYPE + 1] = {0};
heap_stats.size_per_type = size_per_type;
int end_marker;
heap_stats.end_marker = &end_marker;
i::Heap::RecordStats(&heap_stats);
i::Heap::RecordStats(&heap_stats, take_snapshot);
i::V8::SetFatalError();
FatalErrorCallback callback = GetFatalErrorHandler();
{

deps/v8/src/arm/assembler-arm-inl.h (18 lines changed)

@@ -120,9 +120,8 @@ Address RelocInfo::call_address() {
void RelocInfo::set_call_address(Address target) {
ASSERT(IsPatchedReturnSequence());
// The 2 instructions offset assumes patched return sequence.
ASSERT(IsJSReturn(rmode()));
ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
(IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
Memory::Address_at(pc_ + 2 * Assembler::kInstrSize) = target;
}
@@ -132,16 +131,15 @@ Object* RelocInfo::call_object() {
}
Object** RelocInfo::call_object_address() {
ASSERT(IsPatchedReturnSequence());
// The 2 instructions offset assumes patched return sequence.
ASSERT(IsJSReturn(rmode()));
return reinterpret_cast<Object**>(pc_ + 2 * Assembler::kInstrSize);
void RelocInfo::set_call_object(Object* target) {
*call_object_address() = target;
}
void RelocInfo::set_call_object(Object* target) {
*call_object_address() = target;
Object** RelocInfo::call_object_address() {
ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
(IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
return reinterpret_cast<Object**>(pc_ + 2 * Assembler::kInstrSize);
}

deps/v8/src/arm/codegen-arm.cc (72 lines changed)

@@ -3280,13 +3280,13 @@ void CodeGenerator::VisitRegExpLiteral(RegExpLiteral* node) {
FixedArray::kHeaderSize + node->literal_index() * kPointerSize;
__ ldr(literal, FieldMemOperand(tmp, literal_offset));
JumpTarget done;
JumpTarget materialized;
__ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
__ cmp(literal, ip);
// This branch locks the virtual frame at the done label to match the
// one we have here, where the literal register is not on the stack and
// nothing is spilled.
done.Branch(ne);
materialized.Branch(ne);
// If the entry is undefined we call the runtime system to compute
// the literal.
@@ -3301,11 +3301,23 @@ void CodeGenerator::VisitRegExpLiteral(RegExpLiteral* node) {
frame_->CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
__ Move(literal, r0);
// This call to bind will get us back to the virtual frame we had before
// where things are not spilled and the literal register is not on the stack.
done.Bind();
// Push the literal.
materialized.Bind();
frame_->EmitPush(literal);
int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
frame_->EmitPush(Operand(Smi::FromInt(size)));
frame_->CallRuntime(Runtime::kAllocateInNewSpace, 1);
// TODO(lrn): Use AllocateInNewSpace macro with fallback to runtime.
// r0 is newly allocated space.
// Reuse literal variable with (possibly) a new register, still holding
// the materialized boilerplate.
literal = frame_->PopToRegister(r0);
__ CopyFields(r0, literal, tmp.bit(), size / kPointerSize);
// Push the clone.
frame_->EmitPush(r0);
ASSERT_EQ(original_height + 1, frame_->height());
}
@@ -5324,6 +5336,44 @@ void CodeGenerator::GenerateObjectEquals(ZoneList<Expression*>* args) {
}
void CodeGenerator::GenerateIsRegExpEquivalent(ZoneList<Expression*>* args) {
ASSERT(args->length() == 2);
// Load the two objects into registers and perform the comparison.
Load(args->at(0));
Load(args->at(1));
Register right = frame_->PopToRegister();
Register left = frame_->PopToRegister(right);
Register tmp = frame_->scratch0();
Register tmp2 = frame_->scratch1();
// Jumps to done must have the eq flag set if the test is successful
// and clear if the test has failed.
Label done;
// Fail if either is a non-HeapObject.
__ cmp(left, Operand(right));
__ b(eq, &done);
__ and_(tmp, left, Operand(right));
__ eor(tmp, tmp, Operand(kSmiTagMask));
__ tst(tmp, Operand(kSmiTagMask));
__ b(ne, &done);
__ ldr(tmp, FieldMemOperand(left, HeapObject::kMapOffset));
__ ldrb(tmp2, FieldMemOperand(tmp, Map::kInstanceTypeOffset));
__ cmp(tmp2, Operand(JS_REGEXP_TYPE));
__ b(ne, &done);
__ ldr(tmp2, FieldMemOperand(right, HeapObject::kMapOffset));
__ cmp(tmp, Operand(tmp2));
__ b(ne, &done);
__ ldr(tmp, FieldMemOperand(left, JSRegExp::kDataOffset));
__ ldr(tmp2, FieldMemOperand(right, JSRegExp::kDataOffset));
__ cmp(tmp, tmp2);
__ bind(&done);
cc_reg_ = eq;
}
void CodeGenerator::VisitCallRuntime(CallRuntime* node) {
#ifdef DEBUG
int original_height = frame_->height();
@@ -6908,10 +6958,7 @@ void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
__ str(r2, FieldMemOperand(r0, JSArray::kElementsOffset));
// Copy the elements array.
for (int i = 0; i < elements_size; i += kPointerSize) {
__ ldr(r1, FieldMemOperand(r3, i));
__ str(r1, FieldMemOperand(r2, i));
}
__ CopyFields(r2, r3, r1.bit(), elements_size / kPointerSize);
}
// Return and remove the on-stack parameters.
@@ -9780,10 +9827,7 @@ void ArgumentsAccessStub::GenerateNewObject(MacroAssembler* masm) {
__ ldr(r4, MemOperand(r4, offset));
// Copy the JS object part.
for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) {
__ ldr(r3, FieldMemOperand(r4, i));
__ str(r3, FieldMemOperand(r0, i));
}
__ CopyFields(r0, r4, r3.bit(), JSObject::kHeaderSize / kPointerSize);
// Setup the callee in-object property.
STATIC_ASSERT(Heap::arguments_callee_index == 0);
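
Both the classic and the full code generator now follow the same shape here: fetch the boilerplate from the literals array, call Runtime::kMaterializeRegExpLiteral only if the slot is still undefined, then copy the boilerplate's fields into a freshly allocated JSRegExp. A rough JavaScript model of that control flow (the helper names are hypothetical; the real work happens in generated ARM code):

function materializeRegExpLiteral(literals, index, pattern, flags) {
  var boilerplate = new RegExp(pattern, flags);
  literals[index] = boilerplate;          // cache for later evaluations
  return boilerplate;
}

function shallowCloneRegExp(boilerplate) {
  // Models CopyFields over JSRegExp::kSize + kInObjectFieldCount slots:
  // the clone shares the pattern and flags but has its own lastIndex.
  var flags = (boilerplate.global ? 'g' : '') +
              (boilerplate.ignoreCase ? 'i' : '') +
              (boilerplate.multiline ? 'm' : '');
  return new RegExp(boilerplate.source, flags);
}

function visitRegExpLiteral(literals, index, pattern, flags) {
  var boilerplate = (literals[index] !== undefined)
      ? literals[index]
      : materializeRegExpLiteral(literals, index, pattern, flags);
  return shallowCloneRegExp(boilerplate);  // a new object per evaluation
}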

deps/v8/src/arm/codegen-arm.h (2 lines changed)

@@ -544,6 +544,8 @@ class CodeGenerator: public AstVisitor {
void GenerateMathCos(ZoneList<Expression*>* args);
void GenerateMathSqrt(ZoneList<Expression*>* args);
void GenerateIsRegExpEquivalent(ZoneList<Expression*>* args);
// Simple condition analysis.
enum ConditionAnalysis {
ALWAYS_TRUE,

deps/v8/src/arm/full-codegen-arm.cc (60 lines changed)

@@ -1104,13 +1104,13 @@ void FullCodeGenerator::EmitVariableLoad(Variable* var,
void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
Comment cmnt(masm_, "[ RegExpLiteral");
Label done;
Label materialized;
// Registers will be used as follows:
// r4 = JS function, literals array
// r3 = literal index
// r2 = RegExp pattern
// r1 = RegExp flags
// r0 = temp + return value (RegExp literal)
// r0 = temp + materialized value (RegExp literal)
__ ldr(r0, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
__ ldr(r4, FieldMemOperand(r0, JSFunction::kLiteralsOffset));
int literal_offset =
@@ -1118,13 +1118,24 @@ void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
__ ldr(r0, FieldMemOperand(r4, literal_offset));
__ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
__ cmp(r0, ip);
__ b(ne, &done);
__ b(ne, &materialized);
__ mov(r3, Operand(Smi::FromInt(expr->literal_index())));
__ mov(r2, Operand(expr->pattern()));
__ mov(r1, Operand(expr->flags()));
__ Push(r4, r3, r2, r1);
__ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
__ bind(&done);
__ bind(&materialized);
int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
__ push(r0);
__ mov(r0, Operand(Smi::FromInt(size)));
__ push(r0);
__ CallRuntime(Runtime::kAllocateInNewSpace, 1);
// After this, registers are used as follows:
// r0: Newly allocated regexp.
// r1: Materialized regexp
// r2: temp.
__ pop(r1);
__ CopyFields(r0, r1, r2.bit(), size / kPointerSize);
Apply(context_, r0);
}
@@ -2566,6 +2577,47 @@ void FullCodeGenerator::EmitGetFromCache(ZoneList<Expression*>* args) {
}
void FullCodeGenerator::EmitIsRegExpEquivalent(ZoneList<Expression*>* args) {
ASSERT_EQ(2, args->length());
Register right = r0;
Register left = r1;
Register tmp = r2;
Register tmp2 = r3;
VisitForValue(args->at(0), kStack);
VisitForValue(args->at(1), kAccumulator);
__ pop(left);
Label done, fail, ok;
__ cmp(left, Operand(right));
__ b(eq, &ok);
// Fail if either is a non-HeapObject.
__ and_(tmp, left, Operand(right));
__ tst(tmp, Operand(kSmiTagMask));
__ b(eq, &fail);
__ ldr(tmp, FieldMemOperand(left, HeapObject::kMapOffset));
__ ldrb(tmp2, FieldMemOperand(tmp, Map::kInstanceTypeOffset));
__ cmp(tmp2, Operand(JS_REGEXP_TYPE));
__ b(ne, &fail);
__ ldr(tmp2, FieldMemOperand(right, HeapObject::kMapOffset));
__ cmp(tmp, Operand(tmp2));
__ b(ne, &fail);
__ ldr(tmp, FieldMemOperand(left, JSRegExp::kDataOffset));
__ ldr(tmp2, FieldMemOperand(right, JSRegExp::kDataOffset));
__ cmp(tmp, tmp2);
__ b(eq, &ok);
__ bind(&fail);
__ LoadRoot(r0, Heap::kFalseValueRootIndex);
__ jmp(&done);
__ bind(&ok);
__ LoadRoot(r0, Heap::kTrueValueRootIndex);
__ bind(&done);
Apply(context_, r0);
}
void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
Handle<String> name = expr->name();
if (name->length() > 0 && name->Get(0) == '_') {

deps/v8/src/arm/ic-arm.cc (17 lines changed)

@@ -1105,7 +1105,7 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
// -- r0 : key
// -- r1 : receiver
// -----------------------------------
Label slow, check_string, index_smi, index_string;
Label slow, check_string, index_smi, index_string, property_array_property;
Label check_pixel_array, probe_dictionary, check_number_dictionary;
Register key = r0;
@@ -1193,7 +1193,7 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
__ cmp(r0, r5);
__ b(ne, &slow);
// Get field offset and check that it is an in-object property.
// Get field offset.
// r0 : key
// r1 : receiver
// r2 : receiver's map
@@ -1203,11 +1203,10 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
__ mov(r4, Operand(cache_field_offsets));
__ ldr(r5, MemOperand(r4, r3, LSL, kPointerSizeLog2));
__ ldrb(r6, FieldMemOperand(r2, Map::kInObjectPropertiesOffset));
__ cmp(r5, r6);
__ b(ge, &slow);
__ sub(r5, r5, r6, SetCC);
__ b(ge, &property_array_property);
// Load in-object property.
__ sub(r5, r5, r6); // Index from end of object.
__ ldrb(r6, FieldMemOperand(r2, Map::kInstanceSizeOffset));
__ add(r6, r6, r5); // Index from start of object.
__ sub(r1, r1, Operand(kHeapObjectTag)); // Remove the heap tag.
@@ -1215,6 +1214,14 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
__ IncrementCounter(&Counters::keyed_load_generic_lookup_cache, 1, r2, r3);
__ Ret();
// Load property array property.
__ bind(&property_array_property);
__ ldr(r1, FieldMemOperand(r1, JSObject::kPropertiesOffset));
__ add(r1, r1, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
__ ldr(r0, MemOperand(r1, r5, LSL, kPointerSizeLog2));
__ IncrementCounter(&Counters::keyed_load_generic_lookup_cache, 1, r2, r3);
__ Ret();
// Do a quick inline probe of the receiver's dictionary, if it
// exists.
__ bind(&probe_dictionary);

deps/v8/src/arm/macro-assembler-arm.cc (28 lines changed)

@@ -1728,6 +1728,34 @@ void MacroAssembler::AllocateHeapNumberWithValue(Register result,
}
// Copies a fixed number of fields of heap objects from src to dst.
void MacroAssembler::CopyFields(Register dst,
Register src,
RegList temps,
int field_count) {
// At least one bit set in the first 15 registers.
ASSERT((temps & ((1 << 15) - 1)) != 0);
ASSERT((temps & dst.bit()) == 0);
ASSERT((temps & src.bit()) == 0);
// Primitive implementation using only one temporary register.
Register tmp = no_reg;
// Find a temp register in temps list.
for (int i = 0; i < 15; i++) {
if ((temps & (1 << i)) != 0) {
tmp.set_code(i);
break;
}
}
ASSERT(!tmp.is(no_reg));
for (int i = 0; i < field_count; i++) {
ldr(tmp, FieldMemOperand(src, i * kPointerSize));
str(tmp, FieldMemOperand(dst, i * kPointerSize));
}
}
void MacroAssembler::CountLeadingZeros(Register zeros, // Answer.
Register source, // Input.
Register scratch) {

deps/v8/src/arm/macro-assembler-arm.h (2 lines changed)

@@ -393,6 +393,8 @@ class MacroAssembler: public Assembler {
Register heap_number_map,
Label* gc_required);
// Copies a fixed number of fields of heap objects from src to dst.
void CopyFields(Register dst, Register src, RegList temps, int field_count);
// ---------------------------------------------------------------------------
// Support functions.

deps/v8/src/arm/stub-cache-arm.cc (26 lines changed)

@@ -1252,9 +1252,11 @@ void CallStubCompiler::GenerateNameCheck(String* name, Label* miss) {
}
void CallStubCompiler::GenerateMissBranch() {
Handle<Code> ic = ComputeCallMiss(arguments().immediate(), kind_);
__ Jump(ic, RelocInfo::CODE_TARGET);
Object* CallStubCompiler::GenerateMissBranch() {
Object* obj = StubCache::ComputeCallMiss(arguments().immediate(), kind_);
if (obj->IsFailure()) return obj;
__ Jump(Handle<Code>(Code::cast(obj)), RelocInfo::CODE_TARGET);
return obj;
}
@@ -1286,7 +1288,8 @@ Object* CallStubCompiler::CompileCallField(JSObject* object,
// Handle call cache miss.
__ bind(&miss);
GenerateMissBranch();
Object* obj = GenerateMissBranch();
if (obj->IsFailure()) return obj;
// Return the generated code.
return GetCode(FIELD, name);
@@ -1337,7 +1340,8 @@ Object* CallStubCompiler::CompileArrayPushCall(Object* object,
// Handle call cache miss.
__ bind(&miss);
GenerateMissBranch();
Object* obj = GenerateMissBranch();
if (obj->IsFailure()) return obj;
// Return the generated code.
return GetCode(function);
@@ -1388,7 +1392,8 @@ Object* CallStubCompiler::CompileArrayPopCall(Object* object,
// Handle call cache miss.
__ bind(&miss);
GenerateMissBranch();
Object* obj = GenerateMissBranch();
if (obj->IsFailure()) return obj;
// Return the generated code.
return GetCode(function);
@@ -1561,7 +1566,8 @@ Object* CallStubCompiler::CompileCallConstant(Object* object,
}
__ bind(&miss_in_smi_check);
GenerateMissBranch();
Object* obj = GenerateMissBranch();
if (obj->IsFailure()) return obj;
// Return the generated code.
return GetCode(function);
@@ -1610,7 +1616,8 @@ Object* CallStubCompiler::CompileCallInterceptor(JSObject* object,
// Handle call cache miss.
__ bind(&miss);
GenerateMissBranch();
Object* obj = GenerateMissBranch();
if (obj->IsFailure()) return obj;
// Return the generated code.
return GetCode(INTERCEPTOR, name);
@@ -1694,7 +1701,8 @@ Object* CallStubCompiler::CompileCallGlobal(JSObject* object,
// Handle call cache miss.
__ bind(&miss);
__ IncrementCounter(&Counters::call_global_inline_miss, 1, r1, r3);
GenerateMissBranch();
Object* obj = GenerateMissBranch();
if (obj->IsFailure()) return obj;
// Return the generated code.
return GetCode(NORMAL, name);

deps/v8/src/arm/virtual-frame-arm.h (2 lines changed)

@@ -501,7 +501,7 @@ class VirtualFrame : public ZoneObject {
}
inline void RaiseHeight(int count, unsigned known_smi_map = 0) {
ASSERT(known_smi_map < (1u << count));
ASSERT(count >= 32 || known_smi_map < (1u << count));
element_count_ += count;
if (count >= kTOSKnownSmiMapSize) {
tos_known_smi_map_ = known_smi_map;

deps/v8/src/assembler.h (2 lines changed)

@@ -232,8 +232,8 @@ class RelocInfo BASE_EMBEDDED {
INLINE(Address call_address());
INLINE(void set_call_address(Address target));
INLINE(Object* call_object());
INLINE(Object** call_object_address());
INLINE(void set_call_object(Object* target));
INLINE(Object** call_object_address());
inline void Visit(ObjectVisitor* v);

deps/v8/src/codegen.h (39 lines changed)

@@ -80,25 +80,6 @@ enum UnaryOverwriteMode { UNARY_OVERWRITE, UNARY_NO_OVERWRITE };
// Types of uncatchable exceptions.
enum UncatchableExceptionType { OUT_OF_MEMORY, TERMINATION };
#if V8_TARGET_ARCH_IA32
#include "ia32/codegen-ia32.h"
#elif V8_TARGET_ARCH_X64
#include "x64/codegen-x64.h"
#elif V8_TARGET_ARCH_ARM
#include "arm/codegen-arm.h"
#elif V8_TARGET_ARCH_MIPS
#include "mips/codegen-mips.h"
#else
#error Unsupported target architecture.
#endif
#include "register-allocator.h"
namespace v8 {
namespace internal {
#define INLINE_RUNTIME_FUNCTION_LIST(F) \
F(IsSmi, 1, 1) \
F(IsNonNegativeSmi, 1, 1) \
@@ -132,8 +113,26 @@ namespace internal {
F(MathPow, 2, 1) \
F(MathSin, 1, 1) \
F(MathCos, 1, 1) \
F(MathSqrt, 1, 1)
F(MathSqrt, 1, 1) \
F(IsRegExpEquivalent, 2, 1)
#if V8_TARGET_ARCH_IA32
#include "ia32/codegen-ia32.h"
#elif V8_TARGET_ARCH_X64
#include "x64/codegen-x64.h"
#elif V8_TARGET_ARCH_ARM
#include "arm/codegen-arm.h"
#elif V8_TARGET_ARCH_MIPS
#include "mips/codegen-mips.h"
#else
#error Unsupported target architecture.
#endif
#include "register-allocator.h"
namespace v8 {
namespace internal {
// Support for "structured" code comments.
#ifdef DEBUG

deps/v8/src/full-codegen.cc (2 lines changed)

@@ -917,6 +917,8 @@ void FullCodeGenerator::EmitInlineRuntimeCall(CallRuntime* expr) {
EmitSwapElements(expr->arguments());
} else if (strcmp("_GetFromCache", *name->ToCString()) == 0) {
EmitGetFromCache(expr->arguments());
} else if (strcmp("_IsRegExpEquivalent", *name->ToCString()) == 0) {
EmitIsRegExpEquivalent(expr->arguments());
} else {
UNREACHABLE();
}

deps/v8/src/full-codegen.h (1 line changed)

@@ -432,6 +432,7 @@ class FullCodeGenerator: public AstVisitor {
void EmitRegExpConstructResult(ZoneList<Expression*>* arguments);
void EmitSwapElements(ZoneList<Expression*>* arguments);
void EmitGetFromCache(ZoneList<Expression*>* arguments);
void EmitIsRegExpEquivalent(ZoneList<Expression*>* arguments);
// Platform-specific code for loading variables.
void EmitVariableLoad(Variable* expr, Expression::Context context);

deps/v8/src/globals.h (3 lines changed)

@@ -643,11 +643,14 @@ F FUNCTION_CAST(Address addr) {
#if defined(__GNUC__) && !defined(DEBUG)
#if (__GNUC__ >= 4)
#define INLINE(header) inline header __attribute__((always_inline))
#define NO_INLINE(header) header __attribute__((noinline))
#else
#define INLINE(header) inline __attribute__((always_inline)) header
#define NO_INLINE(header) __attribute__((noinline)) header
#endif
#else
#define INLINE(header) inline header
#define NO_INLINE(header) header
#endif
// Feature flags bit positions. They are mostly based on the CPUID spec.

deps/v8/src/heap-inl.h (6 lines changed)

@@ -390,7 +390,7 @@ void Heap::SetLastScriptId(Object* last_script_id) {
Object* __object__ = FUNCTION_CALL; \
if (!__object__->IsFailure()) RETURN_VALUE; \
if (__object__->IsOutOfMemoryFailure()) { \
v8::internal::V8::FatalProcessOutOfMemory("CALL_AND_RETRY_0"); \
v8::internal::V8::FatalProcessOutOfMemory("CALL_AND_RETRY_0", true);\
} \
if (!__object__->IsRetryAfterGC()) RETURN_EMPTY; \
Heap::CollectGarbage(Failure::cast(__object__)->requested(), \
@@ -398,7 +398,7 @@ void Heap::SetLastScriptId(Object* last_script_id) {
__object__ = FUNCTION_CALL; \
if (!__object__->IsFailure()) RETURN_VALUE; \
if (__object__->IsOutOfMemoryFailure()) { \
v8::internal::V8::FatalProcessOutOfMemory("CALL_AND_RETRY_1"); \
v8::internal::V8::FatalProcessOutOfMemory("CALL_AND_RETRY_1", true);\
} \
if (!__object__->IsRetryAfterGC()) RETURN_EMPTY; \
Counters::gc_last_resort_from_handles.Increment(); \
@@ -411,7 +411,7 @@ void Heap::SetLastScriptId(Object* last_script_id) {
if (__object__->IsOutOfMemoryFailure() || \
__object__->IsRetryAfterGC()) { \
/* TODO(1181417): Fix this. */ \
v8::internal::V8::FatalProcessOutOfMemory("CALL_AND_RETRY_2"); \
v8::internal::V8::FatalProcessOutOfMemory("CALL_AND_RETRY_2", true);\
} \
RETURN_EMPTY; \
} while (false)

deps/v8/src/heap.cc (19 lines changed)

@@ -4106,7 +4106,7 @@ bool Heap::ConfigureHeapDefault() {
}
void Heap::RecordStats(HeapStats* stats) {
void Heap::RecordStats(HeapStats* stats, bool take_snapshot) {
*stats->start_marker = 0xDECADE00;
*stats->end_marker = 0xDECADE01;
*stats->new_space_size = new_space_.Size();
@@ -4123,6 +4123,23 @@ void Heap::RecordStats(HeapStats* stats) {
*stats->cell_space_capacity = cell_space_->Capacity();
*stats->lo_space_size = lo_space_->Size();
GlobalHandles::RecordStats(stats);
*stats->memory_allocator_size = MemoryAllocator::Size();
*stats->memory_allocator_capacity =
MemoryAllocator::Size() + MemoryAllocator::Available();
if (take_snapshot) {
HeapIterator iterator;
for (HeapObject* obj = iterator.next();
obj != NULL;
obj = iterator.next()) {
// Note: snapshot won't be precise because IsFreeListNode returns true
// for any bytearray.
if (FreeListNode::IsFreeListNode(obj)) continue;
InstanceType type = obj->map()->instance_type();
ASSERT(0 <= type && type <= LAST_TYPE);
stats->objects_per_type[type]++;
stats->size_per_type[type] += obj->Size();
}
}
}

deps/v8/src/heap.h (58 lines changed)

@@ -981,7 +981,7 @@ class Heap : public AllStatic {
static RootListIndex RootIndexForExternalArrayType(
ExternalArrayType array_type);
static void RecordStats(HeapStats* stats);
static void RecordStats(HeapStats* stats, bool take_snapshot = false);
static Scavenger GetScavenger(int instance_type, int instance_size);
@@ -1195,12 +1195,12 @@
static bool CreateInitialMaps();
static bool CreateInitialObjects();
// These four Create*EntryStub functions are here because of a gcc-4.4 bug
// that assigns wrong vtable entries.
static void CreateCEntryStub();
static void CreateJSEntryStub();
static void CreateJSConstructEntryStub();
static void CreateRegExpCEntryStub();
// These four Create*EntryStub functions are here and forced to not be inlined
// because of a gcc-4.4 bug that assigns wrong vtable entries.
NO_INLINE(static void CreateCEntryStub());
NO_INLINE(static void CreateJSEntryStub());
NO_INLINE(static void CreateJSConstructEntryStub());
NO_INLINE(static void CreateRegExpCEntryStub());
static void CreateFixedStubs();
@@ -1324,26 +1324,30 @@
class HeapStats {
public:
int* start_marker;
int* new_space_size;
int* new_space_capacity;
int* old_pointer_space_size;
int* old_pointer_space_capacity;
int* old_data_space_size;
int* old_data_space_capacity;
int* code_space_size;
int* code_space_capacity;
int* map_space_size;
int* map_space_capacity;
int* cell_space_size;
int* cell_space_capacity;
int* lo_space_size;
int* global_handle_count;
int* weak_global_handle_count;
int* pending_global_handle_count;
int* near_death_global_handle_count;
int* destroyed_global_handle_count;
int* end_marker;
int* start_marker; // 0
int* new_space_size; // 1
int* new_space_capacity; // 2
int* old_pointer_space_size; // 3
int* old_pointer_space_capacity; // 4
int* old_data_space_size; // 5
int* old_data_space_capacity; // 6
int* code_space_size; // 7
int* code_space_capacity; // 8
int* map_space_size; // 9
int* map_space_capacity; // 10
int* cell_space_size; // 11
int* cell_space_capacity; // 12
int* lo_space_size; // 13
int* global_handle_count; // 14
int* weak_global_handle_count; // 15
int* pending_global_handle_count; // 16
int* near_death_global_handle_count; // 17
int* destroyed_global_handle_count; // 18
int* memory_allocator_size; // 19
int* memory_allocator_capacity; // 20
int* objects_per_type; // 21
int* size_per_type; // 22
int* end_marker; // 23
};

deps/v8/src/ia32/assembler-ia32-inl.h (19 lines changed)

@@ -121,32 +121,33 @@ Address* RelocInfo::target_reference_address() {
Address RelocInfo::call_address() {
ASSERT(IsPatchedReturnSequence());
ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
(IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
return Assembler::target_address_at(pc_ + 1);
}
void RelocInfo::set_call_address(Address target) {
ASSERT(IsPatchedReturnSequence());
ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
(IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
Assembler::set_target_address_at(pc_ + 1, target);
}
Object* RelocInfo::call_object() {
ASSERT(IsPatchedReturnSequence());
return *call_object_address();
}
Object** RelocInfo::call_object_address() {
ASSERT(IsPatchedReturnSequence());
return reinterpret_cast<Object**>(pc_ + 1);
void RelocInfo::set_call_object(Object* target) {
*call_object_address() = target;
}
void RelocInfo::set_call_object(Object* target) {
ASSERT(IsPatchedReturnSequence());
*call_object_address() = target;
Object** RelocInfo::call_object_address() {
ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
(IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
return reinterpret_cast<Object**>(pc_ + 1);
}

deps/v8/src/ia32/assembler-ia32.cc (3 lines changed)

@@ -158,7 +158,8 @@ void Displacement::init(Label* L, Type type) {
const int RelocInfo::kApplyMask =
RelocInfo::kCodeTargetMask | 1 << RelocInfo::RUNTIME_ENTRY |
1 << RelocInfo::JS_RETURN | 1 << RelocInfo::INTERNAL_REFERENCE;
1 << RelocInfo::JS_RETURN | 1 << RelocInfo::INTERNAL_REFERENCE |
1 << RelocInfo::DEBUG_BREAK_SLOT;
bool RelocInfo::IsCodedSpecially() {

deps/v8/src/ia32/codegen-ia32.cc (85 lines changed)

@@ -138,7 +138,6 @@ CodeGenState::~CodeGenState() {
owner_->set_state(previous_);
}
// -------------------------------------------------------------------------
// CodeGenerator implementation.
@@ -5309,6 +5308,30 @@ void DeferredRegExpLiteral::Generate() {
}
class DeferredAllocateInNewSpace: public DeferredCode {
public:
DeferredAllocateInNewSpace(int size, Register target)
: size_(size), target_(target) {
ASSERT(size >= kPointerSize && size <= Heap::MaxObjectSizeInNewSpace());
set_comment("[ DeferredAllocateInNewSpace");
}
void Generate();
private:
int size_;
Register target_;
};
void DeferredAllocateInNewSpace::Generate() {
__ push(Immediate(Smi::FromInt(size_)));
__ CallRuntime(Runtime::kAllocateInNewSpace, 1);
if (!target_.is(eax)) {
__ mov(target_, eax);
}
}
void CodeGenerator::VisitRegExpLiteral(RegExpLiteral* node) {
ASSERT(!in_safe_int32_mode());
Comment cmnt(masm_, "[ RegExp Literal");
@@ -5339,10 +5362,33 @@ void CodeGenerator::VisitRegExpLiteral(RegExpLiteral* node) {
__ cmp(boilerplate.reg(), Factory::undefined_value());
deferred->Branch(equal);
deferred->BindExit();
literals.Unuse();
// Push the boilerplate object.
// Register of boilerplate contains RegExp object.
Result tmp = allocator()->Allocate();
ASSERT(tmp.is_valid());
int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
DeferredAllocateInNewSpace* allocate_fallback =
new DeferredAllocateInNewSpace(size, literals.reg());
frame_->Push(&boilerplate);
frame_->SpillTop();
__ AllocateInNewSpace(size,
literals.reg(),
tmp.reg(),
no_reg,
allocate_fallback->entry_label(),
TAG_OBJECT);
allocate_fallback->BindExit();
boilerplate = frame_->Pop();
// Copy from boilerplate to clone and return clone.
for (int i = 0; i < size; i += kPointerSize) {
__ mov(tmp.reg(), FieldOperand(boilerplate.reg(), i));
__ mov(FieldOperand(literals.reg(), i), tmp.reg());
}
frame_->Push(&literals);
}
@@ -7525,6 +7571,39 @@ void CodeGenerator::GenerateMathSqrt(ZoneList<Expression*>* args) {
}
void CodeGenerator::GenerateIsRegExpEquivalent(ZoneList<Expression*>* args) {
ASSERT_EQ(2, args->length());
Load(args->at(0));
Load(args->at(1));
Result right_res = frame_->Pop();
Result left_res = frame_->Pop();
right_res.ToRegister();
left_res.ToRegister();
Result tmp_res = allocator()->Allocate();
ASSERT(tmp_res.is_valid());
Register right = right_res.reg();
Register left = left_res.reg();
Register tmp = tmp_res.reg();
right_res.Unuse();
left_res.Unuse();
tmp_res.Unuse();
__ cmp(left, Operand(right));
destination()->true_target()->Branch(equal);
// Fail if either is a non-HeapObject.
__ mov(tmp, left);
__ and_(Operand(tmp), right);
__ test(Operand(tmp), Immediate(kSmiTagMask));
destination()->false_target()->Branch(equal);
__ CmpObjectType(left, JS_REGEXP_TYPE, tmp);
destination()->false_target()->Branch(not_equal);
__ cmp(tmp, FieldOperand(right, HeapObject::kMapOffset));
destination()->false_target()->Branch(not_equal);
__ mov(tmp, FieldOperand(left, JSRegExp::kDataOffset));
__ cmp(tmp, FieldOperand(right, JSRegExp::kDataOffset));
destination()->Split(equal);
}
void CodeGenerator::VisitCallRuntime(CallRuntime* node) {
ASSERT(!in_safe_int32_mode());
if (CheckForInlineRuntimeCall(node)) {

deps/v8/src/ia32/codegen-ia32.h (5 lines changed)

@@ -635,7 +635,7 @@ class CodeGenerator: public AstVisitor {
// Instantiate the function based on the shared function info.
Result InstantiateFunction(Handle<SharedFunctionInfo> function_info);
// Support for type checks.
// Support for types.
void GenerateIsSmi(ZoneList<Expression*>* args);
void GenerateIsNonNegativeSmi(ZoneList<Expression*>* args);
void GenerateIsArray(ZoneList<Expression*>* args);
@@ -710,6 +710,9 @@
void GenerateMathCos(ZoneList<Expression*>* args);
void GenerateMathSqrt(ZoneList<Expression*>* args);
// Check whether two RegExps are equivalent
void GenerateIsRegExpEquivalent(ZoneList<Expression*>* args);
// Simple condition analysis.
enum ConditionAnalysis {
ALWAYS_TRUE,

deps/v8/src/ia32/full-codegen-ia32.cc (84 lines changed)

@@ -1196,27 +1196,54 @@ void FullCodeGenerator::EmitVariableLoad(Variable* var,
void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
Comment cmnt(masm_, "[ RegExpLiteral");
Label done;
Label materialized;
// Registers will be used as follows:
// edi = JS function.
// ebx = literals array.
// eax = regexp literal.
// ecx = literals array.
// ebx = regexp literal.
// eax = regexp literal clone.
__ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
__ mov(ebx, FieldOperand(edi, JSFunction::kLiteralsOffset));
__ mov(ecx, FieldOperand(edi, JSFunction::kLiteralsOffset));
int literal_offset =
FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
__ mov(eax, FieldOperand(ebx, literal_offset));
__ cmp(eax, Factory::undefined_value());
__ j(not_equal, &done);
__ mov(ebx, FieldOperand(ecx, literal_offset));
__ cmp(ebx, Factory::undefined_value());
__ j(not_equal, &materialized);
// Create regexp literal using runtime function
// Result will be in eax.
__ push(ebx);
__ push(ecx);
__ push(Immediate(Smi::FromInt(expr->literal_index())));
__ push(Immediate(expr->pattern()));
__ push(Immediate(expr->flags()));
__ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
// Label done:
__ bind(&done);
__ mov(ebx, eax);
__ bind(&materialized);
int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
Label allocated, runtime_allocate;
__ AllocateInNewSpace(size, eax, ecx, edx, &runtime_allocate, TAG_OBJECT);
__ jmp(&allocated);
__ bind(&runtime_allocate);
__ push(ebx);
__ push(Immediate(Smi::FromInt(size)));
__ CallRuntime(Runtime::kAllocateInNewSpace, 1);
__ pop(ebx);
__ bind(&allocated);
// Copy the content into the newly allocated memory.
// (Unroll copy loop once for better throughput).
for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
__ mov(edx, FieldOperand(ebx, i));
__ mov(ecx, FieldOperand(ebx, i + kPointerSize));
__ mov(FieldOperand(eax, i), edx);
__ mov(FieldOperand(eax, i + kPointerSize), ecx);
}
if ((size % (2 * kPointerSize)) != 0) {
__ mov(edx, FieldOperand(ebx, size - kPointerSize));
__ mov(FieldOperand(eax, size - kPointerSize), edx);
}
Apply(context_, eax);
}
@@ -2650,6 +2677,43 @@ void FullCodeGenerator::EmitGetFromCache(ZoneList<Expression*>* args) {
}
void FullCodeGenerator::EmitIsRegExpEquivalent(ZoneList<Expression*>* args) {
ASSERT_EQ(2, args->length());
Register right = eax;
Register left = ebx;
Register tmp = ecx;
VisitForValue(args->at(0), kStack);
VisitForValue(args->at(1), kAccumulator);
__ pop(left);
Label done, fail, ok;
__ cmp(left, Operand(right));
__ j(equal, &ok);
// Fail if either is a non-HeapObject.
__ mov(tmp, left);
__ and_(Operand(tmp), right);
__ test(Operand(tmp), Immediate(kSmiTagMask));
__ j(zero, &fail);
__ CmpObjectType(left, JS_REGEXP_TYPE, tmp);
__ j(not_equal, &fail);
__ cmp(tmp, FieldOperand(right, HeapObject::kMapOffset));
__ j(not_equal, &fail);
__ mov(tmp, FieldOperand(left, JSRegExp::kDataOffset));
__ cmp(tmp, FieldOperand(right, JSRegExp::kDataOffset));
__ j(equal, &ok);
__ bind(&fail);
__ mov(eax, Immediate(Factory::false_value()));
__ jmp(&done);
__ bind(&ok);
__ mov(eax, Immediate(Factory::true_value()));
__ bind(&done);
Apply(context_, eax);
}
void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
Handle<String> name = expr->name();
if (name->length() > 0 && name->Get(0) == '_') {

deps/v8/src/ia32/ic-ia32.cc (14 lines changed)

@@ -545,7 +545,7 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
// -- edx : receiver
// -- esp[0] : return address
// -----------------------------------
Label slow, check_string, index_smi, index_string;
Label slow, check_string, index_smi, index_string, property_array_property;
Label check_pixel_array, probe_dictionary, check_number_dictionary;
// Check that the key is a smi.
@@ -652,7 +652,7 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
__ cmp(eax, Operand::StaticArray(edi, times_1, cache_keys));
__ j(not_equal, &slow);
// Get field offset and check that it is an in-object property.
// Get field offset.
// edx : receiver
// ebx : receiver's map
// eax : key
@@ -663,7 +663,7 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
Operand::StaticArray(ecx, times_pointer_size, cache_field_offsets));
__ movzx_b(ecx, FieldOperand(ebx, Map::kInObjectPropertiesOffset));
__ sub(edi, Operand(ecx));
__ j(above_equal, &slow);
__ j(above_equal, &property_array_property);
// Load in-object property.
__ movzx_b(ecx, FieldOperand(ebx, Map::kInstanceSizeOffset));
@@ -672,6 +672,14 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
__ IncrementCounter(&Counters::keyed_load_generic_lookup_cache, 1);
__ ret(0);
// Load property array property.
__ bind(&property_array_property);
__ mov(eax, FieldOperand(edx, JSObject::kPropertiesOffset));
__ mov(eax, FieldOperand(eax, edi, times_pointer_size,
FixedArray::kHeaderSize));
__ IncrementCounter(&Counters::keyed_load_generic_lookup_cache, 1);
__ ret(0);
// Do a quick inline probe of the receiver's dictionary, if it
// exists.
__ bind(&probe_dictionary);
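
The new property_array_property path exists because named properties live in two places: the first few (Map::kInObjectPropertiesOffset of them) inside the object body, the rest in a separate FixedArray reached through JSObject::kPropertiesOffset. Previously the generic stub jumped to the slow path for the second kind; now it serves them inline. A JavaScript-level model of the dispatch (the layout fields are hypothetical stand-ins for what the stub reads from the map):

function keyedLoadField(obj, fieldIndex) {
  if (fieldIndex < obj.inObjectCount) {
    // In-object property: stored directly in the object's body.
    return obj.inObjectFields[fieldIndex];
  }
  // New in this commit: out-of-object properties come from the backing
  // properties array instead of falling through to the runtime.
  return obj.propertiesArray[fieldIndex - obj.inObjectCount];
}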

deps/v8/src/ia32/macro-assembler-ia32.cc (27 lines changed)

@@ -672,20 +672,33 @@ void MacroAssembler::AllocateInNewSpace(int object_size,
// Load address of new object into result.
LoadAllocationTopHelper(result, result_end, scratch, flags);
Register top_reg = result_end.is_valid() ? result_end : result;
// Calculate new top and bail out if new space is exhausted.
ExternalReference new_space_allocation_limit =
ExternalReference::new_space_allocation_limit_address();
lea(result_end, Operand(result, object_size));
cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
j(above, gc_required, not_taken);
// Tag result if requested.
if ((flags & TAG_OBJECT) != 0) {
lea(result, Operand(result, kHeapObjectTag));
if (top_reg.is(result)) {
add(Operand(top_reg), Immediate(object_size));
} else {
lea(top_reg, Operand(result, object_size));
}
cmp(top_reg, Operand::StaticVariable(new_space_allocation_limit));
j(above, gc_required, not_taken);
// Update allocation top.
UpdateAllocationTopHelper(result_end, scratch);
UpdateAllocationTopHelper(top_reg, scratch);
// Tag result if requested.
if (top_reg.is(result)) {
if ((flags & TAG_OBJECT) != 0) {
sub(Operand(result), Immediate(object_size - kHeapObjectTag));
} else {
sub(Operand(result), Immediate(object_size));
}
} else if ((flags & TAG_OBJECT) != 0) {
add(Operand(result), Immediate(kHeapObjectTag));
}
}
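
AllocateInNewSpace is a bump-pointer allocator: read the current allocation top, advance it by object_size, compare against the space limit, and either commit the new top or branch to gc_required. The rewrite additionally handles the case where no spare result_end register exists, bumping result itself and subtracting the size back out at the end. The core logic, sketched in JavaScript (hypothetical names; the real code works on raw addresses via ExternalReference):

var newSpace = { top: 0, limit: 1 << 20 };  // toy space, 1 MB

function allocateInNewSpace(size, gcRequired) {
  var result = newSpace.top;
  var newTop = result + size;
  if (newTop > newSpace.limit) {
    return gcRequired();   // caller's fallback, e.g. the runtime allocator
  }
  newSpace.top = newTop;   // commit the new allocation top
  return result;           // address of the new object (untagged)
}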

deps/v8/src/ia32/stub-cache-ia32.cc (35 lines changed)

@@ -1287,9 +1287,11 @@ void CallStubCompiler::GenerateNameCheck(String* name, Label* miss) {
}
void CallStubCompiler::GenerateMissBranch() {
Handle<Code> ic = ComputeCallMiss(arguments().immediate(), kind_);
__ jmp(ic, RelocInfo::CODE_TARGET);
Object* CallStubCompiler::GenerateMissBranch() {
Object* obj = StubCache::ComputeCallMiss(arguments().immediate(), kind_);
if (obj->IsFailure()) return obj;
__ jmp(Handle<Code>(Code::cast(obj)), RelocInfo::CODE_TARGET);
return obj;
}
@@ -1340,7 +1342,8 @@ Object* CallStubCompiler::CompileCallField(JSObject* object,
// Handle call cache miss.
__ bind(&miss);
GenerateMissBranch();
Object* obj = GenerateMissBranch();
if (obj->IsFailure()) return obj;
// Return the generated code.
return GetCode(FIELD, name);
@@ -1487,7 +1490,8 @@ Object* CallStubCompiler::CompileArrayPushCall(Object* object,
}
__ bind(&miss);
GenerateMissBranch();
Object* obj = GenerateMissBranch();
if (obj->IsFailure()) return obj;
// Return the generated code.
return GetCode(function);
@@ -1570,7 +1574,8 @@ Object* CallStubCompiler::CompileArrayPopCall(Object* object,
1);
__ bind(&miss);
GenerateMissBranch();
Object* obj = GenerateMissBranch();
if (obj->IsFailure()) return obj;
// Return the generated code.
return GetCode(function);
@@ -1633,8 +1638,8 @@ Object* CallStubCompiler::CompileStringCharCodeAtCall(Object* object,
__ ret((argc + 1) * kPointerSize);
__ bind(&miss);
GenerateMissBranch();
Object* obj = GenerateMissBranch();
if (obj->IsFailure()) return obj;
// Return the generated code.
return GetCode(function);
@@ -1700,9 +1705,8 @@ Object* CallStubCompiler::CompileStringCharAtCall(Object* object,
__ ret((argc + 1) * kPointerSize);
__ bind(&miss);
// Restore function name in ecx.
GenerateMissBranch();
Object* obj = GenerateMissBranch();
if (obj->IsFailure()) return obj;
// Return the generated code.
return GetCode(function);
@@ -1856,7 +1860,8 @@ Object* CallStubCompiler::CompileCallConstant(Object* object,
FreeSpaceForFastApiCall(masm(), eax);
}
__ bind(&miss_in_smi_check);
GenerateMissBranch();
Object* obj = GenerateMissBranch();
if (obj->IsFailure()) return obj;
// Return the generated code.
return GetCode(function);
@@ -1920,7 +1925,8 @@ Object* CallStubCompiler::CompileCallInterceptor(JSObject* object,
// Handle load cache miss.
__ bind(&miss);
GenerateMissBranch();
Object* obj = GenerateMissBranch();
if (obj->IsFailure()) return obj;
// Return the generated code.
return GetCode(INTERCEPTOR, name);
@@ -2005,7 +2011,8 @@ Object* CallStubCompiler::CompileCallGlobal(JSObject* object,
// Handle call cache miss.
__ bind(&miss);
__ IncrementCounter(&Counters::call_global_inline_miss, 1);
GenerateMissBranch();
Object* obj = GenerateMissBranch();
if (obj->IsFailure()) return obj;
// Return the generated code.
return GetCode(NORMAL, name);

deps/v8/src/objects.cc (2 lines changed)

@@ -4738,7 +4738,7 @@ bool String::SlowEquals(String* other) {
}
if (lhs->IsFlat()) {
if (IsAsciiRepresentation()) {
if (lhs->IsAsciiRepresentation()) {
Vector<const char> vec1 = lhs->ToAsciiVector();
if (rhs->IsFlat()) {
if (rhs->IsAsciiRepresentation()) {

deps/v8/src/objects.h (1 line changed)

@@ -3888,6 +3888,7 @@ class JSRegExp: public JSObject {
static const int kIgnoreCaseFieldIndex = 2;
static const int kMultilineFieldIndex = 3;
static const int kLastIndexFieldIndex = 4;
static const int kInObjectFieldCount = 5;
};

deps/v8/src/parser.cc (93 lines changed)

@@ -210,6 +210,7 @@ class Parser {
Expression* ParsePrimaryExpression(bool* ok);
Expression* ParseArrayLiteral(bool* ok);
Expression* ParseObjectLiteral(bool* ok);
ObjectLiteral::Property* ParseObjectLiteralGetSet(bool is_getter, bool* ok);
Expression* ParseRegExpLiteral(bool seen_equal, bool* ok);
// Populate the constant properties fixed array for a materialized object
@@ -3376,11 +3377,7 @@ Expression* Parser::ParsePrimaryExpression(bool* ok) {
// default case.
default: {
Token::Value tok = peek();
// Token::Peek returns the value of the next token but
// location() gives info about the current token.
// Therefore, we need to read ahead to the next token
Next();
Token::Value tok = Next();
ReportUnexpectedToken(tok);
*ok = false;
return NULL;
@@ -3584,6 +3581,35 @@ void Parser::BuildObjectLiteralConstantProperties(
}
ObjectLiteral::Property* Parser::ParseObjectLiteralGetSet(bool is_getter,
bool* ok) {
// Special handling of getter and setter syntax:
// { ... , get foo() { ... }, ... , set foo(v) { ... v ... } , ... }
// We have already read the "get" or "set" keyword.
Token::Value next = Next();
if (next == Token::IDENTIFIER ||
next == Token::STRING ||
next == Token::NUMBER ||
Token::IsKeyword(next)) {
Handle<String> name =
factory()->LookupSymbol(scanner_.literal_string(),
scanner_.literal_length());
FunctionLiteral* value =
ParseFunctionLiteral(name,
RelocInfo::kNoPosition,
DECLARATION,
CHECK_OK);
ObjectLiteral::Property* property =
NEW(ObjectLiteral::Property(is_getter, value));
return property;
} else {
ReportUnexpectedToken(next);
*ok = false;
return NULL;
}
}
Expression* Parser::ParseObjectLiteral(bool* ok) {
// ObjectLiteral ::
// '{' (
@@ -3601,64 +3627,39 @@ Expression* Parser::ParseObjectLiteral(bool* ok) {
Token::Value next = peek();
switch (next) {
case Token::IDENTIFIER: {
// Store identifier keys as literal symbols to avoid
// resolving them when compiling code for the object
// literal.
bool is_getter = false;
bool is_setter = false;
Handle<String> id =
ParseIdentifierOrGetOrSet(&is_getter, &is_setter, CHECK_OK);
if (is_getter || is_setter) {
// Special handling of getter and setter syntax.
Handle<String> name;
next = peek();
if (next == Token::IDENTIFIER ||
next == Token::STRING ||
next == Token::NUMBER ||
Token::IsKeyword(next)) {
Consume(next);
Handle<String> name =
factory()->LookupSymbol(scanner_.literal_string(),
scanner_.literal_length());
FunctionLiteral* value =
ParseFunctionLiteral(name,
RelocInfo::kNoPosition,
DECLARATION,
CHECK_OK);
if ((is_getter || is_setter) && peek() != Token::COLON) {
ObjectLiteral::Property* property =
NEW(ObjectLiteral::Property(is_getter, value));
ParseObjectLiteralGetSet(is_getter, CHECK_OK);
if (IsBoilerplateProperty(property)) {
number_of_boilerplate_properties++;
}
properties.Add(property);
if (peek() != Token::RBRACE) Expect(Token::COMMA, CHECK_OK);
continue; // restart the while
}
}
// Failed to parse as get/set property, so it's just a property
// called "get" or "set".
key = NEW(Literal(id));
break;
}
#define CASE_KEYWORD(name, ignore1, ignore2) \
case Token::name:
TOKEN_LIST(IGNORE_TOKEN, CASE_KEYWORD, IGNORE_TOKEN)
#undef CASE_KEYWORD
// FALLTHROUGH - keyword tokens fall through to the same code as strings.
case Token::STRING: {
Consume(next);
Consume(Token::STRING);
Handle<String> string =
factory()->LookupSymbol(scanner_.literal_string(),
scanner_.literal_length());
uint32_t index;
if (next == Token::STRING &&
!string.is_null() &&
if (!string.is_null() &&
string->AsArrayIndex(&index)) {
key = NewNumberLiteral(index);
} else {
key = NEW(Literal(string));
break;
}
key = NEW(Literal(string));
break;
}
case Token::NUMBER: {
Consume(Token::NUMBER);
double value =
@@ -3666,10 +3667,20 @@ Expression* Parser::ParseObjectLiteral(bool* ok) {
key = NewNumberLiteral(value);
break;
}
default:
Expect(Token::RBRACE, CHECK_OK);
break;
if (Token::IsKeyword(next)) {
Consume(next);
Handle<String> string =
factory()->LookupSymbol(scanner_.literal_string(),
scanner_.literal_length());
key = NEW(Literal(string));
} else {
// Unexpected token.
Token::Value next = Next();
ReportUnexpectedToken(next);
*ok = false;
return NULL;
}
}
Expect(Token::COLON, CHECK_OK);
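
This parser refactoring is what backs the ChangeLog's "ES5 property names" item: after get/set the parser now accepts identifiers, strings, numbers and keywords as names, and get/set followed by a colon still parse as ordinary property names. For example, all of the following parse after this change:

var o = {
  get: 1,                      // "get" as a plain property name
  set: 2,                      // "set" as a plain property name
  for: 3,                      // keyword as a property name
  get x() { return 4; },       // getter with an identifier name
  set 'y'(v) { this.z = v; },  // setter with a string name
  get 42() { return 5; }       // getter with a numeric name
};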

deps/v8/src/regexp.js (6 lines changed)

@@ -126,7 +126,7 @@ function RegExpCache() {
this.regExp = 0;
this.subject = 0;
this.replaceString = 0;
this.lastIndex = 0;
this.lastIndex = 0; // Also used for splitLimit when type is "split"
this.answer = 0;
// answerSaved marks whether the contents of answer is valid for a cache
// hit in RegExpExec, StringMatch and StringSplit.
@@ -194,7 +194,7 @@ function RegExpExec(string) {
if (%_ObjectEquals(cache.type, 'exec') &&
%_ObjectEquals(cache.lastIndex, this.lastIndex) &&
%_ObjectEquals(cache.regExp, this) &&
%_IsRegExpEquivalent(cache.regExp, this) &&
%_ObjectEquals(cache.subject, string)) {
if (cache.answerSaved) {
return CloneRegExpResult(cache.answer);
@@ -290,7 +290,7 @@ function RegExpTest(string) {
var lastIndex = this.lastIndex;
var cache = regExpCache;
if (%_ObjectEquals(cache.type, 'test') &&
%_ObjectEquals(cache.regExp, this) &&
%_IsRegExpEquivalent(cache.regExp, this) &&
%_ObjectEquals(cache.subject, string) &&
%_ObjectEquals(cache.lastIndex, lastIndex)) {
return cache.answer;
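
Since a regexp literal now evaluates to a fresh object on each evaluation, the old identity test %_ObjectEquals(cache.regExp, this) would never hit for literal-based calls. %_IsRegExpEquivalent instead accepts two distinct JSRegExp objects that share a map and the same internal data array, which observably means the same source and flags. An approximation in plain JavaScript:

function isRegExpEquivalent(a, b) {
  if (a === b) return true;               // identity still hits first
  if (!(a instanceof RegExp) || !(b instanceof RegExp)) return false;
  return a.source === b.source &&         // same pattern ...
         a.global === b.global &&         // ... and the same flags
         a.ignoreCase === b.ignoreCase &&
         a.multiline === b.multiline;
}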

deps/v8/src/runtime.cc (20 lines changed)

@@ -7587,6 +7587,26 @@ static Object* Runtime_SetNewFunctionAttributes(Arguments args) {
}
static Object* Runtime_AllocateInNewSpace(Arguments args) {
// Allocate a block of memory in NewSpace (filled with a filler).
// Use as fallback for allocation in generated code when NewSpace
// is full.
ASSERT(args.length() == 1);
CONVERT_ARG_CHECKED(Smi, size_smi, 0);
int size = size_smi->value();
RUNTIME_ASSERT(IsAligned(size, kPointerSize));
RUNTIME_ASSERT(size > 0);
static const int kMinFreeNewSpaceAfterGC =
Heap::InitialSemiSpaceSize() * 3/4;
RUNTIME_ASSERT(size <= kMinFreeNewSpaceAfterGC);
Object* allocation = Heap::new_space()->AllocateRaw(size);
if (!allocation->IsFailure()) {
Heap::CreateFillerObjectAt(HeapObject::cast(allocation)->address(), size);
}
return allocation;
}
// Push an array unto an array of arrays if it is not already in the
// array. Returns true if the element was pushed on the stack and
// false otherwise.

deps/v8/src/runtime.h (1 line changed)

@@ -80,6 +80,7 @@ namespace internal {
F(NewArgumentsFast, 3, 1) \
F(LazyCompile, 1, 1) \
F(SetNewFunctionAttributes, 1, 1) \
F(AllocateInNewSpace, 1, 1) \
\
/* Array join support */ \
F(PushIfAbsent, 2, 1) \

deps/v8/src/string.js (13 lines changed)

@@ -177,7 +177,7 @@ function StringMatch(regexp) {
var saveAnswer = false;
if (%_ObjectEquals(cache.type, 'match') &&
%_ObjectEquals(cache.regExp, regexp) &&
%_IsRegExpEquivalent(cache.regExp, regexp) &&
%_ObjectEquals(cache.subject, subject)) {
if (cache.answerSaved) {
return CloneDenseArray(cache.answer);
@@ -274,8 +274,8 @@ function StringReplace(search, replace) {
// Helper function for regular expressions in String.prototype.replace.
function StringReplaceRegExp(subject, regexp, replace) {
var cache = regExpCache;
if (%_ObjectEquals(cache.regExp, regexp) &&
%_ObjectEquals(cache.type, 'replace') &&
if (%_ObjectEquals(cache.type, 'replace') &&
%_IsRegExpEquivalent(cache.regExp, regexp) &&
%_ObjectEquals(cache.replaceString, replace) &&
%_ObjectEquals(cache.subject, subject)) {
return cache.answer;
@@ -609,8 +609,9 @@ function StringSplit(separator, limit) {
var saveAnswer = false;
if (%_ObjectEquals(cache.type, 'split') &&
%_ObjectEquals(cache.regExp, separator) &&
%_ObjectEquals(cache.subject, subject)) {
%_IsRegExpEquivalent(cache.regExp, separator) &&
%_ObjectEquals(cache.subject, subject) &&
%_ObjectEquals(cache.lastIndex, limit)) {
if (cache.answerSaved) {
return CloneDenseArray(cache.answer);
} else {
@@ -621,6 +622,8 @@ function StringSplit(separator, limit) {
cache.type = 'split';
cache.regExp = separator;
cache.subject = subject;
// Reuse lastIndex field for split limit when type is "split".
cache.lastIndex = limit;
%_Log('regexp', 'regexp-split,%0S,%1r', [subject, separator]);
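
The StringSplit change also closes a small cache bug: the split cache previously ignored the limit argument, so an answer computed with one limit could be handed back for a call with a different one. The otherwise unused lastIndex field now stores the limit and participates in the hit test. Observable behaviour, assuming this build:

var s = 'a,b,c';
s.split(/,/);     // ['a', 'b', 'c']  (answer is cached)
s.split(/,/, 2);  // ['a', 'b'] - the limit is now part of the cache key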

deps/v8/src/stub-cache.cc (7 lines changed)

@@ -822,13 +822,6 @@ void StubCache::Clear() {
// StubCompiler implementation.
// Support function for computing call IC miss stubs.
Handle<Code> ComputeCallMiss(int argc, Code::Kind kind) {
CALL_HEAP_FUNCTION(StubCache::ComputeCallMiss(argc, kind), Code);
}
Object* LoadCallbackProperty(Arguments args) {
ASSERT(args[0]->IsJSObject());
ASSERT(args[1]->IsJSObject());

deps/v8/src/stub-cache.h (8 lines changed)

@@ -336,10 +336,6 @@ Object* CallInterceptorProperty(Arguments args);
Object* KeyedLoadPropertyWithInterceptor(Arguments args);
// Support function for computing call IC miss stubs.
Handle<Code> ComputeCallMiss(int argc, Code::Kind kind);
// The stub compiler compiles stubs for the stub cache.
class StubCompiler BASE_EMBEDDED {
public:
@@ -688,7 +684,9 @@ class CallStubCompiler: public StubCompiler {
void GenerateNameCheck(String* name, Label* miss);
void GenerateMissBranch();
// Generates a jump to CallIC miss stub. Returns Failure if the jump cannot
// be generated.
Object* GenerateMissBranch();
};
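
GenerateMissBranch changed its signature because computing the miss stub can itself require an allocation that fails; rather than aborting through a handle, it returns a raw Object* that is either the code object or a Failure, and every caller checks IsFailure and unwinds. The same propagate-instead-of-abort pattern, modeled in JavaScript with a hypothetical failure sentinel:

var FAILURE = { isFailure: true };  // stands in for v8's internal Failure

function computeCallMiss(heap) {
  // Allocating the stub can fail when the heap is exhausted.
  return heap.full ? FAILURE : { kind: 'call-miss-stub' };
}

function generateMissBranch(heap) {
  var obj = computeCallMiss(heap);
  if (obj.isFailure) return obj;    // propagate; caller retries after GC
  // ... emit the jump to the miss stub here ...
  return obj;
}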

deps/v8/src/v8.h (3 lines changed)

@@ -92,7 +92,8 @@ class V8 : public AllStatic {
static void SetFatalError();
// Report process out of memory. Implementation found in api.cc.
static void FatalProcessOutOfMemory(const char* location);
static void FatalProcessOutOfMemory(const char* location,
bool take_snapshot = false);
// Random number generation support. Not cryptographically safe.
static uint32_t Random();

deps/v8/src/v8natives.js (4 lines changed)

@@ -749,7 +749,7 @@ function ObjectSeal(obj) {
if (desc.isConfigurable()) desc.setConfigurable(false);
DefineOwnProperty(obj, name, desc, true);
}
ObjectPreventExtension(obj);
return ObjectPreventExtension(obj);
}
@@ -766,7 +766,7 @@ function ObjectFreeze(obj) {
if (desc.isConfigurable()) desc.setConfigurable(false);
DefineOwnProperty(obj, name, desc, true);
}
ObjectPreventExtension(obj);
return ObjectPreventExtension(obj);
}

deps/v8/src/version.cc (4 lines changed)

@@ -34,8 +34,8 @@
// cannot be changed without changing the SCons build script.
#define MAJOR_VERSION 2
#define MINOR_VERSION 3
#define BUILD_NUMBER 5
#define PATCH_LEVEL 0
#define BUILD_NUMBER 6
#define PATCH_LEVEL 1
#define CANDIDATE_VERSION false
// Define SONAME to have the SCons build put a specific SONAME into the

deps/v8/src/x64/assembler-x64-inl.h (19 lines changed)

@@ -201,14 +201,6 @@ void RelocInfo::apply(intptr_t delta) {
Memory::Address_at(pc_) += static_cast<int32_t>(delta);
} else if (IsCodeTarget(rmode_)) {
Memory::int32_at(pc_) -= static_cast<int32_t>(delta);
} else if (rmode_ == JS_RETURN && IsPatchedReturnSequence()) {
// Special handling of js_return when a break point is set (call
// instruction has been inserted).
Memory::int32_at(pc_ + 1) -= static_cast<int32_t>(delta); // relocate entry
} else if (rmode_ == DEBUG_BREAK_SLOT && IsPatchedDebugBreakSlotSequence()) {
// Special handling of debug break slot when a break point is set (call
// instruction has been inserted).
Memory::int32_at(pc_ + 1) -= static_cast<int32_t>(delta); // relocate entry
}
}
@@ -303,33 +295,34 @@ bool RelocInfo::IsPatchedDebugBreakSlotSequence() {
Address RelocInfo::call_address() {
ASSERT(IsPatchedReturnSequence());
ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
(IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
return Memory::Address_at(
pc_ + Assembler::kRealPatchReturnSequenceAddressOffset);
}
void RelocInfo::set_call_address(Address target) {
ASSERT(IsPatchedReturnSequence());
ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
(IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
Memory::Address_at(pc_ + Assembler::kRealPatchReturnSequenceAddressOffset) =
target;
}
Object* RelocInfo::call_object() {
ASSERT(IsPatchedReturnSequence());
return *call_object_address();
}
void RelocInfo::set_call_object(Object* target) {
ASSERT(IsPatchedReturnSequence());
*call_object_address() = target;
}
Object** RelocInfo::call_object_address() {
ASSERT(IsPatchedReturnSequence());
ASSERT((IsJSReturn(rmode()) && IsPatchedReturnSequence()) ||
(IsDebugBreakSlot(rmode()) && IsPatchedDebugBreakSlotSequence()));
return reinterpret_cast<Object**>(
pc_ + Assembler::kPatchReturnSequenceAddressOffset);
}

deps/v8/src/x64/assembler-x64.cc (3 lines changed)

@@ -2941,8 +2941,7 @@ bool Assembler::WriteRecordedPositions() {
const int RelocInfo::kApplyMask = RelocInfo::kCodeTargetMask |
1 << RelocInfo::INTERNAL_REFERENCE |
1 << RelocInfo::JS_RETURN;
1 << RelocInfo::INTERNAL_REFERENCE;
bool RelocInfo::IsCodedSpecially() {

deps/v8/src/x64/codegen-x64.cc (85 lines changed)

@@ -4813,6 +4813,30 @@ void DeferredRegExpLiteral::Generate() {
}
class DeferredAllocateInNewSpace: public DeferredCode {
public:
DeferredAllocateInNewSpace(int size, Register target)
: size_(size), target_(target) {
ASSERT(size >= kPointerSize && size <= Heap::MaxObjectSizeInNewSpace());
set_comment("[ DeferredAllocateInNewSpace");
}
void Generate();
private:
int size_;
Register target_;
};
void DeferredAllocateInNewSpace::Generate() {
__ Push(Smi::FromInt(size_));
__ CallRuntime(Runtime::kAllocateInNewSpace, 1);
if (!target_.is(rax)) {
__ movq(target_, rax);
}
}
void CodeGenerator::VisitRegExpLiteral(RegExpLiteral* node) {
Comment cmnt(masm_, "[ RegExp Literal");
@@ -4842,10 +4866,33 @@ void CodeGenerator::VisitRegExpLiteral(RegExpLiteral* node) {
__ CompareRoot(boilerplate.reg(), Heap::kUndefinedValueRootIndex);
deferred->Branch(equal);
deferred->BindExit();
literals.Unuse();
// Push the boilerplate object.
// Register of boilerplate contains RegExp object.
Result tmp = allocator()->Allocate();
ASSERT(tmp.is_valid());
int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
DeferredAllocateInNewSpace* allocate_fallback =
new DeferredAllocateInNewSpace(size, literals.reg());
frame_->Push(&boilerplate);
frame_->SpillTop();
__ AllocateInNewSpace(size,
literals.reg(),
tmp.reg(),
no_reg,
allocate_fallback->entry_label(),
TAG_OBJECT);
allocate_fallback->BindExit();
boilerplate = frame_->Pop();
// Copy from boilerplate to clone and return clone.
for (int i = 0; i < size; i += kPointerSize) {
__ movq(tmp.reg(), FieldOperand(boilerplate.reg(), i));
__ movq(FieldOperand(literals.reg(), i), tmp.reg());
}
frame_->Push(&literals);
}
@@ -7014,6 +7061,40 @@ void CodeGenerator::GenerateMathSqrt(ZoneList<Expression*>* args) {
}
void CodeGenerator::GenerateIsRegExpEquivalent(ZoneList<Expression*>* args) {
ASSERT_EQ(2, args->length());
Load(args->at(0));
Load(args->at(1));
Result right_res = frame_->Pop();
Result left_res = frame_->Pop();
right_res.ToRegister();
left_res.ToRegister();
Result tmp_res = allocator()->Allocate();
ASSERT(tmp_res.is_valid());
Register right = right_res.reg();
Register left = left_res.reg();
Register tmp = tmp_res.reg();
right_res.Unuse();
left_res.Unuse();
tmp_res.Unuse();
__ cmpq(left, right);
destination()->true_target()->Branch(equal);
// Fail if either is a non-HeapObject.
Condition either_smi =
masm()->CheckEitherSmi(left, right, tmp);
destination()->false_target()->Branch(either_smi);
__ movq(tmp, FieldOperand(left, HeapObject::kMapOffset));
__ cmpb(FieldOperand(tmp, Map::kInstanceTypeOffset),
Immediate(JS_REGEXP_TYPE));
destination()->false_target()->Branch(not_equal);
__ cmpq(tmp, FieldOperand(right, HeapObject::kMapOffset));
destination()->false_target()->Branch(not_equal);
__ movq(tmp, FieldOperand(left, JSRegExp::kDataOffset));
__ cmpq(tmp, FieldOperand(right, JSRegExp::kDataOffset));
destination()->Split(equal);
}
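
The predicate inlined here backs the %_IsRegExpEquivalent intrinsic: two values are equivalent if they are the same object, or both are JSRegExps with the same map and the same data field, i.e. the same pattern compiled with the same flags. An illustrative JavaScript approximation (the real check compares the hidden data objects directly, so the property-based helper below is a sketch, not the actual runtime test):

function isRegExpEquivalent(a, b) {
  if (a === b) return true;
  if (!(a instanceof RegExp) || !(b instanceof RegExp)) return false;
  // Same pattern and flags stands in for "same data object".
  return a.source === b.source &&
         a.global === b.global &&
         a.ignoreCase === b.ignoreCase &&
         a.multiline === b.multiline;
}
assertTrue(isRegExpEquivalent(/a*/g, /a*/g));
assertFalse(isRegExpEquivalent(/a*/g, /a*/i));
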
void CodeGenerator::VisitCallRuntime(CallRuntime* node) {
if (CheckForInlineRuntimeCall(node)) {
return;

2
deps/v8/src/x64/codegen-x64.h

@ -677,6 +677,8 @@ class CodeGenerator: public AstVisitor {
void GenerateMathCos(ZoneList<Expression*>* args);
void GenerateMathSqrt(ZoneList<Expression*>* args);
void GenerateIsRegExpEquivalent(ZoneList<Expression*>* args);
// Simple condition analysis.
enum ConditionAnalysis {
ALWAYS_TRUE,

84
deps/v8/src/x64/full-codegen-x64.cc

@ -1199,26 +1199,54 @@ void FullCodeGenerator::EmitVariableLoad(Variable* var,
void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
Comment cmnt(masm_, "[ RegExpLiteral");
Label done;
Label materialized;
// Registers will be used as follows:
// rdi = JS function.
// rbx = literals array.
// rax = regexp literal.
// rcx = literals array.
// rbx = regexp literal.
// rax = regexp literal clone.
__ movq(rdi, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
__ movq(rbx, FieldOperand(rdi, JSFunction::kLiteralsOffset));
__ movq(rcx, FieldOperand(rdi, JSFunction::kLiteralsOffset));
int literal_offset =
FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
__ movq(rax, FieldOperand(rbx, literal_offset));
__ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
__ j(not_equal, &done);
__ movq(rbx, FieldOperand(rcx, literal_offset));
__ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
__ j(not_equal, &materialized);
// Create regexp literal using runtime function
// Result will be in rax.
__ push(rbx);
__ push(rcx);
__ Push(Smi::FromInt(expr->literal_index()));
__ Push(expr->pattern());
__ Push(expr->flags());
__ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4);
__ bind(&done);
__ movq(rbx, rax);
__ bind(&materialized);
int size = JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
Label allocated, runtime_allocate;
__ AllocateInNewSpace(size, rax, rcx, rdx, &runtime_allocate, TAG_OBJECT);
__ jmp(&allocated);
__ bind(&runtime_allocate);
__ push(rbx);
__ Push(Smi::FromInt(size));
__ CallRuntime(Runtime::kAllocateInNewSpace, 1);
__ pop(rbx);
__ bind(&allocated);
// Copy the content into the newly allocated memory.
// (Unroll copy loop once for better throughput).
for (int i = 0; i < size - kPointerSize; i += 2 * kPointerSize) {
__ movq(rdx, FieldOperand(rbx, i));
__ movq(rcx, FieldOperand(rbx, i + kPointerSize));
__ movq(FieldOperand(rax, i), rdx);
__ movq(FieldOperand(rax, i + kPointerSize), rcx);
}
if ((size % (2 * kPointerSize)) != 0) {
__ movq(rdx, FieldOperand(rbx, size - kPointerSize));
__ movq(FieldOperand(rax, size - kPointerSize), rdx);
}
Apply(context_, rax);
}
@ -2644,6 +2672,44 @@ void FullCodeGenerator::EmitGetFromCache(ZoneList<Expression*>* args) {
}
void FullCodeGenerator::EmitIsRegExpEquivalent(ZoneList<Expression*>* args) {
ASSERT_EQ(2, args->length());
Register right = rax;
Register left = rbx;
Register tmp = rcx;
VisitForValue(args->at(0), kStack);
VisitForValue(args->at(1), kAccumulator);
__ pop(left);
Label done, fail, ok;
__ cmpq(left, right);
__ j(equal, &ok);
// Fail if either is a non-HeapObject.
Condition either_smi = masm()->CheckEitherSmi(left, right, tmp);
__ j(either_smi, &fail);
__ j(zero, &fail);
__ movq(tmp, FieldOperand(left, HeapObject::kMapOffset));
__ cmpb(FieldOperand(tmp, Map::kInstanceTypeOffset),
Immediate(JS_REGEXP_TYPE));
__ j(not_equal, &fail);
__ cmpq(tmp, FieldOperand(right, HeapObject::kMapOffset));
__ j(not_equal, &fail);
__ movq(tmp, FieldOperand(left, JSRegExp::kDataOffset));
__ cmpq(tmp, FieldOperand(right, JSRegExp::kDataOffset));
__ j(equal, &ok);
__ bind(&fail);
__ Move(rax, Factory::false_value());
__ jmp(&done);
__ bind(&ok);
__ Move(rax, Factory::true_value());
__ bind(&done);
Apply(context_, rax);
}
void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
Handle<String> name = expr->name();
if (name->length() > 0 && name->Get(0) == '_') {

15
deps/v8/src/x64/ic-x64.cc

@ -599,7 +599,7 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
// -- rdx : receiver
// -- rsp[0] : return address
// -----------------------------------
Label slow, check_string, index_smi, index_string;
Label slow, check_string, index_smi, index_string, property_array_property;
Label check_pixel_array, probe_dictionary, check_number_dictionary;
// Check that the key is a smi.
@ -692,15 +692,14 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
__ cmpq(rax, Operand(kScratchRegister, rdi, times_1, kPointerSize));
__ j(not_equal, &slow);
// Get field offset which is a 32-bit integer and check that it is
// an in-object property.
// Get field offset, which is a 32-bit integer.
ExternalReference cache_field_offsets
= ExternalReference::keyed_lookup_cache_field_offsets();
__ movq(kScratchRegister, cache_field_offsets);
__ movl(rdi, Operand(kScratchRegister, rcx, times_4, 0));
__ movzxbq(rcx, FieldOperand(rbx, Map::kInObjectPropertiesOffset));
__ subq(rdi, rcx);
__ j(above_equal, &slow);
__ j(above_equal, &property_array_property);
// Load in-object property.
__ movzxbq(rcx, FieldOperand(rbx, Map::kInstanceSizeOffset));
@ -709,6 +708,14 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
__ IncrementCounter(&Counters::keyed_load_generic_lookup_cache, 1);
__ ret(0);
// Load property array property.
__ bind(&property_array_property);
__ movq(rax, FieldOperand(rdx, JSObject::kPropertiesOffset));
__ movq(rax, FieldOperand(rax, rdi, times_pointer_size,
FixedArray::kHeaderSize));
__ IncrementCounter(&Counters::keyed_load_generic_lookup_cache, 1);
__ ret(0);
// Do a quick inline probe of the receiver's dictionary, if it
// exists.
__ bind(&probe_dictionary);

16
deps/v8/src/x64/macro-assembler-x64.cc

@ -882,13 +882,21 @@ Condition MacroAssembler::CheckBothPositiveSmi(Register first,
}
Condition MacroAssembler::CheckEitherSmi(Register first, Register second) {
Condition MacroAssembler::CheckEitherSmi(Register first,
Register second,
Register scratch) {
if (first.is(second)) {
return CheckSmi(first);
}
movl(kScratchRegister, first);
andl(kScratchRegister, second);
testb(kScratchRegister, Immediate(kSmiTagMask));
if (scratch.is(second)) {
andl(scratch, first);
} else {
if (!scratch.is(first)) {
movl(scratch, first);
}
andl(scratch, second);
}
testb(scratch, Immediate(kSmiTagMask));
return zero;
}
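
The scratch parameter lets callers that already hold a free register (as in EmitIsRegExpEquivalent above) keep kScratchRegister untouched. The check itself relies on the smi tag being 0 in the low bit (kSmiTagMask is 1): ANDing the two words produces a 0 low bit exactly when at least one operand is a smi. The same arithmetic, sketched with plain integers standing in for tagged words and mjsunit asserts:

// Low bit 0 = smi, low bit 1 = heap object pointer.
var kSmiTagMask = 1;
function eitherSmi(a, b) { return ((a & b) & kSmiTagMask) === 0; }
assertTrue(eitherSmi(0x1000, 0x1001));   // first word is smi-tagged
assertTrue(eitherSmi(0x1001, 0x1000));   // order does not matter
assertFalse(eitherSmi(0x1001, 0x1003));  // both heap-tagged
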

4
deps/v8/src/x64/macro-assembler-x64.h

@ -284,7 +284,9 @@ class MacroAssembler: public Assembler {
Condition CheckBothPositiveSmi(Register first, Register second);
// Is either value a tagged smi?
Condition CheckEitherSmi(Register first, Register second);
Condition CheckEitherSmi(Register first,
Register second,
Register scratch = kScratchRegister);
// Is the value the minimum smi value (since we are using
// two's complement numbers, negating the value is known to yield

28
deps/v8/src/x64/stub-cache-x64.cc

@ -820,9 +820,11 @@ void CallStubCompiler::GenerateNameCheck(String* name, Label* miss) {
}
void CallStubCompiler::GenerateMissBranch() {
Handle<Code> ic = ComputeCallMiss(arguments().immediate(), kind_);
__ Jump(ic, RelocInfo::CODE_TARGET);
Object* CallStubCompiler::GenerateMissBranch() {
Object* obj = StubCache::ComputeCallMiss(arguments().immediate(), kind_);
if (obj->IsFailure()) return obj;
__ Jump(Handle<Code>(Code::cast(obj)), RelocInfo::CODE_TARGET);
return obj;
}
@ -975,7 +977,8 @@ Object* CallStubCompiler::CompileCallConstant(Object* object,
// Handle call cache miss.
__ bind(&miss_in_smi_check);
GenerateMissBranch();
Object* obj = GenerateMissBranch();
if (obj->IsFailure()) return obj;
// Return the generated code.
return GetCode(function);
@ -1029,7 +1032,8 @@ Object* CallStubCompiler::CompileCallField(JSObject* object,
// Handle call cache miss.
__ bind(&miss);
GenerateMissBranch();
Object* obj = GenerateMissBranch();
if (obj->IsFailure()) return obj;
// Return the generated code.
return GetCode(FIELD, name);
@ -1186,8 +1190,8 @@ Object* CallStubCompiler::CompileArrayPushCall(Object* object,
}
__ bind(&miss);
GenerateMissBranch();
Object* obj = GenerateMissBranch();
if (obj->IsFailure()) return obj;
// Return the generated code.
return GetCode(function);
@ -1270,8 +1274,8 @@ Object* CallStubCompiler::CompileArrayPopCall(Object* object,
argc + 1,
1);
__ bind(&miss);
GenerateMissBranch();
Object* obj = GenerateMissBranch();
if (obj->IsFailure()) return obj;
// Return the generated code.
return GetCode(function);
@ -1357,7 +1361,8 @@ Object* CallStubCompiler::CompileCallInterceptor(JSObject* object,
// Handle load cache miss.
__ bind(&miss);
GenerateMissBranch();
Object* obj = GenerateMissBranch();
if (obj->IsFailure()) return obj;
// Return the generated code.
return GetCode(INTERCEPTOR, name);
@ -1442,7 +1447,8 @@ Object* CallStubCompiler::CompileCallGlobal(JSObject* object,
// Handle call cache miss.
__ bind(&miss);
__ IncrementCounter(&Counters::call_global_inline_miss, 1);
GenerateMissBranch();
Object* obj = GenerateMissBranch();
if (obj->IsFailure()) return obj;
// Return the generated code.
return GetCode(NORMAL, name);

68
deps/v8/test/cctest/test-debug.cc

@ -1244,7 +1244,9 @@ TEST(GCDuringBreakPointProcessing) {
// Call the function three times with different garbage collections in between
// and make sure that the break point survives.
static void CallAndGC(v8::Local<v8::Object> recv, v8::Local<v8::Function> f) {
static void CallAndGC(v8::Local<v8::Object> recv,
v8::Local<v8::Function> f,
bool force_compaction) {
break_point_hit_count = 0;
for (int i = 0; i < 3; i++) {
@ -1258,15 +1260,14 @@ static void CallAndGC(v8::Local<v8::Object> recv, v8::Local<v8::Function> f) {
CHECK_EQ(2 + i * 3, break_point_hit_count);
// Mark sweep (and perhaps compact) and call function.
Heap::CollectAllGarbage(false);
Heap::CollectAllGarbage(force_compaction);
f->Call(recv, 0, NULL);
CHECK_EQ(3 + i * 3, break_point_hit_count);
}
}
// Test that a break point can be set at a return store location.
TEST(BreakPointSurviveGC) {
static void TestBreakPointSurviveGC(bool force_compaction) {
break_point_hit_count = 0;
v8::HandleScope scope;
DebugLocalContext env;
@ -1276,30 +1277,65 @@ TEST(BreakPointSurviveGC) {
v8::Local<v8::Function> foo;
// Test IC store break point with garbage collection.
foo = CompileFunction(&env, "function foo(){bar=0;}", "foo");
SetBreakPoint(foo, 0);
CallAndGC(env->Global(), foo);
{
v8::Local<v8::Function> bar =
CompileFunction(&env, "function foo(){}", "foo");
foo = CompileFunction(&env, "function foo(){bar=0;}", "foo");
SetBreakPoint(foo, 0);
}
CallAndGC(env->Global(), foo, force_compaction);
// Test IC load break point with garbage collection.
foo = CompileFunction(&env, "bar=1;function foo(){var x=bar;}", "foo");
SetBreakPoint(foo, 0);
CallAndGC(env->Global(), foo);
{
v8::Local<v8::Function> bar =
CompileFunction(&env, "function foo(){}", "foo");
foo = CompileFunction(&env, "bar=1;function foo(){var x=bar;}", "foo");
SetBreakPoint(foo, 0);
}
CallAndGC(env->Global(), foo, force_compaction);
// Test IC call break point with garbage collection.
foo = CompileFunction(&env, "function bar(){};function foo(){bar();}", "foo");
SetBreakPoint(foo, 0);
CallAndGC(env->Global(), foo);
{
v8::Local<v8::Function> bar =
CompileFunction(&env, "function foo(){}", "foo");
foo = CompileFunction(&env,
"function bar(){};function foo(){bar();}",
"foo");
SetBreakPoint(foo, 0);
}
CallAndGC(env->Global(), foo, force_compaction);
// Test return break point with garbage collection.
foo = CompileFunction(&env, "function foo(){}", "foo");
SetBreakPoint(foo, 0);
CallAndGC(env->Global(), foo);
{
v8::Local<v8::Function> bar =
CompileFunction(&env, "function foo(){}", "foo");
foo = CompileFunction(&env, "function foo(){}", "foo");
SetBreakPoint(foo, 0);
}
CallAndGC(env->Global(), foo, force_compaction);
// Test non IC break point with garbage collection.
{
v8::Local<v8::Function> bar =
CompileFunction(&env, "function foo(){}", "foo");
foo = CompileFunction(&env, "function foo(){var bar=0;}", "foo");
SetBreakPoint(foo, 0);
}
CallAndGC(env->Global(), foo, force_compaction);
v8::Debug::SetDebugEventListener(NULL);
CheckDebuggerUnloaded();
}
// Test that a break point can be set at a return store location.
TEST(BreakPointSurviveGC) {
TestBreakPointSurviveGC(false);
TestBreakPointSurviveGC(true);
}
// Test that break points can be set using the global Debug object.
TEST(BreakPointThroughJavaScript) {
break_point_hit_count = 0;

4
deps/v8/test/mjsunit/object-freeze.js

@ -191,3 +191,7 @@ Object.defineProperty(obj5, 'y', {configurable: false, writable: false});
Object.preventExtensions(obj5);
assertFalse(Object.isFrozen(obj5));
// Make sure that Object.freeze returns the frozen object.
var obj6 = {}
assertTrue(obj6 === Object.freeze(obj6))
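
Returning the argument lets freeze (and seal, tested the same way below) be applied inline at construction time. A small usage sketch under the same harness:

// Freeze at construction in a single expression.
var config = Object.freeze({ retries: 3, timeout: 250 });
assertTrue(Object.isFrozen(config));
assertTrue(config === Object.freeze(config));  // returns the same object
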

4
deps/v8/test/mjsunit/object-literal.js

@ -95,13 +95,13 @@ function makeRegexpInArray() { return [ [ /a*/, {} ] ]; }
a = makeRegexpInArray();
var b = makeRegexpInArray();
assertTrue(a[0][0] === b[0][0]);
assertFalse(a[0][0] === b[0][0]);
assertFalse(a[0][1] === b[0][1]);
function makeRegexpInObject() { return { a: { b: /b*/, c: {} } }; }
a = makeRegexpInObject();
b = makeRegexpInObject();
assertTrue(a.a.b === b.a.b);
assertFalse(a.a.b === b.a.b);
assertFalse(a.a.c === b.a.c);

4
deps/v8/test/mjsunit/object-seal.js

@ -193,3 +193,7 @@ Object.defineProperty(obj4, 'y', {configurable: false, writable: false});
Object.preventExtensions(obj4);
assertFalse(Object.isSealed(obj4));
// Make sure that Object.seal returns the sealed object.
var obj4 = {}
assertTrue(obj4 === Object.seal(obj4))
