From 21e7292ea0ec1054fef8ad2af2ff68f6822c2449 Mon Sep 17 00:00:00 2001 From: Ryan Dahl Date: Mon, 19 Dec 2011 13:06:19 -0800 Subject: [PATCH] Upgrade V8 to 3.8.1 --- deps/v8/AUTHORS | 1 + deps/v8/ChangeLog | 7 + deps/v8/build/common.gypi | 1 + deps/v8/src/arm/code-stubs-arm.cc | 433 +++++++-------- deps/v8/src/arm/ic-arm.cc | 16 +- deps/v8/src/arm/lithium-codegen-arm.cc | 22 +- deps/v8/src/assembler.cc | 5 - deps/v8/src/assembler.h | 1 - deps/v8/src/d8.cc | 18 +- deps/v8/src/deoptimizer.cc | 15 +- deps/v8/src/flag-definitions.h | 30 +- deps/v8/src/flags.cc | 5 + deps/v8/src/flags.h | 3 + deps/v8/src/heap.cc | 26 +- deps/v8/src/hydrogen-instructions.cc | 4 +- deps/v8/src/hydrogen-instructions.h | 50 +- deps/v8/src/hydrogen.cc | 97 ++-- deps/v8/src/ia32/code-stubs-ia32.cc | 174 +++--- deps/v8/src/ia32/ic-ia32.cc | 13 +- deps/v8/src/ia32/lithium-codegen-ia32.cc | 22 +- deps/v8/src/ia32/macro-assembler-ia32.cc | 8 + deps/v8/src/ia32/macro-assembler-ia32.h | 3 +- deps/v8/src/ic.cc | 20 +- deps/v8/src/incremental-marking.cc | 15 +- deps/v8/src/mark-compact.cc | 16 +- deps/v8/src/mips/code-stubs-mips.cc | 508 ++++++++---------- deps/v8/src/mips/ic-mips.cc | 13 +- deps/v8/src/mips/lithium-codegen-mips.cc | 26 +- deps/v8/src/mksnapshot.cc | 2 +- deps/v8/src/objects-inl.h | 15 +- deps/v8/src/objects.cc | 16 +- deps/v8/src/objects.h | 8 +- deps/v8/src/prettyprinter.cc | 1 + deps/v8/src/runtime.cc | 36 +- deps/v8/src/runtime.h | 6 + deps/v8/src/spaces.cc | 2 +- deps/v8/src/stub-cache.cc | 9 +- deps/v8/src/stub-cache.h | 3 +- deps/v8/src/type-info.cc | 47 +- deps/v8/src/type-info.h | 4 + deps/v8/src/v8.cc | 9 +- deps/v8/src/version.cc | 2 +- deps/v8/src/x64/code-stubs-x64.cc | 386 ++++++------- deps/v8/src/x64/ic-x64.cc | 16 +- deps/v8/src/x64/lithium-codegen-x64.cc | 21 +- deps/v8/test/cctest/cctest.status | 4 + deps/v8/test/cctest/test-hashing.cc | 3 +- deps/v8/test/cctest/test-heap.cc | 80 +++ deps/v8/test/cctest/test-regexp.cc | 6 +- .../test/mjsunit/array-literal-transitions.js | 11 +- deps/v8/test/mjsunit/d8-os.js | 73 +-- .../mjsunit/function-named-self-reference.js | 45 ++ .../mjsunit/regress/regress-crbug-100859.js | 39 ++ .../v8/test/mjsunit/string-external-cached.js | 23 + deps/v8/test/test262/test262.status | 43 +- deps/v8/test/test262/testcfg.py | 1 - deps/v8/tools/grokdump.py | 328 ++++++++--- 57 files changed, 1675 insertions(+), 1116 deletions(-) create mode 100644 deps/v8/test/mjsunit/function-named-self-reference.js create mode 100644 deps/v8/test/mjsunit/regress/regress-crbug-100859.js diff --git a/deps/v8/AUTHORS b/deps/v8/AUTHORS index fcb5c205a6..148be416e0 100644 --- a/deps/v8/AUTHORS +++ b/deps/v8/AUTHORS @@ -42,6 +42,7 @@ Rodolph Perfetta Ryan Dahl Sanjoy Das Subrato K De +Tobias Burnus Vlad Burlik Yuqiang Xian Zaheer Ahmad diff --git a/deps/v8/ChangeLog b/deps/v8/ChangeLog index 33df4603ce..a81dedd45d 100644 --- a/deps/v8/ChangeLog +++ b/deps/v8/ChangeLog @@ -1,3 +1,10 @@ +2011-12-19: Version 3.8.1 + + Fixed GCC 4.7 warnings. Patch from Tobias Burnus. + + Stability improvements on all platforms. + + 2011-12-13: Version 3.8.0 Fixed handling of arrays in DefineOwnProperty. 
(issue 1756) diff --git a/deps/v8/build/common.gypi b/deps/v8/build/common.gypi index 861c87d29d..9129d0170c 100644 --- a/deps/v8/build/common.gypi +++ b/deps/v8/build/common.gypi @@ -303,6 +303,7 @@ }], ['OS=="win"', { 'msvs_configuration_attributes': { + 'OutputDirectory': '<(DEPTH)\\build\\$(ConfigurationName)', 'IntermediateDirectory': '$(OutDir)\\obj\\$(ProjectName)', 'CharacterSet': '1', }, diff --git a/deps/v8/src/arm/code-stubs-arm.cc b/deps/v8/src/arm/code-stubs-arm.cc index 282df15654..209c48e2a0 100644 --- a/deps/v8/src/arm/code-stubs-arm.cc +++ b/deps/v8/src/arm/code-stubs-arm.cc @@ -5785,37 +5785,23 @@ void SubStringStub::Generate(MacroAssembler* masm) { static const int kFromOffset = 1 * kPointerSize; static const int kStringOffset = 2 * kPointerSize; - // Check bounds and smi-ness. - Register to = r6; - Register from = r7; - - __ Ldrd(to, from, MemOperand(sp, kToOffset)); + __ Ldrd(r2, r3, MemOperand(sp, kToOffset)); STATIC_ASSERT(kFromOffset == kToOffset + 4); STATIC_ASSERT(kSmiTag == 0); STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1); // I.e., arithmetic shift right by one un-smi-tags. - __ mov(r2, Operand(to, ASR, 1), SetCC); - __ mov(r3, Operand(from, ASR, 1), SetCC, cc); + __ mov(r2, Operand(r2, ASR, 1), SetCC); + __ mov(r3, Operand(r3, ASR, 1), SetCC, cc); // If either to or from had the smi tag bit set, then carry is set now. __ b(cs, &runtime); // Either "from" or "to" is not a smi. __ b(mi, &runtime); // From is negative. - // Both to and from are smis. + // Both r2 and r3 are untagged integers. __ sub(r2, r2, Operand(r3), SetCC); __ b(mi, &runtime); // Fail if from > to. - // Special handling of sub-strings of length 1 and 2. One character strings - // are handled in the runtime system (looked up in the single character - // cache). Two character strings are looked for in the symbol cache in - // generated code. - __ cmp(r2, Operand(2)); - __ b(lt, &runtime); - // r2: result string length - // r3: from index (untagged smi) - // r6 (a.k.a. to): to (smi) - // r7 (a.k.a. from): from offset (smi) - // Make sure first argument is a sequential (or flat) string. + // Make sure first argument is a string. __ ldr(r0, MemOperand(sp, kStringOffset)); STATIC_ASSERT(kSmiTag == 0); __ JumpIfSmi(r0, &runtime); @@ -5830,67 +5816,15 @@ void SubStringStub::Generate(MacroAssembler* masm) { __ cmp(r2, Operand(r4, ASR, 1)); __ b(eq, &return_r0); - Label create_slice; - if (FLAG_string_slices) { - __ cmp(r2, Operand(SlicedString::kMinLength)); - __ b(ge, &create_slice); - } - - // r0: original string - // r1: instance type - // r2: result string length - // r3: from index (untagged smi) - // r6 (a.k.a. to): to (smi) - // r7 (a.k.a. from): from offset (smi) - Label seq_string; - __ and_(r4, r1, Operand(kStringRepresentationMask)); - STATIC_ASSERT(kSeqStringTag < kConsStringTag); - STATIC_ASSERT(kConsStringTag < kExternalStringTag); - STATIC_ASSERT(kConsStringTag < kSlicedStringTag); - __ cmp(r4, Operand(kConsStringTag)); - __ b(gt, &runtime); // Slices and external strings go to runtime. - __ b(lt, &seq_string); // Sequential strings are handled directly. - - // Cons string. Try to recurse (once) on the first substring. - // (This adds a little more generality than necessary to handle flattened - // cons strings, but not much). 
- __ ldr(r0, FieldMemOperand(r0, ConsString::kFirstOffset)); - __ ldr(r4, FieldMemOperand(r0, HeapObject::kMapOffset)); - __ ldrb(r1, FieldMemOperand(r4, Map::kInstanceTypeOffset)); - __ tst(r1, Operand(kStringRepresentationMask)); - STATIC_ASSERT(kSeqStringTag == 0); - __ b(ne, &runtime); // Cons, slices and external strings go to runtime. - - // Definitly a sequential string. - __ bind(&seq_string); - - // r0: original string - // r1: instance type - // r2: result string length - // r3: from index (untagged smi) - // r6 (a.k.a. to): to (smi) - // r7 (a.k.a. from): from offset (smi) - __ ldr(r4, FieldMemOperand(r0, String::kLengthOffset)); - __ cmp(r4, Operand(to)); - __ b(lt, &runtime); // Fail if to > length. - to = no_reg; - - // r0: original string or left hand side of the original cons string. - // r1: instance type - // r2: result string length - // r3: from index (untagged smi) - // r7 (a.k.a. from): from offset (smi) - // Check for flat ASCII string. - Label non_ascii_flat; - __ tst(r1, Operand(kStringEncodingMask)); - STATIC_ASSERT(kTwoByteStringTag == 0); - __ b(eq, &non_ascii_flat); - Label result_longer_than_two; + // Check for special case of two character ascii string, in which case + // we do a lookup in the symbol table first. __ cmp(r2, Operand(2)); __ b(gt, &result_longer_than_two); + __ b(lt, &runtime); + + __ JumpIfInstanceTypeIsNotSequentialAscii(r1, r1, &runtime); - // Sub string of length 2 requested. // Get the two characters forming the sub string. __ add(r0, r0, Operand(r3)); __ ldrb(r3, FieldMemOperand(r0, SeqAsciiString::kHeaderSize)); @@ -5900,7 +5834,6 @@ void SubStringStub::Generate(MacroAssembler* masm) { Label make_two_character_string; StringHelper::GenerateTwoCharacterSymbolTableProbe( masm, r3, r4, r1, r5, r6, r7, r9, &make_two_character_string); - Counters* counters = masm->isolate()->counters(); __ jmp(&return_r0); // r2: result string length. @@ -5911,18 +5844,114 @@ void SubStringStub::Generate(MacroAssembler* masm) { __ jmp(&return_r0); __ bind(&result_longer_than_two); + // Deal with different string types: update the index if necessary + // and put the underlying string into r5. + // r0: original string + // r1: instance type + // r2: length + // r3: from index (untagged) + Label underlying_unpacked, sliced_string, seq_or_external_string; + // If the string is not indirect, it can only be sequential or external. + STATIC_ASSERT(kIsIndirectStringMask == (kSlicedStringTag & kConsStringTag)); + STATIC_ASSERT(kIsIndirectStringMask != 0); + __ tst(r1, Operand(kIsIndirectStringMask)); + __ b(eq, &seq_or_external_string); + + __ tst(r1, Operand(kSlicedNotConsMask)); + __ b(ne, &sliced_string); + // Cons string. Check whether it is flat, then fetch first part. + __ ldr(r5, FieldMemOperand(r0, ConsString::kSecondOffset)); + __ CompareRoot(r5, Heap::kEmptyStringRootIndex); + __ b(ne, &runtime); + __ ldr(r5, FieldMemOperand(r0, ConsString::kFirstOffset)); + // Update instance type. + __ ldr(r1, FieldMemOperand(r5, HeapObject::kMapOffset)); + __ ldrb(r1, FieldMemOperand(r1, Map::kInstanceTypeOffset)); + __ jmp(&underlying_unpacked); + + __ bind(&sliced_string); + // Sliced string. Fetch parent and correct start index by offset. + __ ldr(r5, FieldMemOperand(r0, SlicedString::kOffsetOffset)); + __ add(r3, r3, Operand(r5, ASR, 1)); + __ ldr(r5, FieldMemOperand(r0, SlicedString::kParentOffset)); + // Update instance type. 
+ __ ldr(r1, FieldMemOperand(r5, HeapObject::kMapOffset)); + __ ldrb(r1, FieldMemOperand(r1, Map::kInstanceTypeOffset)); + __ jmp(&underlying_unpacked); + + __ bind(&seq_or_external_string); + // Sequential or external string. Just move string to the expected register. + __ mov(r5, r0); + + __ bind(&underlying_unpacked); - // Locate 'from' character of string. - __ add(r5, r0, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag)); - __ add(r5, r5, Operand(from, ASR, 1)); + if (FLAG_string_slices) { + Label copy_routine; + // r5: underlying subject string + // r1: instance type of underlying subject string + // r2: length + // r3: adjusted start index (untagged) + __ cmp(r2, Operand(SlicedString::kMinLength)); + // Short slice. Copy instead of slicing. + __ b(lt, ©_routine); + // Allocate new sliced string. At this point we do not reload the instance + // type including the string encoding because we simply rely on the info + // provided by the original string. It does not matter if the original + // string's encoding is wrong because we always have to recheck encoding of + // the newly created string's parent anyways due to externalized strings. + Label two_byte_slice, set_slice_header; + STATIC_ASSERT((kStringEncodingMask & kAsciiStringTag) != 0); + STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0); + __ tst(r1, Operand(kStringEncodingMask)); + __ b(eq, &two_byte_slice); + __ AllocateAsciiSlicedString(r0, r2, r6, r7, &runtime); + __ jmp(&set_slice_header); + __ bind(&two_byte_slice); + __ AllocateTwoByteSlicedString(r0, r2, r6, r7, &runtime); + __ bind(&set_slice_header); + __ mov(r3, Operand(r3, LSL, 1)); + __ str(r3, FieldMemOperand(r0, SlicedString::kOffsetOffset)); + __ str(r5, FieldMemOperand(r0, SlicedString::kParentOffset)); + __ jmp(&return_r0); - // Allocate the result. - __ AllocateAsciiString(r0, r2, r3, r4, r1, &runtime); + __ bind(©_routine); + } - // r0: result string - // r2: result string length - // r5: first character of substring to copy - // r7 (a.k.a. from): from offset (smi) + // r5: underlying subject string + // r1: instance type of underlying subject string + // r2: length + // r3: adjusted start index (untagged) + Label two_byte_sequential, sequential_string, allocate_result; + STATIC_ASSERT(kExternalStringTag != 0); + STATIC_ASSERT(kSeqStringTag == 0); + __ tst(r1, Operand(kExternalStringTag)); + __ b(eq, &sequential_string); + + // Handle external string. + // Rule out short external strings. + STATIC_CHECK(kShortExternalStringTag != 0); + __ tst(r1, Operand(kShortExternalStringTag)); + __ b(ne, &runtime); + __ ldr(r5, FieldMemOperand(r5, ExternalString::kResourceDataOffset)); + // r5 already points to the first character of underlying string. + __ jmp(&allocate_result); + + __ bind(&sequential_string); + // Locate first character of underlying subject string. + STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqAsciiString::kHeaderSize); + __ add(r5, r5, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag)); + + __ bind(&allocate_result); + // Sequential acii string. Allocate the result. + STATIC_ASSERT((kAsciiStringTag & kStringEncodingMask) != 0); + __ tst(r1, Operand(kStringEncodingMask)); + __ b(eq, &two_byte_sequential); + + // Allocate and copy the resulting ascii string. + __ AllocateAsciiString(r0, r2, r4, r6, r7, &runtime); + + // Locate first character of substring to copy. + __ add(r5, r5, r3); // Locate first character of result. 
__ add(r1, r0, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag)); @@ -5935,30 +5964,16 @@ void SubStringStub::Generate(MacroAssembler* masm) { COPY_ASCII | DEST_ALWAYS_ALIGNED); __ jmp(&return_r0); - __ bind(&non_ascii_flat); - // r0: original string - // r2: result string length - // r7 (a.k.a. from): from offset (smi) - // Check for flat two byte string. + // Allocate and copy the resulting two-byte string. + __ bind(&two_byte_sequential); + __ AllocateTwoByteString(r0, r2, r4, r6, r7, &runtime); - // Locate 'from' character of string. - __ add(r5, r0, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag)); - // As "from" is a smi it is 2 times the value which matches the size of a two - // byte character. + // Locate first character of substring to copy. STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0); - __ add(r5, r5, Operand(from)); - - // Allocate the result. - __ AllocateTwoByteString(r0, r2, r1, r3, r4, &runtime); - - // r0: result string - // r2: result string length - // r5: first character of substring to copy + __ add(r5, r5, Operand(r3, LSL, 1)); // Locate first character of result. __ add(r1, r0, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag)); - from = no_reg; - // r0: result string. // r1: first character of result. // r2: result length. @@ -5966,69 +5981,9 @@ void SubStringStub::Generate(MacroAssembler* masm) { STATIC_ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0); StringHelper::GenerateCopyCharactersLong( masm, r1, r5, r2, r3, r4, r6, r7, r9, DEST_ALWAYS_ALIGNED); - __ jmp(&return_r0); - - if (FLAG_string_slices) { - __ bind(&create_slice); - // r0: original string - // r1: instance type - // r2: length - // r3: from index (untagged smi) - // r6 (a.k.a. to): to (smi) - // r7 (a.k.a. from): from offset (smi) - Label allocate_slice, sliced_string, seq_or_external_string; - // If the string is not indirect, it can only be sequential or external. - STATIC_ASSERT(kIsIndirectStringMask == (kSlicedStringTag & kConsStringTag)); - STATIC_ASSERT(kIsIndirectStringMask != 0); - __ tst(r1, Operand(kIsIndirectStringMask)); - __ b(eq, &seq_or_external_string); - - __ tst(r1, Operand(kSlicedNotConsMask)); - __ b(ne, &sliced_string); - // Cons string. Check whether it is flat, then fetch first part. - __ ldr(r5, FieldMemOperand(r0, ConsString::kSecondOffset)); - __ LoadRoot(r9, Heap::kEmptyStringRootIndex); - __ cmp(r5, r9); - __ b(ne, &runtime); - __ ldr(r5, FieldMemOperand(r0, ConsString::kFirstOffset)); - __ jmp(&allocate_slice); - - __ bind(&sliced_string); - // Sliced string. Fetch parent and correct start index by offset. - __ ldr(r5, FieldMemOperand(r0, SlicedString::kOffsetOffset)); - __ add(r7, r7, r5); - __ ldr(r5, FieldMemOperand(r0, SlicedString::kParentOffset)); - __ jmp(&allocate_slice); - - __ bind(&seq_or_external_string); - // Sequential or external string. Just move string to the correct register. - __ mov(r5, r0); - - __ bind(&allocate_slice); - // r1: instance type of original string - // r2: length - // r5: underlying subject string - // r7 (a.k.a. from): from offset (smi) - // Allocate new sliced string. At this point we do not reload the instance - // type including the string encoding because we simply rely on the info - // provided by the original string. It does not matter if the original - // string's encoding is wrong because we always have to recheck encoding of - // the newly created string's parent anyways due to externalized strings. 
- Label two_byte_slice, set_slice_header; - STATIC_ASSERT((kStringEncodingMask & kAsciiStringTag) != 0); - STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0); - __ tst(r1, Operand(kStringEncodingMask)); - __ b(eq, &two_byte_slice); - __ AllocateAsciiSlicedString(r0, r2, r3, r4, &runtime); - __ jmp(&set_slice_header); - __ bind(&two_byte_slice); - __ AllocateTwoByteSlicedString(r0, r2, r3, r4, &runtime); - __ bind(&set_slice_header); - __ str(r7, FieldMemOperand(r0, SlicedString::kOffsetOffset)); - __ str(r5, FieldMemOperand(r0, SlicedString::kParentOffset)); - } __ bind(&return_r0); + Counters* counters = masm->isolate()->counters(); __ IncrementCounter(counters->sub_string_native(), 1, r3, r4); __ add(sp, sp, Operand(3 * kPointerSize)); __ Ret(); @@ -6185,7 +6140,7 @@ void StringCompareStub::Generate(MacroAssembler* masm) { void StringAddStub::Generate(MacroAssembler* masm) { - Label string_add_runtime, call_builtin; + Label call_runtime, call_builtin; Builtins::JavaScript builtin_id = Builtins::ADD; Counters* counters = masm->isolate()->counters(); @@ -6200,7 +6155,7 @@ void StringAddStub::Generate(MacroAssembler* masm) { // Make sure that both arguments are strings if not known in advance. if (flags_ == NO_STRING_ADD_FLAGS) { - __ JumpIfEitherSmi(r0, r1, &string_add_runtime); + __ JumpIfEitherSmi(r0, r1, &call_runtime); // Load instance types. __ ldr(r4, FieldMemOperand(r0, HeapObject::kMapOffset)); __ ldr(r5, FieldMemOperand(r1, HeapObject::kMapOffset)); @@ -6210,7 +6165,7 @@ void StringAddStub::Generate(MacroAssembler* masm) { // If either is not a string, go to runtime. __ tst(r4, Operand(kIsNotStringMask)); __ tst(r5, Operand(kIsNotStringMask), eq); - __ b(ne, &string_add_runtime); + __ b(ne, &call_runtime); } else { // Here at least one of the arguments is definitely a string. // We convert the one that is not known to be a string. @@ -6279,7 +6234,7 @@ void StringAddStub::Generate(MacroAssembler* masm) { __ ldrb(r5, FieldMemOperand(r5, Map::kInstanceTypeOffset)); } __ JumpIfBothInstanceTypesAreNotSequentialAscii(r4, r5, r6, r7, - &string_add_runtime); + &call_runtime); // Get the two characters forming the sub string. __ ldrb(r2, FieldMemOperand(r0, SeqAsciiString::kHeaderSize)); @@ -6301,7 +6256,7 @@ void StringAddStub::Generate(MacroAssembler* masm) { // halfword store instruction (which assumes that processor is // in a little endian mode) __ mov(r6, Operand(2)); - __ AllocateAsciiString(r0, r6, r4, r5, r9, &string_add_runtime); + __ AllocateAsciiString(r0, r6, r4, r5, r9, &call_runtime); __ strh(r2, FieldMemOperand(r0, SeqAsciiString::kHeaderSize)); __ IncrementCounter(counters->string_add_native(), 1, r2, r3); __ add(sp, sp, Operand(2 * kPointerSize)); @@ -6316,7 +6271,7 @@ void StringAddStub::Generate(MacroAssembler* masm) { ASSERT(IsPowerOf2(String::kMaxLength + 1)); // kMaxLength + 1 is representable as shifted literal, kMaxLength is not. __ cmp(r6, Operand(String::kMaxLength + 1)); - __ b(hs, &string_add_runtime); + __ b(hs, &call_runtime); // If result is not supposed to be flat, allocate a cons string object. // If both strings are ASCII the result is an ASCII cons string. @@ -6334,7 +6289,7 @@ void StringAddStub::Generate(MacroAssembler* masm) { // Allocate an ASCII cons string. __ bind(&ascii_data); - __ AllocateAsciiConsString(r7, r6, r4, r5, &string_add_runtime); + __ AllocateAsciiConsString(r7, r6, r4, r5, &call_runtime); __ bind(&allocated); // Fill the fields of the cons string. 
__ str(r0, FieldMemOperand(r7, ConsString::kFirstOffset)); @@ -6359,11 +6314,13 @@ void StringAddStub::Generate(MacroAssembler* masm) { __ b(eq, &ascii_data); // Allocate a two byte cons string. - __ AllocateTwoByteConsString(r7, r6, r4, r5, &string_add_runtime); + __ AllocateTwoByteConsString(r7, r6, r4, r5, &call_runtime); __ jmp(&allocated); - // Handle creating a flat result. First check that both strings are - // sequential and that they have the same encoding. + // We cannot encounter sliced strings or cons strings here since: + STATIC_ASSERT(SlicedString::kMinLength >= String::kMinNonFlatLength); + // Handle creating a flat result from either external or sequential strings. + // Locate the first characters' locations. // r0: first string // r1: second string // r2: length of first string @@ -6371,6 +6328,7 @@ void StringAddStub::Generate(MacroAssembler* masm) { // r4: first string instance type (if flags_ == NO_STRING_ADD_FLAGS) // r5: second string instance type (if flags_ == NO_STRING_ADD_FLAGS) // r6: sum of lengths. + Label first_prepared, second_prepared; __ bind(&string_add_flat_result); if (flags_ != NO_STRING_ADD_FLAGS) { __ ldr(r4, FieldMemOperand(r0, HeapObject::kMapOffset)); @@ -6378,97 +6336,88 @@ void StringAddStub::Generate(MacroAssembler* masm) { __ ldrb(r4, FieldMemOperand(r4, Map::kInstanceTypeOffset)); __ ldrb(r5, FieldMemOperand(r5, Map::kInstanceTypeOffset)); } - // Check that both strings are sequential. + + // Check whether both strings have same encoding + __ eor(r7, r4, Operand(r5)); + __ tst(r7, Operand(kStringEncodingMask)); + __ b(ne, &call_runtime); + STATIC_ASSERT(kSeqStringTag == 0); __ tst(r4, Operand(kStringRepresentationMask)); - __ tst(r5, Operand(kStringRepresentationMask), eq); - __ b(ne, &string_add_runtime); - // Now check if both strings have the same encoding (ASCII/Two-byte). - // r0: first string. - // r1: second string. + STATIC_ASSERT(SeqAsciiString::kHeaderSize == SeqTwoByteString::kHeaderSize); + __ add(r7, + r0, + Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag), + LeaveCC, + eq); + __ b(eq, &first_prepared); + // External string: rule out short external string and load string resource. + STATIC_ASSERT(kShortExternalStringTag != 0); + __ tst(r4, Operand(kShortExternalStringMask)); + __ b(ne, &call_runtime); + __ ldr(r7, FieldMemOperand(r0, ExternalString::kResourceDataOffset)); + __ bind(&first_prepared); + + STATIC_ASSERT(kSeqStringTag == 0); + __ tst(r5, Operand(kStringRepresentationMask)); + STATIC_ASSERT(SeqAsciiString::kHeaderSize == SeqTwoByteString::kHeaderSize); + __ add(r1, + r1, + Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag), + LeaveCC, + eq); + __ b(eq, &second_prepared); + // External string: rule out short external string and load string resource. + STATIC_ASSERT(kShortExternalStringTag != 0); + __ tst(r5, Operand(kShortExternalStringMask)); + __ b(ne, &call_runtime); + __ ldr(r1, FieldMemOperand(r1, ExternalString::kResourceDataOffset)); + __ bind(&second_prepared); + + Label non_ascii_string_add_flat_result; + // r7: first character of first string + // r1: first character of second string // r2: length of first string. // r3: length of second string. - // r6: sum of lengths.. - Label non_ascii_string_add_flat_result; - ASSERT(IsPowerOf2(kStringEncodingMask)); // Just one bit to test. - __ eor(r7, r4, Operand(r5)); - __ tst(r7, Operand(kStringEncodingMask)); - __ b(ne, &string_add_runtime); - // And see if it's ASCII or two-byte. - __ tst(r4, Operand(kStringEncodingMask)); + // r6: sum of lengths. 
+ // Both strings have the same encoding. + STATIC_ASSERT(kTwoByteStringTag == 0); + __ tst(r5, Operand(kStringEncodingMask)); __ b(eq, &non_ascii_string_add_flat_result); - // Both strings are sequential ASCII strings. We also know that they are - // short (since the sum of the lengths is less than kMinNonFlatLength). - // r6: length of resulting flat string - __ AllocateAsciiString(r7, r6, r4, r5, r9, &string_add_runtime); - // Locate first character of result. - __ add(r6, r7, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag)); - // Locate first character of first argument. - __ add(r0, r0, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag)); - // r0: first character of first string. - // r1: second string. + __ AllocateAsciiString(r0, r6, r4, r5, r9, &call_runtime); + __ add(r6, r0, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag)); + // r0: result string. + // r7: first character of first string. + // r1: first character of second string. // r2: length of first string. // r3: length of second string. // r6: first character of result. - // r7: result string. - StringHelper::GenerateCopyCharacters(masm, r6, r0, r2, r4, true); - - // Load second argument and locate first character. - __ add(r1, r1, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag)); - // r1: first character of second string. - // r3: length of second string. + StringHelper::GenerateCopyCharacters(masm, r6, r7, r2, r4, true); // r6: next character of result. - // r7: result string. StringHelper::GenerateCopyCharacters(masm, r6, r1, r3, r4, true); - __ mov(r0, Operand(r7)); __ IncrementCounter(counters->string_add_native(), 1, r2, r3); __ add(sp, sp, Operand(2 * kPointerSize)); __ Ret(); __ bind(&non_ascii_string_add_flat_result); - // Both strings are sequential two byte strings. - // r0: first string. - // r1: second string. - // r2: length of first string. - // r3: length of second string. - // r6: sum of length of strings. - __ AllocateTwoByteString(r7, r6, r4, r5, r9, &string_add_runtime); - // r0: first string. - // r1: second string. - // r2: length of first string. - // r3: length of second string. - // r7: result string. - - // Locate first character of result. - __ add(r6, r7, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag)); - // Locate first character of first argument. - __ add(r0, r0, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag)); - - // r0: first character of first string. - // r1: second string. + __ AllocateTwoByteString(r0, r6, r4, r5, r9, &call_runtime); + __ add(r6, r0, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag)); + // r0: result string. + // r7: first character of first string. + // r1: first character of second string. // r2: length of first string. // r3: length of second string. // r6: first character of result. - // r7: result string. - StringHelper::GenerateCopyCharacters(masm, r6, r0, r2, r4, false); - - // Locate first character of second argument. - __ add(r1, r1, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag)); - - // r1: first character of second string. - // r3: length of second string. - // r6: next character of result (after copy of first string). - // r7: result string. + StringHelper::GenerateCopyCharacters(masm, r6, r7, r2, r4, false); + // r6: next character of result. StringHelper::GenerateCopyCharacters(masm, r6, r1, r3, r4, false); - - __ mov(r0, Operand(r7)); __ IncrementCounter(counters->string_add_native(), 1, r2, r3); __ add(sp, sp, Operand(2 * kPointerSize)); __ Ret(); // Just jump to runtime to add the two strings. 
- __ bind(&string_add_runtime); + __ bind(&call_runtime); __ TailCallRuntime(Runtime::kStringAdd, 2, 1); if (call_builtin.is_linked()) { diff --git a/deps/v8/src/arm/ic-arm.cc b/deps/v8/src/arm/ic-arm.cc index abbac993df..219d354c7e 100644 --- a/deps/v8/src/arm/ic-arm.cc +++ b/deps/v8/src/arm/ic-arm.cc @@ -1469,11 +1469,10 @@ void StoreIC::GenerateArrayLength(MacroAssembler* masm) { // -- lr : return address // ----------------------------------- // - // This accepts as a receiver anything JSObject::SetElementsLength accepts - // (currently anything except for external and pixel arrays which means - // anything with elements of FixedArray type.), but currently is restricted - // to JSArray. - // Value must be a number, but only smis are accepted as the most common case. + // This accepts as a receiver anything JSArray::SetElementsLength accepts + // (currently anything except for external arrays which means anything with + // elements of FixedArray type). Value must be a number, but only smis are + // accepted as the most common case. Label miss; @@ -1495,6 +1494,13 @@ void StoreIC::GenerateArrayLength(MacroAssembler* masm) { __ CompareObjectType(scratch, scratch, scratch, FIXED_ARRAY_TYPE); __ b(ne, &miss); + // Check that the array has fast properties, otherwise the length + // property might have been redefined. + __ ldr(scratch, FieldMemOperand(receiver, JSArray::kPropertiesOffset)); + __ ldr(scratch, FieldMemOperand(scratch, FixedArray::kMapOffset)); + __ CompareRoot(scratch, Heap::kHashTableMapRootIndex); + __ b(eq, &miss); + // Check that value is a smi. __ JumpIfNotSmi(value, &miss); diff --git a/deps/v8/src/arm/lithium-codegen-arm.cc b/deps/v8/src/arm/lithium-codegen-arm.cc index 25532a2d95..cdc464c322 100644 --- a/deps/v8/src/arm/lithium-codegen-arm.cc +++ b/deps/v8/src/arm/lithium-codegen-arm.cc @@ -2306,7 +2306,11 @@ void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) { if (instr->hydrogen()->RequiresHoleCheck()) { __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); __ cmp(result, ip); - DeoptimizeIf(eq, instr->environment()); + if (instr->hydrogen()->DeoptimizesOnHole()) { + DeoptimizeIf(eq, instr->environment()); + } else { + __ mov(result, Operand(factory()->undefined_value()), LeaveCC, eq); + } } } @@ -2314,14 +2318,22 @@ void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) { void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) { Register context = ToRegister(instr->context()); Register value = ToRegister(instr->value()); + Register scratch = scratch0(); MemOperand target = ContextOperand(context, instr->slot_index()); + + Label skip_assignment; + if (instr->hydrogen()->RequiresHoleCheck()) { - Register scratch = scratch0(); __ ldr(scratch, target); __ LoadRoot(ip, Heap::kTheHoleValueRootIndex); __ cmp(scratch, ip); - DeoptimizeIf(eq, instr->environment()); + if (instr->hydrogen()->DeoptimizesOnHole()) { + DeoptimizeIf(eq, instr->environment()); + } else { + __ b(ne, &skip_assignment); + } } + __ str(value, target); if (instr->hydrogen()->NeedsWriteBarrier()) { HType type = instr->hydrogen()->value()->type(); @@ -2330,12 +2342,14 @@ void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) { __ RecordWriteContextSlot(context, target.offset(), value, - scratch0(), + scratch, kLRHasBeenSaved, kSaveFPRegs, EMIT_REMEMBERED_SET, check_needed); } + + __ bind(&skip_assignment); } diff --git a/deps/v8/src/assembler.cc b/deps/v8/src/assembler.cc index 941f45c211..b66f448407 100644 --- a/deps/v8/src/assembler.cc +++ b/deps/v8/src/assembler.cc @@ -817,11 +817,6 @@ 
ExternalReference ExternalReference::compute_output_frames_function( } -ExternalReference ExternalReference::global_contexts_list(Isolate* isolate) { - return ExternalReference(isolate->heap()->global_contexts_list_address()); -} - - ExternalReference ExternalReference::keyed_lookup_cache_keys(Isolate* isolate) { return ExternalReference(isolate->keyed_lookup_cache()->keys_address()); } diff --git a/deps/v8/src/assembler.h b/deps/v8/src/assembler.h index 5c25768e6a..cec20fca07 100644 --- a/deps/v8/src/assembler.h +++ b/deps/v8/src/assembler.h @@ -590,7 +590,6 @@ class ExternalReference BASE_EMBEDDED { // Deoptimization support. static ExternalReference new_deoptimizer_function(Isolate* isolate); static ExternalReference compute_output_frames_function(Isolate* isolate); - static ExternalReference global_contexts_list(Isolate* isolate); // Static data in the keyed lookup cache. static ExternalReference keyed_lookup_cache_keys(Isolate* isolate); diff --git a/deps/v8/src/d8.cc b/deps/v8/src/d8.cc index 9eccc7e4fe..ad850f5ee7 100644 --- a/deps/v8/src/d8.cc +++ b/deps/v8/src/d8.cc @@ -296,14 +296,26 @@ Handle Shell::CreateExternalArray(const Arguments& args, ASSERT(kMaxLength == i::ExternalArray::kMaxLength); #endif // V8_SHARED size_t length = 0; + TryCatch try_catch; if (args[0]->IsUint32()) { length = args[0]->Uint32Value(); } else { Local number = args[0]->ToNumber(); - if (number.IsEmpty() || !number->IsNumber()) { - return ThrowException(String::New("Array length must be a number.")); + if (number.IsEmpty()) { + ASSERT(try_catch.HasCaught()); + return try_catch.Exception(); + } + ASSERT(number->IsNumber()); + Local int32 = number->ToInt32(); + if (int32.IsEmpty()) { + if (try_catch.HasCaught()) { + return try_catch.Exception(); + } + } + int32_t raw_length = int32->Int32Value(); + if (try_catch.HasCaught()) { + return try_catch.Exception(); } - int32_t raw_length = number->ToInt32()->Int32Value(); if (raw_length < 0) { return ThrowException(String::New("Array length must not be negative.")); } diff --git a/deps/v8/src/deoptimizer.cc b/deps/v8/src/deoptimizer.cc index 108e547f2b..aab69c3422 100644 --- a/deps/v8/src/deoptimizer.cc +++ b/deps/v8/src/deoptimizer.cc @@ -264,11 +264,16 @@ void Deoptimizer::VisitAllOptimizedFunctions( AssertNoAllocation no_allocation; // Run through the list of all global contexts and deoptimize. - Object* global = Isolate::Current()->heap()->global_contexts_list(); - while (!global->IsUndefined()) { - VisitAllOptimizedFunctionsForGlobalObject(Context::cast(global)->global(), - visitor); - global = Context::cast(global)->get(Context::NEXT_CONTEXT_LINK); + Object* context = Isolate::Current()->heap()->global_contexts_list(); + while (!context->IsUndefined()) { + // GC can happen when the context is not fully initialized, + // so the global field of the context can be undefined. 
+ Object* global = Context::cast(context)->get(Context::GLOBAL_INDEX); + if (!global->IsUndefined()) { + VisitAllOptimizedFunctionsForGlobalObject(JSObject::cast(global), + visitor); + } + context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK); } } diff --git a/deps/v8/src/flag-definitions.h b/deps/v8/src/flag-definitions.h index f145df7512..f7fe6566b5 100644 --- a/deps/v8/src/flag-definitions.h +++ b/deps/v8/src/flag-definitions.h @@ -41,6 +41,7 @@ extern ctype FLAG_##nam; #define FLAG_READONLY(ftype, ctype, nam, def, cmt) \ static ctype const FLAG_##nam = def; +#define DEFINE_implication(whenflag, thenflag) // We want to supply the actual storage and value for the flag variable in the // .cc file. We only do this for writable flags. @@ -48,6 +49,7 @@ #define FLAG_FULL(ftype, ctype, nam, def, cmt) \ ctype FLAG_##nam = def; #define FLAG_READONLY(ftype, ctype, nam, def, cmt) +#define DEFINE_implication(whenflag, thenflag) // We need to define all of our default values so that the Flag structure can // access them by pointer. These are just used internally inside of one .cc, @@ -56,7 +58,7 @@ #define FLAG_FULL(ftype, ctype, nam, def, cmt) \ static ctype const FLAGDEFAULT_##nam = def; #define FLAG_READONLY(ftype, ctype, nam, def, cmt) - +#define DEFINE_implication(whenflag, thenflag) // We want to write entries into our meta data table, for internal parsing and // printing / etc in the flag parser code. We only do this for writable flags. @@ -64,6 +66,14 @@ #define FLAG_FULL(ftype, ctype, nam, def, cmt) \ { Flag::TYPE_##ftype, #nam, &FLAG_##nam, &FLAGDEFAULT_##nam, cmt, false }, #define FLAG_READONLY(ftype, ctype, nam, def, cmt) +#define DEFINE_implication(whenflag, thenflag) + +// We produce the code to set flags when it is implied by another flag. +#elif defined(FLAG_MODE_DEFINE_IMPLICATIONS) +#define FLAG_FULL(ftype, ctype, nam, def, cmt) +#define FLAG_READONLY(ftype, ctype, nam, def, cmt) +#define DEFINE_implication(whenflag, thenflag) \ + if (FLAG_##whenflag) FLAG_##thenflag = true; #else #error No mode supplied when including flags.defs @@ -103,6 +113,10 @@ DEFINE_bool(harmony_proxies, false, "enable harmony proxies") DEFINE_bool(harmony_collections, false, "enable harmony collections (sets, maps, and weak maps)") DEFINE_bool(harmony, false, "enable all harmony features") +DEFINE_implication(harmony, harmony_typeof) +DEFINE_implication(harmony, harmony_scoping) +DEFINE_implication(harmony, harmony_proxies) +DEFINE_implication(harmony, harmony_collections) // Flags for experimental implementation features. DEFINE_bool(unbox_double_arrays, true, "automatically unbox arrays of doubles") @@ -542,6 +556,18 @@ DEFINE_bool(print_unopt_code, false, "print unoptimized code before " DEFINE_bool(print_code_verbose, false, "print more information for code") DEFINE_bool(print_builtin_code, false, "print generated code for builtins") +#ifdef ENABLE_DISASSEMBLER +DEFINE_bool(print_all_code, false, "enable all flags related to printing code") +DEFINE_implication(print_all_code, print_code) +DEFINE_implication(print_all_code, print_opt_code) +DEFINE_implication(print_all_code, print_unopt_code) +DEFINE_implication(print_all_code, print_code_verbose) +DEFINE_implication(print_all_code, print_builtin_code) +DEFINE_implication(print_all_code, print_code_stubs) +DEFINE_implication(print_all_code, trace_codegen) +DEFINE_implication(print_all_code, code_comments) +#endif + // Cleanup... 
#undef FLAG_FULL #undef FLAG_READONLY @@ -550,8 +576,10 @@ DEFINE_bool(print_builtin_code, false, "print generated code for builtins") #undef DEFINE_bool #undef DEFINE_int #undef DEFINE_string +#undef DEFINE_implication #undef FLAG_MODE_DECLARE #undef FLAG_MODE_DEFINE #undef FLAG_MODE_DEFINE_DEFAULTS #undef FLAG_MODE_META +#undef FLAG_MODE_DEFINE_IMPLICATIONS diff --git a/deps/v8/src/flags.cc b/deps/v8/src/flags.cc index ab5b57cedc..75e66ce34d 100644 --- a/deps/v8/src/flags.cc +++ b/deps/v8/src/flags.cc @@ -548,4 +548,9 @@ JSArguments& JSArguments::operator=(JSArguments args) { } +void FlagList::EnforceFlagImplications() { +#define FLAG_MODE_DEFINE_IMPLICATIONS +#include "flag-definitions.h" +} + } } // namespace v8::internal diff --git a/deps/v8/src/flags.h b/deps/v8/src/flags.h index f9cbde0bf7..f0b239b6f2 100644 --- a/deps/v8/src/flags.h +++ b/deps/v8/src/flags.h @@ -72,6 +72,9 @@ class FlagList { // Print help to stdout with flags, types, and default values. static void PrintHelp(); + + // Set flags as consequence of being implied by another flag. + static void EnforceFlagImplications(); }; } } // namespace v8::internal diff --git a/deps/v8/src/heap.cc b/deps/v8/src/heap.cc index bc7550ed9a..a3be8ba7c4 100644 --- a/deps/v8/src/heap.cc +++ b/deps/v8/src/heap.cc @@ -642,13 +642,17 @@ void Heap::ClearJSFunctionResultCaches() { Object* context = global_contexts_list_; while (!context->IsUndefined()) { - // Get the caches for this context: - FixedArray* caches = - Context::cast(context)->jsfunction_result_caches(); - // Clear the caches: - int length = caches->length(); - for (int i = 0; i < length; i++) { - JSFunctionResultCache::cast(caches->get(i))->Clear(); + // Get the caches for this context. GC can happen when the context + // is not fully initialized, so the caches can be undefined. + Object* caches_or_undefined = + Context::cast(context)->get(Context::JSFUNCTION_RESULT_CACHES_INDEX); + if (!caches_or_undefined->IsUndefined()) { + FixedArray* caches = FixedArray::cast(caches_or_undefined); + // Clear the caches: + int length = caches->length(); + for (int i = 0; i < length; i++) { + JSFunctionResultCache::cast(caches->get(i))->Clear(); + } } // Get the next context: context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK); @@ -665,7 +669,13 @@ void Heap::ClearNormalizedMapCaches() { Object* context = global_contexts_list_; while (!context->IsUndefined()) { - Context::cast(context)->normalized_map_cache()->Clear(); + // GC can happen when the context is not fully initialized, + // so the cache can be undefined. 
+ Object* cache = + Context::cast(context)->get(Context::NORMALIZED_MAP_CACHE_INDEX); + if (!cache->IsUndefined()) { + NormalizedMapCache::cast(cache)->Clear(); + } context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK); } } diff --git a/deps/v8/src/hydrogen-instructions.cc b/deps/v8/src/hydrogen-instructions.cc index 31d7d153f9..1ff7f16fdf 100644 --- a/deps/v8/src/hydrogen-instructions.cc +++ b/deps/v8/src/hydrogen-instructions.cc @@ -1227,7 +1227,9 @@ void HConstant::PrintDataTo(StringStream* stream) { bool HArrayLiteral::IsCopyOnWrite() const { - return boilerplate_object_->elements()->map() == HEAP->fixed_cow_array_map(); + if (!boilerplate_object_->IsJSObject()) return false; + return Handle::cast(boilerplate_object_)->elements()->map() == + HEAP->fixed_cow_array_map(); } diff --git a/deps/v8/src/hydrogen-instructions.h b/deps/v8/src/hydrogen-instructions.h index d7c0eb059e..fe21c6441d 100644 --- a/deps/v8/src/hydrogen-instructions.h +++ b/deps/v8/src/hydrogen-instructions.h @@ -3449,28 +3449,45 @@ class HLoadContextSlot: public HUnaryOperation { public: enum Mode { // Perform a normal load of the context slot without checking its value. - kLoad, + kNoCheck, // Load and check the value of the context slot. Deoptimize if it's the // hole value. This is used for checking for loading of uninitialized // harmony bindings where we deoptimize into full-codegen generated code // which will subsequently throw a reference error. - kLoadCheck + kCheckDeoptimize, + // Load and check the value of the context slot. Return undefined if it's + // the hole value. This is used for non-harmony const assignments + kCheckReturnUndefined }; HLoadContextSlot(HValue* context, Variable* var) : HUnaryOperation(context), slot_index_(var->index()) { ASSERT(var->IsContextSlot()); - mode_ = (var->mode() == LET || var->mode() == CONST_HARMONY) - ? kLoadCheck : kLoad; + switch (var->mode()) { + case LET: + case CONST_HARMONY: + mode_ = kCheckDeoptimize; + break; + case CONST: + mode_ = kCheckReturnUndefined; + break; + default: + mode_ = kNoCheck; + } set_representation(Representation::Tagged()); SetFlag(kUseGVN); SetFlag(kDependsOnContextSlots); } int slot_index() const { return slot_index_; } + Mode mode() const { return mode_; } + + bool DeoptimizesOnHole() { + return mode_ == kCheckDeoptimize; + } bool RequiresHoleCheck() { - return mode_ == kLoadCheck; + return mode_ != kNoCheck; } virtual Representation RequiredInputRepresentation(int index) { @@ -3498,12 +3515,14 @@ class HStoreContextSlot: public HTemplateInstruction<2> { enum Mode { // Perform a normal store to the context slot without checking its previous // value. - kAssign, + kNoCheck, // Check the previous value of the context slot and deoptimize if it's the // hole value. This is used for checking for assignments to uninitialized // harmony bindings where we deoptimize into full-codegen generated code // which will subsequently throw a reference error. 
- kAssignCheck + kCheckDeoptimize, + // Check the previous value and ignore assignment if it isn't a hole value + kCheckIgnoreAssignment }; HStoreContextSlot(HValue* context, int slot_index, Mode mode, HValue* value) @@ -3522,8 +3541,12 @@ class HStoreContextSlot: public HTemplateInstruction<2> { return StoringValueNeedsWriteBarrier(value()); } + bool DeoptimizesOnHole() { + return mode_ == kCheckDeoptimize; + } + bool RequiresHoleCheck() { - return mode_ == kAssignCheck; + return mode_ != kNoCheck; } virtual Representation RequiredInputRepresentation(int index) { @@ -4202,7 +4225,7 @@ class HMaterializedLiteral: public HTemplateInstruction { class HArrayLiteral: public HMaterializedLiteral<1> { public: HArrayLiteral(HValue* context, - Handle boilerplate_object, + Handle boilerplate_object, int length, int literal_index, int depth) @@ -4214,9 +4237,12 @@ class HArrayLiteral: public HMaterializedLiteral<1> { HValue* context() { return OperandAt(0); } ElementsKind boilerplate_elements_kind() const { - return boilerplate_object_->GetElementsKind(); + if (!boilerplate_object_->IsJSObject()) { + return FAST_ELEMENTS; + } + return Handle::cast(boilerplate_object_)->GetElementsKind(); } - Handle boilerplate_object() const { return boilerplate_object_; } + Handle boilerplate_object() const { return boilerplate_object_; } int length() const { return length_; } bool IsCopyOnWrite() const; @@ -4230,7 +4256,7 @@ class HArrayLiteral: public HMaterializedLiteral<1> { private: int length_; - Handle boilerplate_object_; + Handle boilerplate_object_; }; diff --git a/deps/v8/src/hydrogen.cc b/deps/v8/src/hydrogen.cc index 36632374e1..617590af50 100644 --- a/deps/v8/src/hydrogen.cc +++ b/deps/v8/src/hydrogen.cc @@ -3285,9 +3285,6 @@ void HGraphBuilder::VisitVariableProxy(VariableProxy* expr) { } case Variable::CONTEXT: { - if (variable->mode() == CONST) { - return Bailout("reference to const context slot"); - } HValue* context = BuildContextChainWalk(variable); HLoadContextSlot* instr = new(zone()) HLoadContextSlot(context, variable); return ast_context()->ReturnInstruction(instr, expr->id()); @@ -3467,14 +3464,22 @@ void HGraphBuilder::VisitArrayLiteral(ArrayLiteral* expr) { Handle literals(environment()->closure()->literals()); Handle raw_boilerplate(literals->get(expr->literal_index())); - // For now, no boilerplate causes a deopt. 
if (raw_boilerplate->IsUndefined()) { - AddInstruction(new(zone()) HSoftDeoptimize); - return ast_context()->ReturnValue(graph()->GetConstantUndefined()); + raw_boilerplate = Runtime::CreateArrayLiteralBoilerplate( + isolate(), literals, expr->constant_elements()); + if (raw_boilerplate.is_null()) { + return Bailout("array boilerplate creation failed"); + } + literals->set(expr->literal_index(), *raw_boilerplate); + if (JSObject::cast(*raw_boilerplate)->elements()->map() == + isolate()->heap()->fixed_cow_array_map()) { + isolate()->counters()->cow_arrays_created_runtime()->Increment(); + } } - Handle boilerplate(Handle::cast(raw_boilerplate)); - ElementsKind boilerplate_elements_kind = boilerplate->GetElementsKind(); + Handle boilerplate = Handle::cast(raw_boilerplate); + ElementsKind boilerplate_elements_kind = + Handle::cast(boilerplate)->GetElementsKind(); HArrayLiteral* literal = new(zone()) HArrayLiteral( context, @@ -3805,8 +3810,8 @@ void HGraphBuilder::HandleCompoundAssignment(Assignment* expr) { if (proxy != NULL) { Variable* var = proxy->var(); - if (var->mode() == CONST || var->mode() == LET) { - return Bailout("unsupported let or const compound assignment"); + if (var->mode() == LET) { + return Bailout("unsupported let compound assignment"); } CHECK_ALIVE(VisitForValue(operation)); @@ -3821,6 +3826,9 @@ void HGraphBuilder::HandleCompoundAssignment(Assignment* expr) { case Variable::PARAMETER: case Variable::LOCAL: + if (var->mode() == CONST) { + return Bailout("unsupported const compound assignment"); + } Bind(var, Top()); break; @@ -3841,10 +3849,23 @@ void HGraphBuilder::HandleCompoundAssignment(Assignment* expr) { } } + HStoreContextSlot::Mode mode; + + switch (var->mode()) { + case LET: + mode = HStoreContextSlot::kCheckDeoptimize; + break; + case CONST: + return ast_context()->ReturnValue(Pop()); + case CONST_HARMONY: + // This case is checked statically so no need to + // perform checks here + UNREACHABLE(); + default: + mode = HStoreContextSlot::kNoCheck; + } + HValue* context = BuildContextChainWalk(var); - HStoreContextSlot::Mode mode = - (var->mode() == LET || var->mode() == CONST_HARMONY) - ? HStoreContextSlot::kAssignCheck : HStoreContextSlot::kAssign; HStoreContextSlot* instr = new(zone()) HStoreContextSlot(context, var->index(), mode, Top()); AddInstruction(instr); @@ -3955,17 +3976,19 @@ void HGraphBuilder::VisitAssignment(Assignment* expr) { HandlePropertyAssignment(expr); } else if (proxy != NULL) { Variable* var = proxy->var(); + if (var->mode() == CONST) { if (expr->op() != Token::INIT_CONST) { - return Bailout("non-initializer assignment to const"); + CHECK_ALIVE(VisitForValue(expr->value())); + return ast_context()->ReturnValue(Pop()); } - if (!var->IsStackAllocated()) { - return Bailout("assignment to const context slot"); + + if (var->IsStackAllocated()) { + // We insert a use of the old value to detect unsupported uses of const + // variables (e.g. initialization inside a loop). + HValue* old_value = environment()->Lookup(var); + AddInstruction(new HUseConst(old_value)); } - // We insert a use of the old value to detect unsupported uses of const - // variables (e.g. initialization inside a loop). 
- HValue* old_value = environment()->Lookup(var); - AddInstruction(new HUseConst(old_value)); } else if (var->mode() == CONST_HARMONY) { if (expr->op() != Token::INIT_CONST_HARMONY) { return Bailout("non-initializer assignment to const"); @@ -4004,7 +4027,6 @@ void HGraphBuilder::VisitAssignment(Assignment* expr) { } case Variable::CONTEXT: { - ASSERT(var->mode() != CONST); // Bail out if we try to mutate a parameter value in a function using // the arguments object. We do not (yet) correctly handle the // arguments property of the function. @@ -4020,17 +4042,32 @@ void HGraphBuilder::VisitAssignment(Assignment* expr) { } CHECK_ALIVE(VisitForValue(expr->value())); - HValue* context = BuildContextChainWalk(var); HStoreContextSlot::Mode mode; if (expr->op() == Token::ASSIGN) { - mode = (var->mode() == LET || var->mode() == CONST_HARMONY) - ? HStoreContextSlot::kAssignCheck : HStoreContextSlot::kAssign; + switch (var->mode()) { + case LET: + mode = HStoreContextSlot::kCheckDeoptimize; + break; + case CONST: + return ast_context()->ReturnValue(Pop()); + case CONST_HARMONY: + // This case is checked statically so no need to + // perform checks here + UNREACHABLE(); + default: + mode = HStoreContextSlot::kNoCheck; + } + } else if (expr->op() == Token::INIT_VAR || + expr->op() == Token::INIT_LET || + expr->op() == Token::INIT_CONST_HARMONY) { + mode = HStoreContextSlot::kNoCheck; } else { - ASSERT(expr->op() == Token::INIT_VAR || - expr->op() == Token::INIT_LET || - expr->op() == Token::INIT_CONST_HARMONY); - mode = HStoreContextSlot::kAssign; + ASSERT(expr->op() == Token::INIT_CONST); + + mode = HStoreContextSlot::kCheckIgnoreAssignment; } + + HValue* context = BuildContextChainWalk(var); HStoreContextSlot* instr = new(zone()) HStoreContextSlot( context, var->index(), mode, Top()); AddInstruction(instr); @@ -5643,7 +5680,7 @@ void HGraphBuilder::VisitCountOperation(CountOperation* expr) { HValue* context = BuildContextChainWalk(var); HStoreContextSlot::Mode mode = (var->mode() == LET || var->mode() == CONST_HARMONY) - ? HStoreContextSlot::kAssignCheck : HStoreContextSlot::kAssign; + ? HStoreContextSlot::kCheckDeoptimize : HStoreContextSlot::kNoCheck; HStoreContextSlot* instr = new(zone()) HStoreContextSlot(context, var->index(), mode, after); AddInstruction(instr); @@ -6251,7 +6288,7 @@ void HGraphBuilder::HandleDeclaration(VariableProxy* proxy, if (var->IsContextSlot()) { HValue* context = environment()->LookupContext(); HStoreContextSlot* store = new HStoreContextSlot( - context, var->index(), HStoreContextSlot::kAssign, value); + context, var->index(), HStoreContextSlot::kNoCheck, value); AddInstruction(store); if (store->HasObservableSideEffects()) AddSimulate(proxy->id()); } else { diff --git a/deps/v8/src/ia32/code-stubs-ia32.cc b/deps/v8/src/ia32/code-stubs-ia32.cc index eabf201d78..bb422959e7 100644 --- a/deps/v8/src/ia32/code-stubs-ia32.cc +++ b/deps/v8/src/ia32/code-stubs-ia32.cc @@ -5474,7 +5474,7 @@ void StringCharAtGenerator::GenerateSlow( void StringAddStub::Generate(MacroAssembler* masm) { - Label string_add_runtime, call_builtin; + Label call_runtime, call_builtin; Builtins::JavaScript builtin_id = Builtins::ADD; // Load the two arguments. @@ -5483,14 +5483,14 @@ void StringAddStub::Generate(MacroAssembler* masm) { // Make sure that both arguments are strings if not known in advance. 
if (flags_ == NO_STRING_ADD_FLAGS) { - __ JumpIfSmi(eax, &string_add_runtime); + __ JumpIfSmi(eax, &call_runtime); __ CmpObjectType(eax, FIRST_NONSTRING_TYPE, ebx); - __ j(above_equal, &string_add_runtime); + __ j(above_equal, &call_runtime); // First argument is a a string, test second. - __ JumpIfSmi(edx, &string_add_runtime); + __ JumpIfSmi(edx, &call_runtime); __ CmpObjectType(edx, FIRST_NONSTRING_TYPE, ebx); - __ j(above_equal, &string_add_runtime); + __ j(above_equal, &call_runtime); } else { // Here at least one of the arguments is definitely a string. // We convert the one that is not known to be a string. @@ -5541,15 +5541,14 @@ void StringAddStub::Generate(MacroAssembler* masm) { __ add(ebx, ecx); STATIC_ASSERT(Smi::kMaxValue == String::kMaxLength); // Handle exceptionally long strings in the runtime system. - __ j(overflow, &string_add_runtime); + __ j(overflow, &call_runtime); // Use the symbol table when adding two one character strings, as it // helps later optimizations to return a symbol here. __ cmp(ebx, Immediate(Smi::FromInt(2))); __ j(not_equal, &longer_than_two); // Check that both strings are non-external ascii strings. - __ JumpIfNotBothSequentialAsciiStrings(eax, edx, ebx, ecx, - &string_add_runtime); + __ JumpIfNotBothSequentialAsciiStrings(eax, edx, ebx, ecx, &call_runtime); // Get the two characters forming the new string. __ movzx_b(ebx, FieldOperand(eax, SeqAsciiString::kHeaderSize)); @@ -5574,11 +5573,7 @@ void StringAddStub::Generate(MacroAssembler* masm) { __ movzx_b(ecx, FieldOperand(edx, SeqAsciiString::kHeaderSize)); __ bind(&make_two_character_string_no_reload); __ IncrementCounter(counters->string_add_make_two_char(), 1); - __ AllocateAsciiString(eax, // Result. - 2, // Length. - edi, // Scratch 1. - edx, // Scratch 2. - &string_add_runtime); + __ AllocateAsciiString(eax, 2, edi, edx, &call_runtime); // Pack both characters in ebx. __ shl(ecx, kBitsPerByte); __ or_(ebx, ecx); @@ -5606,7 +5601,7 @@ void StringAddStub::Generate(MacroAssembler* masm) { __ j(zero, &non_ascii); __ bind(&ascii_data); // Allocate an acsii cons string. - __ AllocateAsciiConsString(ecx, edi, no_reg, &string_add_runtime); + __ AllocateAsciiConsString(ecx, edi, no_reg, &call_runtime); __ bind(&allocated); // Fill the fields of the cons string. if (FLAG_debug_code) __ AbortIfNotSmi(ebx); @@ -5633,64 +5628,93 @@ void StringAddStub::Generate(MacroAssembler* masm) { __ cmp(edi, kAsciiStringTag | kAsciiDataHintTag); __ j(equal, &ascii_data); // Allocate a two byte cons string. - __ AllocateTwoByteConsString(ecx, edi, no_reg, &string_add_runtime); + __ AllocateTwoByteConsString(ecx, edi, no_reg, &call_runtime); __ jmp(&allocated); - // Handle creating a flat result. First check that both strings are not - // external strings. + // We cannot encounter sliced strings or cons strings here since: + STATIC_ASSERT(SlicedString::kMinLength >= String::kMinNonFlatLength); + // Handle creating a flat result from either external or sequential strings. + // Locate the first characters' locations. 
// eax: first string // ebx: length of resulting flat string as a smi // edx: second string + Label first_prepared, second_prepared; + Label first_is_sequential, second_is_sequential; __ bind(&string_add_flat_result); __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset)); __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset)); - __ and_(ecx, kStringRepresentationMask); - __ cmp(ecx, kExternalStringTag); - __ j(equal, &string_add_runtime); - __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset)); - __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset)); - __ and_(ecx, kStringRepresentationMask); - __ cmp(ecx, kExternalStringTag); - __ j(equal, &string_add_runtime); - // We cannot encounter sliced strings here since: - STATIC_ASSERT(SlicedString::kMinLength >= String::kMinNonFlatLength); - // Now check if both strings are ascii strings. - // eax: first string - // ebx: length of resulting flat string as a smi - // edx: second string - Label non_ascii_string_add_flat_result; - STATIC_ASSERT((kStringEncodingMask & kAsciiStringTag) != 0); - STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0); - __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset)); - __ test_b(FieldOperand(ecx, Map::kInstanceTypeOffset), kStringEncodingMask); + // ecx: instance type of first string + STATIC_ASSERT(kSeqStringTag == 0); + __ test_b(ecx, kStringRepresentationMask); + __ j(zero, &first_is_sequential, Label::kNear); + // Rule out short external string and load string resource. + STATIC_ASSERT(kShortExternalStringTag != 0); + __ test_b(ecx, kShortExternalStringMask); + __ j(not_zero, &call_runtime); + __ mov(eax, FieldOperand(eax, ExternalString::kResourceDataOffset)); + STATIC_ASSERT(SeqAsciiString::kHeaderSize == SeqTwoByteString::kHeaderSize); + __ jmp(&first_prepared, Label::kNear); + __ bind(&first_is_sequential); + __ add(eax, Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag)); + __ bind(&first_prepared); + + __ mov(edi, FieldOperand(edx, HeapObject::kMapOffset)); + __ movzx_b(edi, FieldOperand(edi, Map::kInstanceTypeOffset)); + // Check whether both strings have same encoding. + // edi: instance type of second string + __ xor_(ecx, edi); + __ test_b(ecx, kStringEncodingMask); + __ j(not_zero, &call_runtime); + STATIC_ASSERT(kSeqStringTag == 0); + __ test_b(edi, kStringRepresentationMask); + __ j(zero, &second_is_sequential, Label::kNear); + // Rule out short external string and load string resource. + STATIC_ASSERT(kShortExternalStringTag != 0); + __ test_b(edi, kShortExternalStringMask); + __ j(not_zero, &call_runtime); + __ mov(edx, FieldOperand(edx, ExternalString::kResourceDataOffset)); + STATIC_ASSERT(SeqAsciiString::kHeaderSize == SeqTwoByteString::kHeaderSize); + __ jmp(&second_prepared, Label::kNear); + __ bind(&second_is_sequential); + __ add(edx, Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag)); + __ bind(&second_prepared); + + // Push the addresses of both strings' first characters onto the stack. + __ push(edx); + __ push(eax); + + Label non_ascii_string_add_flat_result, call_runtime_drop_two; + // edi: instance type of second string + // First string and second string have the same encoding. + STATIC_ASSERT(kTwoByteStringTag == 0); + __ test_b(edi, kStringEncodingMask); __ j(zero, &non_ascii_string_add_flat_result); - __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset)); - __ test_b(FieldOperand(ecx, Map::kInstanceTypeOffset), kStringEncodingMask); - __ j(zero, &string_add_runtime); - // Both strings are ascii strings. As they are short they are both flat. 
+ // Both strings are ascii strings. // ebx: length of resulting flat string as a smi __ SmiUntag(ebx); - __ AllocateAsciiString(eax, ebx, ecx, edx, edi, &string_add_runtime); + __ AllocateAsciiString(eax, ebx, ecx, edx, edi, &call_runtime_drop_two); // eax: result string __ mov(ecx, eax); // Locate first character of result. __ add(ecx, Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag)); - // Load first argument and locate first character. - __ mov(edx, Operand(esp, 2 * kPointerSize)); + // Load first argument's length and first character location. Account for + // values currently on the stack when fetching arguments from it. + __ mov(edx, Operand(esp, 4 * kPointerSize)); __ mov(edi, FieldOperand(edx, String::kLengthOffset)); __ SmiUntag(edi); - __ add(edx, Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag)); + __ pop(edx); // eax: result string // ecx: first character of result // edx: first char of first argument // edi: length of first argument StringHelper::GenerateCopyCharacters(masm, ecx, edx, edi, ebx, true); - // Load second argument and locate first character. - __ mov(edx, Operand(esp, 1 * kPointerSize)); + // Load second argument's length and first character location. Account for + // values currently on the stack when fetching arguments from it. + __ mov(edx, Operand(esp, 2 * kPointerSize)); __ mov(edi, FieldOperand(edx, String::kLengthOffset)); __ SmiUntag(edi); - __ add(edx, Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag)); + __ pop(edx); // eax: result string // ecx: next character of result // edx: first char of second argument @@ -5704,34 +5728,30 @@ void StringAddStub::Generate(MacroAssembler* masm) { // ebx: length of resulting flat string as a smi // edx: second string __ bind(&non_ascii_string_add_flat_result); - __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset)); - __ test_b(FieldOperand(ecx, Map::kInstanceTypeOffset), kStringEncodingMask); - __ j(not_zero, &string_add_runtime); - // Both strings are two byte strings. As they are short they are both - // flat. + // Both strings are two byte strings. __ SmiUntag(ebx); - __ AllocateTwoByteString(eax, ebx, ecx, edx, edi, &string_add_runtime); + __ AllocateTwoByteString(eax, ebx, ecx, edx, edi, &call_runtime_drop_two); // eax: result string __ mov(ecx, eax); // Locate first character of result. - __ add(ecx, - Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag)); - // Load first argument and locate first character. - __ mov(edx, Operand(esp, 2 * kPointerSize)); + __ add(ecx, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag)); + // Load second argument's length and first character location. Account for + // values currently on the stack when fetching arguments from it. + __ mov(edx, Operand(esp, 4 * kPointerSize)); __ mov(edi, FieldOperand(edx, String::kLengthOffset)); __ SmiUntag(edi); - __ add(edx, - Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag)); + __ pop(edx); // eax: result string // ecx: first character of result // edx: first char of first argument // edi: length of first argument StringHelper::GenerateCopyCharacters(masm, ecx, edx, edi, ebx, false); - // Load second argument and locate first character. - __ mov(edx, Operand(esp, 1 * kPointerSize)); + // Load second argument's length and first character location. Account for + // values currently on the stack when fetching arguments from it. 
+ __ mov(edx, Operand(esp, 2 * kPointerSize)); __ mov(edi, FieldOperand(edx, String::kLengthOffset)); __ SmiUntag(edi); - __ add(edx, Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag)); + __ pop(edx); // eax: result string // ecx: next character of result // edx: first char of second argument @@ -5740,8 +5760,11 @@ void StringAddStub::Generate(MacroAssembler* masm) { __ IncrementCounter(counters->string_add_native(), 1); __ ret(2 * kPointerSize); + // Recover stack pointer before jumping to runtime. + __ bind(&call_runtime_drop_two); + __ Drop(2); // Just jump to runtime to add the two strings. - __ bind(&string_add_runtime); + __ bind(&call_runtime); __ TailCallRuntime(Runtime::kStringAdd, 2, 1); if (call_builtin.is_linked()) { @@ -6120,20 +6143,20 @@ void SubStringStub::Generate(MacroAssembler* masm) { FieldOperand(eax, edx, times_1, SeqAsciiString::kHeaderSize + 1)); // Try to lookup two character string in symbol table. - Label make_two_character_string; + Label combine_two_char, save_two_char; StringHelper::GenerateTwoCharacterSymbolTableProbe( - masm, ebx, ecx, eax, edx, edi, - &make_two_character_string, &make_two_character_string); + masm, ebx, ecx, eax, edx, edi, &combine_two_char, &save_two_char); __ IncrementCounter(counters->sub_string_native(), 1); __ ret(3 * kPointerSize); - __ bind(&make_two_character_string); - // Setup registers for allocating the two character string. - __ mov(eax, Operand(esp, 3 * kPointerSize)); - __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset)); - __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset)); - __ Set(ecx, Immediate(Smi::FromInt(2))); - __ mov(edx, Operand(esp, 2 * kPointerSize)); // Load index. + __ bind(&combine_two_char); + __ shl(ecx, kBitsPerByte); + __ or_(ebx, ecx); + __ bind(&save_two_char); + __ AllocateAsciiString(eax, 2, ecx, edx, &runtime); + __ mov_w(FieldOperand(eax, SeqAsciiString::kHeaderSize), ebx); + __ IncrementCounter(counters->sub_string_native(), 1); + __ ret(3 * kPointerSize); __ bind(&result_longer_than_two); // eax: string @@ -6181,7 +6204,7 @@ void SubStringStub::Generate(MacroAssembler* masm) { if (FLAG_string_slices) { Label copy_routine; // edi: underlying subject string - // ebx: instance type of original subject string + // ebx: instance type of underlying subject string // edx: adjusted start index (smi) // ecx: length (smi) __ cmp(ecx, Immediate(Smi::FromInt(SlicedString::kMinLength))); @@ -6214,7 +6237,7 @@ void SubStringStub::Generate(MacroAssembler* masm) { } // edi: underlying subject string - // ebx: instance type of original subject string + // ebx: instance type of underlying subject string // edx: adjusted start index (smi) // ecx: length (smi) // The subject string can only be external or sequential string of either @@ -6226,7 +6249,6 @@ void SubStringStub::Generate(MacroAssembler* masm) { __ j(zero, &sequential_string); // Handle external string. - Label ascii_external, done; // Rule out short external strings. 
STATIC_CHECK(kShortExternalStringTag != 0); __ test_b(ebx, kShortExternalStringMask); diff --git a/deps/v8/src/ia32/ic-ia32.cc b/deps/v8/src/ia32/ic-ia32.cc index a83db129a1..82bb02a318 100644 --- a/deps/v8/src/ia32/ic-ia32.cc +++ b/deps/v8/src/ia32/ic-ia32.cc @@ -1374,10 +1374,10 @@ void StoreIC::GenerateArrayLength(MacroAssembler* masm) { // -- esp[0] : return address // ----------------------------------- // - // This accepts as a receiver anything JSObject::SetElementsLength accepts + // This accepts as a receiver anything JSArray::SetElementsLength accepts // (currently anything except for external arrays which means anything with - // elements of FixedArray type.), but currently is restricted to JSArray. - // Value must be a number, but only smis are accepted as the most common case. + // elements of FixedArray type). Value must be a number, but only smis are + // accepted as the most common case. Label miss; @@ -1399,6 +1399,13 @@ void StoreIC::GenerateArrayLength(MacroAssembler* masm) { __ CmpObjectType(scratch, FIXED_ARRAY_TYPE, scratch); __ j(not_equal, &miss); + // Check that the array has fast properties, otherwise the length + // property might have been redefined. + __ mov(scratch, FieldOperand(receiver, JSArray::kPropertiesOffset)); + __ CompareRoot(FieldOperand(scratch, FixedArray::kMapOffset), + Heap::kHashTableMapRootIndex); + __ j(equal, &miss); + // Check that value is a smi. __ JumpIfNotSmi(value, &miss); diff --git a/deps/v8/src/ia32/lithium-codegen-ia32.cc b/deps/v8/src/ia32/lithium-codegen-ia32.cc index 23db874067..7883481041 100644 --- a/deps/v8/src/ia32/lithium-codegen-ia32.cc +++ b/deps/v8/src/ia32/lithium-codegen-ia32.cc @@ -2165,9 +2165,17 @@ void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) { Register context = ToRegister(instr->context()); Register result = ToRegister(instr->result()); __ mov(result, ContextOperand(context, instr->slot_index())); + if (instr->hydrogen()->RequiresHoleCheck()) { __ cmp(result, factory()->the_hole_value()); - DeoptimizeIf(equal, instr->environment()); + if (instr->hydrogen()->DeoptimizesOnHole()) { + DeoptimizeIf(equal, instr->environment()); + } else { + Label is_not_hole; + __ j(not_equal, &is_not_hole, Label::kNear); + __ mov(result, factory()->undefined_value()); + __ bind(&is_not_hole); + } } } @@ -2175,11 +2183,19 @@ void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) { void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) { Register context = ToRegister(instr->context()); Register value = ToRegister(instr->value()); + + Label skip_assignment; + Operand target = ContextOperand(context, instr->slot_index()); if (instr->hydrogen()->RequiresHoleCheck()) { __ cmp(target, factory()->the_hole_value()); - DeoptimizeIf(equal, instr->environment()); + if (instr->hydrogen()->DeoptimizesOnHole()) { + DeoptimizeIf(equal, instr->environment()); + } else { + __ j(not_equal, &skip_assignment, Label::kNear); + } } + __ mov(target, value); if (instr->hydrogen()->NeedsWriteBarrier()) { HType type = instr->hydrogen()->value()->type(); @@ -2195,6 +2211,8 @@ void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) { EMIT_REMEMBERED_SET, check_needed); } + + __ bind(&skip_assignment); } diff --git a/deps/v8/src/ia32/macro-assembler-ia32.cc b/deps/v8/src/ia32/macro-assembler-ia32.cc index 2e4cfa4f05..3356e81892 100644 --- a/deps/v8/src/ia32/macro-assembler-ia32.cc +++ b/deps/v8/src/ia32/macro-assembler-ia32.cc @@ -357,6 +357,14 @@ void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) { } +void 
MacroAssembler::CompareRoot(const Operand& with,
+                                 Heap::RootListIndex index) {
+  // see ROOT_ACCESSOR macro in factory.h
+  Handle<Object> value(&isolate()->heap()->roots_array_start()[index]);
+  cmp(with, value);
+}
+
+
 void MacroAssembler::CmpObjectType(Register heap_object,
                                    InstanceType type,
                                    Register map) {
diff --git a/deps/v8/src/ia32/macro-assembler-ia32.h b/deps/v8/src/ia32/macro-assembler-ia32.h
index 46f99be200..47214ea03c 100644
--- a/deps/v8/src/ia32/macro-assembler-ia32.h
+++ b/deps/v8/src/ia32/macro-assembler-ia32.h
@@ -308,8 +308,9 @@ class MacroAssembler: public Assembler {
   void SafeSet(Register dst, const Immediate& x);
   void SafePush(const Immediate& x);

-  // Compare a register against a known root, e.g. undefined, null, true, ...
+  // Compare against a known root, e.g. undefined, null, true, ...
   void CompareRoot(Register with, Heap::RootListIndex index);
+  void CompareRoot(const Operand& with, Heap::RootListIndex index);

   // Compare object type for heap object.
   // Incoming register is heap_object and outgoing register is map.
diff --git a/deps/v8/src/ic.cc b/deps/v8/src/ic.cc
index ad447cca58..624ecd7f58 100644
--- a/deps/v8/src/ic.cc
+++ b/deps/v8/src/ic.cc
@@ -1272,10 +1272,13 @@ MaybeObject* StoreIC::Store(State state,
     return *value;
   }

-  // Use specialized code for setting the length of arrays.
-  if (receiver->IsJSArray()
-      && name->Equals(isolate()->heap()->length_symbol())
-      && Handle<JSArray>::cast(receiver)->AllowsSetElementsLength()) {
+  // Use specialized code for setting the length of arrays with fast
+  // properties. Slow properties might indicate redefinition of the
+  // length property.
+  if (receiver->IsJSArray() &&
+      name->Equals(isolate()->heap()->length_symbol()) &&
+      Handle<JSArray>::cast(receiver)->AllowsSetElementsLength() &&
+      receiver->HasFastProperties()) {
 #ifdef DEBUG
     if (FLAG_trace_ic) PrintF("[StoreIC : +#length /array]\n");
 #endif
@@ -1879,12 +1882,19 @@ RUNTIME_FUNCTION(MaybeObject*, StoreIC_ArrayLength) {
   NoHandleAllocation nha;

   ASSERT(args.length() == 2);
-  JSObject* receiver = JSObject::cast(args[0]);
+  JSArray* receiver = JSArray::cast(args[0]);
   Object* len = args[1];

   // The generated code should filter out non-Smis before we get here.
   ASSERT(len->IsSmi());

+#ifdef DEBUG
+  // The length property has to be a writable callback property.
+  LookupResult debug_lookup(isolate);
+  receiver->LocalLookup(isolate->heap()->length_symbol(), &debug_lookup);
+  ASSERT(debug_lookup.type() == CALLBACKS && !debug_lookup.IsReadOnly());
+#endif
+
   Object* result;
   { MaybeObject* maybe_result = receiver->SetElementsLength(len);
     if (!maybe_result->ToObject(&result)) return maybe_result;
diff --git a/deps/v8/src/incremental-marking.cc b/deps/v8/src/incremental-marking.cc
index dd54c63073..8fca305057 100644
--- a/deps/v8/src/incremental-marking.cc
+++ b/deps/v8/src/incremental-marking.cc
@@ -677,11 +677,16 @@ void IncrementalMarking::Hurry() {

     Object* context = heap_->global_contexts_list();
     while (!context->IsUndefined()) {
-      NormalizedMapCache* cache = Context::cast(context)->normalized_map_cache();
-      MarkBit mark_bit = Marking::MarkBitFrom(cache);
-      if (Marking::IsGrey(mark_bit)) {
-        Marking::GreyToBlack(mark_bit);
-        MemoryChunk::IncrementLiveBytes(cache->address(), cache->Size());
+      // GC can happen when the context is not fully initialized,
+      // so the cache can be undefined.
+ HeapObject* cache = HeapObject::cast( + Context::cast(context)->get(Context::NORMALIZED_MAP_CACHE_INDEX)); + if (!cache->IsUndefined()) { + MarkBit mark_bit = Marking::MarkBitFrom(cache); + if (Marking::IsGrey(mark_bit)) { + Marking::GreyToBlack(mark_bit); + MemoryChunk::IncrementLiveBytes(cache->address(), cache->Size()); + } } context = Context::cast(context)->get(Context::NEXT_CONTEXT_LINK); } diff --git a/deps/v8/src/mark-compact.cc b/deps/v8/src/mark-compact.cc index a864c34674..1fb5d5f438 100644 --- a/deps/v8/src/mark-compact.cc +++ b/deps/v8/src/mark-compact.cc @@ -3623,14 +3623,6 @@ void MarkCompactCollector::SweepSpace(PagedSpace* space, SweeperType sweeper) { continue; } - if (lazy_sweeping_active) { - if (FLAG_gc_verbose) { - PrintF("Sweeping 0x%" V8PRIxPTR " lazily postponed.\n", - reinterpret_cast(p)); - } - continue; - } - // One unused page is kept, all further are released before sweeping them. if (p->LiveBytes() == 0) { if (unused_page_present) { @@ -3644,6 +3636,14 @@ void MarkCompactCollector::SweepSpace(PagedSpace* space, SweeperType sweeper) { unused_page_present = true; } + if (lazy_sweeping_active) { + if (FLAG_gc_verbose) { + PrintF("Sweeping 0x%" V8PRIxPTR " lazily postponed.\n", + reinterpret_cast(p)); + } + continue; + } + switch (sweeper) { case CONSERVATIVE: { if (FLAG_gc_verbose) { diff --git a/deps/v8/src/mips/code-stubs-mips.cc b/deps/v8/src/mips/code-stubs-mips.cc index 47f24a0ddf..b8e97f6dfe 100644 --- a/deps/v8/src/mips/code-stubs-mips.cc +++ b/deps/v8/src/mips/code-stubs-mips.cc @@ -5972,7 +5972,7 @@ void StringHelper::GenerateHashGetHash(MacroAssembler* masm, void SubStringStub::Generate(MacroAssembler* masm) { - Label sub_string_runtime; + Label runtime; // Stack frame on entry. // ra: return address // sp[0]: to @@ -5990,53 +5990,35 @@ void SubStringStub::Generate(MacroAssembler* masm) { static const int kFromOffset = 1 * kPointerSize; static const int kStringOffset = 2 * kPointerSize; - Register to = t2; - Register from = t3; - - // Check bounds and smi-ness. - __ lw(to, MemOperand(sp, kToOffset)); - __ lw(from, MemOperand(sp, kFromOffset)); + __ lw(a2, MemOperand(sp, kToOffset)); + __ lw(a3, MemOperand(sp, kFromOffset)); STATIC_ASSERT(kFromOffset == kToOffset + 4); STATIC_ASSERT(kSmiTag == 0); STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1); - __ JumpIfNotSmi(from, &sub_string_runtime); - __ JumpIfNotSmi(to, &sub_string_runtime); - - __ sra(a3, from, kSmiTagSize); // Remove smi tag. - __ sra(t5, to, kSmiTagSize); // Remove smi tag. + // Utilize delay slots. SmiUntag doesn't emit a jump, everything else is + // safe in this case. + __ JumpIfSmi(a2, &runtime, at, USE_DELAY_SLOT); + __ SmiUntag(a2); + __ JumpIfSmi(a3, &runtime, at, USE_DELAY_SLOT); + __ SmiUntag(a3); - // a3: from index (untagged smi) - // t5: to index (untagged smi) + // Both a2 and a3 are untagged integers. - __ Branch(&sub_string_runtime, lt, a3, Operand(zero_reg)); // From < 0. + __ Branch(&runtime, lt, a3, Operand(zero_reg)); // From < 0. __ subu(a2, t5, a3); - __ Branch(&sub_string_runtime, gt, a3, Operand(t5)); // Fail if from > to. - - // Special handling of sub-strings of length 1 and 2. One character strings - // are handled in the runtime system (looked up in the single character - // cache). Two character strings are looked for in the symbol cache in - // generated code. - __ Branch(&sub_string_runtime, lt, a2, Operand(2)); - - // Both to and from are smis. + __ Branch(&runtime, gt, a3, Operand(t5)); // Fail if from > to. 
- // a2: result string length - // a3: from index (untagged smi) - // t2: (a.k.a. to): to (smi) - // t3: (a.k.a. from): from offset (smi) - // t5: to index (untagged smi) - - // Make sure first argument is a sequential (or flat) string. + // Make sure first argument is a string. __ lw(v0, MemOperand(sp, kStringOffset)); - __ Branch(&sub_string_runtime, eq, v0, Operand(kSmiTagMask)); + __ Branch(&runtime, eq, v0, Operand(kSmiTagMask)); __ lw(a1, FieldMemOperand(v0, HeapObject::kMapOffset)); __ lbu(a1, FieldMemOperand(a1, Map::kInstanceTypeOffset)); __ And(t4, v0, Operand(kIsNotStringMask)); - __ Branch(&sub_string_runtime, ne, t4, Operand(zero_reg)); + __ Branch(&runtime, ne, t4, Operand(zero_reg)); // Short-cut for the case of trivial substring. Label return_v0; @@ -6046,74 +6028,16 @@ void SubStringStub::Generate(MacroAssembler* masm) { __ sra(t0, t0, 1); __ Branch(&return_v0, eq, a2, Operand(t0)); - Label create_slice; - if (FLAG_string_slices) { - __ Branch(&create_slice, ge, a2, Operand(SlicedString::kMinLength)); - } - - // v0: original string - // a1: instance type - // a2: result string length - // a3: from index (untagged smi) - // t2: (a.k.a. to): to (smi) - // t3: (a.k.a. from): from offset (smi) - // t5: to index (untagged smi) - - Label seq_string; - __ And(t0, a1, Operand(kStringRepresentationMask)); - STATIC_ASSERT(kSeqStringTag < kConsStringTag); - STATIC_ASSERT(kConsStringTag < kExternalStringTag); - STATIC_ASSERT(kConsStringTag < kSlicedStringTag); - - // Slices and external strings go to runtime. - __ Branch(&sub_string_runtime, gt, t0, Operand(kConsStringTag)); - - // Sequential strings are handled directly. - __ Branch(&seq_string, lt, t0, Operand(kConsStringTag)); - - // Cons string. Try to recurse (once) on the first substring. - // (This adds a little more generality than necessary to handle flattened - // cons strings, but not much). - __ lw(v0, FieldMemOperand(v0, ConsString::kFirstOffset)); - __ lw(t0, FieldMemOperand(v0, HeapObject::kMapOffset)); - __ lbu(a1, FieldMemOperand(t0, Map::kInstanceTypeOffset)); - STATIC_ASSERT(kSeqStringTag == 0); - // Cons, slices and external strings go to runtime. - __ Branch(&sub_string_runtime, ne, a1, Operand(kStringRepresentationMask)); - - // Definitly a sequential string. - __ bind(&seq_string); - - // v0: original string - // a1: instance type - // a2: result string length - // a3: from index (untagged smi) - // t2: (a.k.a. to): to (smi) - // t3: (a.k.a. from): from offset (smi) - // t5: to index (untagged smi) - - __ lw(t0, FieldMemOperand(v0, String::kLengthOffset)); - __ Branch(&sub_string_runtime, lt, t0, Operand(to)); // Fail if to > length. - to = no_reg; - - // v0: original string or left hand side of the original cons string. - // a1: instance type - // a2: result string length - // a3: from index (untagged smi) - // t3: (a.k.a. from): from offset (smi) - // t5: to index (untagged smi) - - // Check for flat ASCII string. - Label non_ascii_flat; - STATIC_ASSERT(kTwoByteStringTag == 0); - - __ And(t4, a1, Operand(kStringEncodingMask)); - __ Branch(&non_ascii_flat, eq, t4, Operand(zero_reg)); Label result_longer_than_two; - __ Branch(&result_longer_than_two, gt, a2, Operand(2)); + // Check for special case of two character ascii string, in which case + // we do a lookup in the symbol table first. + __ li(t0, 2); + __ Branch(&result_longer_than_two, gt, a2, Operand(t0)); + __ Branch(&runtime, lt, a2, Operand(t0)); + + __ JumpIfInstanceTypeIsNotSequentialAscii(a1, a1, &runtime); - // Sub string of length 2 requested. 
// Get the two characters forming the sub string. __ Addu(v0, v0, Operand(a3)); __ lbu(a3, FieldMemOperand(v0, SeqAsciiString::kHeaderSize)); @@ -6123,31 +6047,126 @@ void SubStringStub::Generate(MacroAssembler* masm) { Label make_two_character_string; StringHelper::GenerateTwoCharacterSymbolTableProbe( masm, a3, t0, a1, t1, t2, t3, t4, &make_two_character_string); - Counters* counters = masm->isolate()->counters(); __ jmp(&return_v0); // a2: result string length. // a3: two characters combined into halfword in little endian byte order. __ bind(&make_two_character_string); - __ AllocateAsciiString(v0, a2, t0, t1, t4, &sub_string_runtime); + __ AllocateAsciiString(v0, a2, t0, t1, t4, &runtime); __ sh(a3, FieldMemOperand(v0, SeqAsciiString::kHeaderSize)); __ jmp(&return_v0); __ bind(&result_longer_than_two); - // Locate 'from' character of string. - __ Addu(t1, v0, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag)); - __ sra(t4, from, 1); - __ Addu(t1, t1, t4); + // Deal with different string types: update the index if necessary + // and put the underlying string into t1. + // v0: original string + // a1: instance type + // a2: length + // a3: from index (untagged) + Label underlying_unpacked, sliced_string, seq_or_external_string; + // If the string is not indirect, it can only be sequential or external. + STATIC_ASSERT(kIsIndirectStringMask == (kSlicedStringTag & kConsStringTag)); + STATIC_ASSERT(kIsIndirectStringMask != 0); + __ And(t0, a1, Operand(kIsIndirectStringMask)); + __ Branch(USE_DELAY_SLOT, &seq_or_external_string, eq, t0, Operand(zero_reg)); + + __ And(t0, a1, Operand(kSlicedNotConsMask)); + __ Branch(&sliced_string, ne, t0, Operand(zero_reg)); + // Cons string. Check whether it is flat, then fetch first part. + __ lw(t1, FieldMemOperand(v0, ConsString::kSecondOffset)); + __ LoadRoot(t0, Heap::kEmptyStringRootIndex); + __ Branch(&runtime, ne, t1, Operand(t0)); + __ lw(t1, FieldMemOperand(v0, ConsString::kFirstOffset)); + // Update instance type. + __ lw(a1, FieldMemOperand(t1, HeapObject::kMapOffset)); + __ lbu(a1, FieldMemOperand(a1, Map::kInstanceTypeOffset)); + __ jmp(&underlying_unpacked); + + __ bind(&sliced_string); + // Sliced string. Fetch parent and correct start index by offset. + __ lw(t1, FieldMemOperand(v0, SlicedString::kOffsetOffset)); + __ sra(t1, t1, 1); + __ Addu(a3, a3, t1); + __ lw(t1, FieldMemOperand(v0, SlicedString::kParentOffset)); + // Update instance type. + __ lw(a1, FieldMemOperand(t1, HeapObject::kMapOffset)); + __ lbu(a1, FieldMemOperand(a1, Map::kInstanceTypeOffset)); + __ jmp(&underlying_unpacked); - // Allocate the result. - __ AllocateAsciiString(v0, a2, t4, t0, a1, &sub_string_runtime); + __ bind(&seq_or_external_string); + // Sequential or external string. Just move string to the expected register. + __ mov(t1, v0); + + __ bind(&underlying_unpacked); + + if (FLAG_string_slices) { + Label copy_routine; + // t1: underlying subject string + // a1: instance type of underlying subject string + // a2: length + // a3: adjusted start index (untagged) + // Short slice. Copy instead of slicing. + __ Branch(©_routine, lt, a2, Operand(SlicedString::kMinLength)); + // Allocate new sliced string. At this point we do not reload the instance + // type including the string encoding because we simply rely on the info + // provided by the original string. It does not matter if the original + // string's encoding is wrong because we always have to recheck encoding of + // the newly created string's parent anyways due to externalized strings. 
+ Label two_byte_slice, set_slice_header; + STATIC_ASSERT((kStringEncodingMask & kAsciiStringTag) != 0); + STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0); + __ And(t0, a1, Operand(kStringEncodingMask)); + __ Branch(&two_byte_slice, eq, t0, Operand(zero_reg)); + __ AllocateAsciiSlicedString(v0, a2, t2, t3, &runtime); + __ jmp(&set_slice_header); + __ bind(&two_byte_slice); + __ AllocateTwoByteSlicedString(v0, a2, t2, t3, &runtime); + __ bind(&set_slice_header); + __ sll(a3, a3, 1); + __ sw(a3, FieldMemOperand(v0, SlicedString::kOffsetOffset)); + __ sw(t1, FieldMemOperand(v0, SlicedString::kParentOffset)); + __ jmp(&return_v0); + + __ bind(©_routine); + } + + // t1: underlying subject string + // a1: instance type of underlying subject string + // a2: length + // a3: adjusted start index (untagged) + Label two_byte_sequential, sequential_string, allocate_result; + STATIC_ASSERT(kExternalStringTag != 0); + STATIC_ASSERT(kSeqStringTag == 0); + __ And(t0, a1, Operand(kExternalStringTag)); + __ Branch(&sequential_string, eq, t0, Operand(zero_reg)); + + // Handle external string. + // Rule out short external strings. + STATIC_CHECK(kShortExternalStringTag != 0); + __ And(t0, a1, Operand(kShortExternalStringTag)); + __ Branch(&runtime, ne, t0, Operand(zero_reg)); + __ lw(t1, FieldMemOperand(t1, ExternalString::kResourceDataOffset)); + // t1 already points to the first character of underlying string. + __ jmp(&allocate_result); + + __ bind(&sequential_string); + // Locate first character of underlying subject string. + STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqAsciiString::kHeaderSize); + __ Addu(t1, t1, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag)); + + __ bind(&allocate_result); + // Sequential acii string. Allocate the result. + STATIC_ASSERT((kAsciiStringTag & kStringEncodingMask) != 0); + __ And(t0, a1, Operand(kStringEncodingMask)); + __ Branch(&two_byte_sequential, eq, t0, Operand(zero_reg)); + + // Allocate and copy the resulting ascii string. + __ AllocateAsciiString(v0, a2, t0, t2, t3, &runtime); + + // Locate first character of substring to copy. + __ Addu(t1, t1, a3); - // v0: result string - // a2: result string length - // a3: from index (untagged smi) - // t1: first character of substring to copy - // t3: (a.k.a. from): from offset (smi) // Locate first character of result. __ Addu(a1, v0, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag)); @@ -6160,30 +6179,17 @@ void SubStringStub::Generate(MacroAssembler* masm) { masm, a1, t1, a2, a3, t0, t2, t3, t4, COPY_ASCII | DEST_ALWAYS_ALIGNED); __ jmp(&return_v0); - __ bind(&non_ascii_flat); - // a2: result string length - // t1: string - // t3: (a.k.a. from): from offset (smi) - // Check for flat two byte string. - - // Locate 'from' character of string. - __ Addu(t1, v0, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag)); - // As "from" is a smi it is 2 times the value which matches the size of a two - // byte character. - STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0); - __ Addu(t1, t1, Operand(from)); - - // Allocate the result. - __ AllocateTwoByteString(v0, a2, a1, a3, t0, &sub_string_runtime); + // Allocate and copy the resulting two-byte string. + __ bind(&two_byte_sequential); + __ AllocateTwoByteString(v0, a2, t0, t2, t3, &runtime); - // v0: result string - // a2: result string length - // t1: first character of substring to copy + // Locate first character of substring to copy. 
+ STATIC_ASSERT(kSmiTagSize == 1 && kSmiTag == 0); + __ sll(t0, a3, 1); + __ Addu(t1, t1, t0); // Locate first character of result. __ Addu(a1, v0, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag)); - from = no_reg; - // v0: result string. // a1: first character of result. // a2: result length. @@ -6191,75 +6197,14 @@ void SubStringStub::Generate(MacroAssembler* masm) { STATIC_ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0); StringHelper::GenerateCopyCharactersLong( masm, a1, t1, a2, a3, t0, t2, t3, t4, DEST_ALWAYS_ALIGNED); - __ jmp(&return_v0); - - if (FLAG_string_slices) { - __ bind(&create_slice); - // v0: original string - // a1: instance type - // a2: length - // a3: from index (untagged smi) - // t2 (a.k.a. to): to (smi) - // t3 (a.k.a. from): from offset (smi) - Label allocate_slice, sliced_string, seq_or_external_string; - // If the string is not indirect, it can only be sequential or external. - STATIC_ASSERT(kIsIndirectStringMask == (kSlicedStringTag & kConsStringTag)); - STATIC_ASSERT(kIsIndirectStringMask != 0); - __ And(t4, a1, Operand(kIsIndirectStringMask)); - // External string. Jump to runtime. - __ Branch(&seq_or_external_string, eq, t4, Operand(zero_reg)); - - __ And(t4, a1, Operand(kSlicedNotConsMask)); - __ Branch(&sliced_string, ne, t4, Operand(zero_reg)); - // Cons string. Check whether it is flat, then fetch first part. - __ lw(t1, FieldMemOperand(v0, ConsString::kSecondOffset)); - __ LoadRoot(t5, Heap::kEmptyStringRootIndex); - __ Branch(&sub_string_runtime, ne, t1, Operand(t5)); - __ lw(t1, FieldMemOperand(v0, ConsString::kFirstOffset)); - __ jmp(&allocate_slice); - - __ bind(&sliced_string); - // Sliced string. Fetch parent and correct start index by offset. - __ lw(t1, FieldMemOperand(v0, SlicedString::kOffsetOffset)); - __ addu(t3, t3, t1); - __ lw(t1, FieldMemOperand(v0, SlicedString::kParentOffset)); - __ jmp(&allocate_slice); - - __ bind(&seq_or_external_string); - // Sequential or external string. Just move string to the correct register. - __ mov(t1, v0); - - __ bind(&allocate_slice); - // a1: instance type of original string - // a2: length - // t1: underlying subject string - // t3 (a.k.a. from): from offset (smi) - // Allocate new sliced string. At this point we do not reload the instance - // type including the string encoding because we simply rely on the info - // provided by the original string. It does not matter if the original - // string's encoding is wrong because we always have to recheck encoding of - // the newly created string's parent anyways due to externalized strings. - Label two_byte_slice, set_slice_header; - STATIC_ASSERT((kStringEncodingMask & kAsciiStringTag) != 0); - STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0); - __ And(t4, a1, Operand(kStringEncodingMask)); - __ Branch(&two_byte_slice, eq, t4, Operand(zero_reg)); - __ AllocateAsciiSlicedString(v0, a2, a3, t0, &sub_string_runtime); - __ jmp(&set_slice_header); - __ bind(&two_byte_slice); - __ AllocateTwoByteSlicedString(v0, a2, a3, t0, &sub_string_runtime); - __ bind(&set_slice_header); - __ sw(t3, FieldMemOperand(v0, SlicedString::kOffsetOffset)); - __ sw(t1, FieldMemOperand(v0, SlicedString::kParentOffset)); - } __ bind(&return_v0); + Counters* counters = masm->isolate()->counters(); __ IncrementCounter(counters->sub_string_native(), 1, a3, t0); - __ Addu(sp, sp, Operand(3 * kPointerSize)); - __ Ret(); + __ DropAndRet(3); // Just jump to runtime to create the sub string. 
- __ bind(&sub_string_runtime); + __ bind(&runtime); __ TailCallRuntime(Runtime::kSubString, 3, 1); } @@ -6417,7 +6362,7 @@ void StringCompareStub::Generate(MacroAssembler* masm) { void StringAddStub::Generate(MacroAssembler* masm) { - Label string_add_runtime, call_builtin; + Label call_runtime, call_builtin; Builtins::JavaScript builtin_id = Builtins::ADD; Counters* counters = masm->isolate()->counters(); @@ -6432,7 +6377,7 @@ void StringAddStub::Generate(MacroAssembler* masm) { // Make sure that both arguments are strings if not known in advance. if (flags_ == NO_STRING_ADD_FLAGS) { - __ JumpIfEitherSmi(a0, a1, &string_add_runtime); + __ JumpIfEitherSmi(a0, a1, &call_runtime); // Load instance types. __ lw(t0, FieldMemOperand(a0, HeapObject::kMapOffset)); __ lw(t1, FieldMemOperand(a1, HeapObject::kMapOffset)); @@ -6442,7 +6387,7 @@ void StringAddStub::Generate(MacroAssembler* masm) { // If either is not a string, go to runtime. __ Or(t4, t0, Operand(t1)); __ And(t4, t4, Operand(kIsNotStringMask)); - __ Branch(&string_add_runtime, ne, t4, Operand(zero_reg)); + __ Branch(&call_runtime, ne, t4, Operand(zero_reg)); } else { // Here at least one of the arguments is definitely a string. // We convert the one that is not known to be a string. @@ -6481,8 +6426,7 @@ void StringAddStub::Generate(MacroAssembler* masm) { __ Branch(&strings_not_empty, ne, t4, Operand(zero_reg)); __ IncrementCounter(counters->string_add_native(), 1, a2, a3); - __ Addu(sp, sp, Operand(2 * kPointerSize)); - __ Ret(); + __ DropAndRet(2); __ bind(&strings_not_empty); } @@ -6515,7 +6459,7 @@ void StringAddStub::Generate(MacroAssembler* masm) { __ lbu(t1, FieldMemOperand(t1, Map::kInstanceTypeOffset)); } __ JumpIfBothInstanceTypesAreNotSequentialAscii(t0, t1, t2, t3, - &string_add_runtime); + &call_runtime); // Get the two characters forming the sub string. __ lbu(a2, FieldMemOperand(a0, SeqAsciiString::kHeaderSize)); @@ -6525,10 +6469,9 @@ void StringAddStub::Generate(MacroAssembler* masm) { // just allocate a new one. Label make_two_character_string; StringHelper::GenerateTwoCharacterSymbolTableProbe( - masm, a2, a3, t2, t3, t0, t1, t4, &make_two_character_string); + masm, a2, a3, t2, t3, t0, t1, t5, &make_two_character_string); __ IncrementCounter(counters->string_add_native(), 1, a2, a3); - __ Addu(sp, sp, Operand(2 * kPointerSize)); - __ Ret(); + __ DropAndRet(2); __ bind(&make_two_character_string); // Resulting string has length 2 and first chars of two strings @@ -6537,11 +6480,10 @@ void StringAddStub::Generate(MacroAssembler* masm) { // halfword store instruction (which assumes that processor is // in a little endian mode). __ li(t2, Operand(2)); - __ AllocateAsciiString(v0, t2, t0, t1, t4, &string_add_runtime); + __ AllocateAsciiString(v0, t2, t0, t1, t5, &call_runtime); __ sh(a2, FieldMemOperand(v0, SeqAsciiString::kHeaderSize)); __ IncrementCounter(counters->string_add_native(), 1, a2, a3); - __ Addu(sp, sp, Operand(2 * kPointerSize)); - __ Ret(); + __ DropAndRet(2); __ bind(&longer_than_two); // Check if resulting string will be flat. @@ -6551,7 +6493,7 @@ void StringAddStub::Generate(MacroAssembler* masm) { STATIC_ASSERT((String::kMaxLength & 0x80000000) == 0); ASSERT(IsPowerOf2(String::kMaxLength + 1)); // kMaxLength + 1 is representable as shifted literal, kMaxLength is not. - __ Branch(&string_add_runtime, hs, t2, Operand(String::kMaxLength + 1)); + __ Branch(&call_runtime, hs, t2, Operand(String::kMaxLength + 1)); // If result is not supposed to be flat, allocate a cons string object. 
// If both strings are ASCII the result is an ASCII cons string. @@ -6570,15 +6512,13 @@ void StringAddStub::Generate(MacroAssembler* masm) { // Allocate an ASCII cons string. __ bind(&ascii_data); - __ AllocateAsciiConsString(t3, t2, t0, t1, &string_add_runtime); + __ AllocateAsciiConsString(v0, t2, t0, t1, &call_runtime); __ bind(&allocated); // Fill the fields of the cons string. - __ sw(a0, FieldMemOperand(t3, ConsString::kFirstOffset)); - __ sw(a1, FieldMemOperand(t3, ConsString::kSecondOffset)); - __ mov(v0, t3); + __ sw(a0, FieldMemOperand(v0, ConsString::kFirstOffset)); + __ sw(a1, FieldMemOperand(v0, ConsString::kSecondOffset)); __ IncrementCounter(counters->string_add_native(), 1, a2, a3); - __ Addu(sp, sp, Operand(2 * kPointerSize)); - __ Ret(); + __ DropAndRet(2); __ bind(&non_ascii); // At least one of the strings is two-byte. Check whether it happens @@ -6596,11 +6536,13 @@ void StringAddStub::Generate(MacroAssembler* masm) { __ Branch(&ascii_data, eq, t0, Operand(kAsciiStringTag | kAsciiDataHintTag)); // Allocate a two byte cons string. - __ AllocateTwoByteConsString(t3, t2, t0, t1, &string_add_runtime); + __ AllocateTwoByteConsString(v0, t2, t0, t1, &call_runtime); __ Branch(&allocated); - // Handle creating a flat result. First check that both strings are - // sequential and that they have the same encoding. + // We cannot encounter sliced strings or cons strings here since: + STATIC_ASSERT(SlicedString::kMinLength >= String::kMinNonFlatLength); + // Handle creating a flat result from either external or sequential strings. + // Locate the first characters' locations. // a0: first string // a1: second string // a2: length of first string @@ -6608,6 +6550,7 @@ void StringAddStub::Generate(MacroAssembler* masm) { // t0: first string instance type (if flags_ == NO_STRING_ADD_FLAGS) // t1: second string instance type (if flags_ == NO_STRING_ADD_FLAGS) // t2: sum of lengths. + Label first_prepared, second_prepared; __ bind(&string_add_flat_result); if (flags_ != NO_STRING_ADD_FLAGS) { __ lw(t0, FieldMemOperand(a0, HeapObject::kMapOffset)); @@ -6615,101 +6558,86 @@ void StringAddStub::Generate(MacroAssembler* masm) { __ lbu(t0, FieldMemOperand(t0, Map::kInstanceTypeOffset)); __ lbu(t1, FieldMemOperand(t1, Map::kInstanceTypeOffset)); } - // Check that both strings are sequential, meaning that we - // branch to runtime if either string tag is non-zero. + // Check whether both strings have same encoding + __ Xor(t3, t0, Operand(t1)); + __ And(t3, t3, Operand(kStringEncodingMask)); + __ Branch(&call_runtime, ne, t3, Operand(zero_reg)); + STATIC_ASSERT(kSeqStringTag == 0); - __ Or(t4, t0, Operand(t1)); - __ And(t4, t4, Operand(kStringRepresentationMask)); - __ Branch(&string_add_runtime, ne, t4, Operand(zero_reg)); + __ And(t4, t0, Operand(kStringRepresentationMask)); + + STATIC_ASSERT(SeqAsciiString::kHeaderSize == SeqTwoByteString::kHeaderSize); + Label skip_first_add; + __ Branch(&skip_first_add, ne, t4, Operand(zero_reg)); + __ Branch(USE_DELAY_SLOT, &first_prepared); + __ addiu(t3, a0, SeqAsciiString::kHeaderSize - kHeapObjectTag); + __ bind(&skip_first_add); + // External string: rule out short external string and load string resource. + STATIC_ASSERT(kShortExternalStringTag != 0); + __ And(t4, t0, Operand(kShortExternalStringMask)); + __ Branch(&call_runtime, ne, t4, Operand(zero_reg)); + __ lw(t3, FieldMemOperand(a0, ExternalString::kResourceDataOffset)); + __ bind(&first_prepared); - // Now check if both strings have the same encoding (ASCII/Two-byte). 
- // a0: first string - // a1: second string + STATIC_ASSERT(kSeqStringTag == 0); + __ And(t4, t1, Operand(kStringRepresentationMask)); + STATIC_ASSERT(SeqAsciiString::kHeaderSize == SeqTwoByteString::kHeaderSize); + Label skip_second_add; + __ Branch(&skip_second_add, ne, t4, Operand(zero_reg)); + __ Branch(USE_DELAY_SLOT, &second_prepared); + __ addiu(a1, a1, SeqAsciiString::kHeaderSize - kHeapObjectTag); + __ bind(&skip_second_add); + // External string: rule out short external string and load string resource. + STATIC_ASSERT(kShortExternalStringTag != 0); + __ And(t4, t1, Operand(kShortExternalStringMask)); + __ Branch(&call_runtime, ne, t4, Operand(zero_reg)); + __ lw(a1, FieldMemOperand(a1, ExternalString::kResourceDataOffset)); + __ bind(&second_prepared); + + Label non_ascii_string_add_flat_result; + // t3: first character of first string + // a1: first character of second string // a2: length of first string // a3: length of second string - // t0: first string instance type - // t1: second string instance type // t2: sum of lengths. - Label non_ascii_string_add_flat_result; - ASSERT(IsPowerOf2(kStringEncodingMask)); // Just one bit to test. - __ xor_(t3, t1, t0); - __ And(t3, t3, Operand(kStringEncodingMask)); - __ Branch(&string_add_runtime, ne, t3, Operand(zero_reg)); - // And see if it's ASCII (0) or two-byte (1). - __ And(t3, t0, Operand(kStringEncodingMask)); - __ Branch(&non_ascii_string_add_flat_result, eq, t3, Operand(zero_reg)); - - // Both strings are sequential ASCII strings. We also know that they are - // short (since the sum of the lengths is less than kMinNonFlatLength). - // t2: length of resulting flat string - __ AllocateAsciiString(t3, t2, t0, t1, t4, &string_add_runtime); - // Locate first character of result. - __ Addu(t2, t3, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag)); - // Locate first character of first argument. - __ Addu(a0, a0, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag)); - // a0: first character of first string. - // a1: second string. + // Both strings have the same encoding. + STATIC_ASSERT(kTwoByteStringTag == 0); + __ And(t4, t1, Operand(kStringEncodingMask)); + __ Branch(&non_ascii_string_add_flat_result, eq, t4, Operand(zero_reg)); + + __ AllocateAsciiString(v0, t2, t0, t1, t5, &call_runtime); + __ Addu(t2, v0, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag)); + // v0: result string. + // t3: first character of first string. + // a1: first character of second string // a2: length of first string. // a3: length of second string. // t2: first character of result. - // t3: result string. - StringHelper::GenerateCopyCharacters(masm, t2, a0, a2, t0, true); - // Load second argument and locate first character. - __ Addu(a1, a1, Operand(SeqAsciiString::kHeaderSize - kHeapObjectTag)); - // a1: first character of second string. - // a3: length of second string. + StringHelper::GenerateCopyCharacters(masm, t2, t3, a2, t0, true); // t2: next character of result. - // t3: result string. StringHelper::GenerateCopyCharacters(masm, t2, a1, a3, t0, true); - __ mov(v0, t3); __ IncrementCounter(counters->string_add_native(), 1, a2, a3); - __ Addu(sp, sp, Operand(2 * kPointerSize)); - __ Ret(); + __ DropAndRet(2); __ bind(&non_ascii_string_add_flat_result); - // Both strings are sequential two byte strings. - // a0: first string. - // a1: second string. - // a2: length of first string. - // a3: length of second string. - // t2: sum of length of strings. 
- __ AllocateTwoByteString(t3, t2, t0, t1, t4, &string_add_runtime); - // a0: first string. - // a1: second string. - // a2: length of first string. - // a3: length of second string. - // t3: result string. - - // Locate first character of result. - __ Addu(t2, t3, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag)); - // Locate first character of first argument. - __ Addu(a0, a0, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag)); - - // a0: first character of first string. - // a1: second string. + __ AllocateTwoByteString(v0, t2, t0, t1, t5, &call_runtime); + __ Addu(t2, v0, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag)); + // v0: result string. + // t3: first character of first string. + // a1: first character of second string. // a2: length of first string. // a3: length of second string. // t2: first character of result. - // t3: result string. - StringHelper::GenerateCopyCharacters(masm, t2, a0, a2, t0, false); - - // Locate first character of second argument. - __ Addu(a1, a1, Operand(SeqTwoByteString::kHeaderSize - kHeapObjectTag)); - - // a1: first character of second string. - // a3: length of second string. - // t2: next character of result (after copy of first string). - // t3: result string. + StringHelper::GenerateCopyCharacters(masm, t2, t3, a2, t0, false); + // t2: next character of result. StringHelper::GenerateCopyCharacters(masm, t2, a1, a3, t0, false); - __ mov(v0, t3); __ IncrementCounter(counters->string_add_native(), 1, a2, a3); - __ Addu(sp, sp, Operand(2 * kPointerSize)); - __ Ret(); + __ DropAndRet(2); // Just jump to runtime to add the two strings. - __ bind(&string_add_runtime); + __ bind(&call_runtime); __ TailCallRuntime(Runtime::kStringAdd, 2, 1); if (call_builtin.is_linked()) { diff --git a/deps/v8/src/mips/ic-mips.cc b/deps/v8/src/mips/ic-mips.cc index c240125bba..1eef55af1b 100644 --- a/deps/v8/src/mips/ic-mips.cc +++ b/deps/v8/src/mips/ic-mips.cc @@ -1470,11 +1470,10 @@ void StoreIC::GenerateArrayLength(MacroAssembler* masm) { // -- ra : return address // ----------------------------------- // - // This accepts as a receiver anything JSObject::SetElementsLength accepts - // (currently anything except for external and pixel arrays which means - // anything with elements of FixedArray type.), but currently is restricted - // to JSArray. - // Value must be a number, but only smis are accepted as the most common case. + // This accepts as a receiver anything JSArray::SetElementsLength accepts + // (currently anything except for external arrays which means anything with + // elements of FixedArray type). Value must be a number, but only smis are + // accepted as the most common case. Label miss; @@ -1496,6 +1495,10 @@ void StoreIC::GenerateArrayLength(MacroAssembler* masm) { __ GetObjectType(scratch, scratch, scratch); __ Branch(&miss, ne, scratch, Operand(FIXED_ARRAY_TYPE)); + // Check that the array has fast properties, otherwise the length + // property might have been redefined. + // TODO(mstarzinger): Port this check to MIPS. + // Check that value is a smi. 
__ JumpIfNotSmi(value, &miss); diff --git a/deps/v8/src/mips/lithium-codegen-mips.cc b/deps/v8/src/mips/lithium-codegen-mips.cc index aba7516fa2..eaef6ff40d 100644 --- a/deps/v8/src/mips/lithium-codegen-mips.cc +++ b/deps/v8/src/mips/lithium-codegen-mips.cc @@ -2180,10 +2180,19 @@ void LCodeGen::DoStoreGlobalGeneric(LStoreGlobalGeneric* instr) { void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) { Register context = ToRegister(instr->context()); Register result = ToRegister(instr->result()); + __ lw(result, ContextOperand(context, instr->slot_index())); if (instr->hydrogen()->RequiresHoleCheck()) { __ LoadRoot(at, Heap::kTheHoleValueRootIndex); - DeoptimizeIf(eq, instr->environment(), result, Operand(at)); + + if (instr->hydrogen()->DeoptimizesOnHole()) { + DeoptimizeIf(eq, instr->environment(), result, Operand(at)); + } else { + Label is_not_hole; + __ Branch(&is_not_hole, ne, result, Operand(at)); + __ LoadRoot(result, Heap::kUndefinedValueRootIndex); + __ bind(&is_not_hole); + } } } @@ -2191,13 +2200,22 @@ void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) { void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) { Register context = ToRegister(instr->context()); Register value = ToRegister(instr->value()); + Register scratch = scratch0(); MemOperand target = ContextOperand(context, instr->slot_index()); + + Label skip_assignment; + if (instr->hydrogen()->RequiresHoleCheck()) { - Register scratch = scratch0(); __ lw(scratch, target); __ LoadRoot(at, Heap::kTheHoleValueRootIndex); - DeoptimizeIf(eq, instr->environment(), scratch, Operand(at)); + + if (instr->hydrogen()->DeoptimizesOnHole()) { + DeoptimizeIf(eq, instr->environment(), scratch, Operand(at)); + } else { + __ Branch(&skip_assignment, ne, scratch, Operand(at)); + } } + __ sw(value, target); if (instr->hydrogen()->NeedsWriteBarrier()) { HType type = instr->hydrogen()->value()->type(); @@ -2212,6 +2230,8 @@ void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) { EMIT_REMEMBERED_SET, check_needed); } + + __ bind(&skip_assignment); } diff --git a/deps/v8/src/mksnapshot.cc b/deps/v8/src/mksnapshot.cc index bc0c2fc5b4..82871c09b0 100644 --- a/deps/v8/src/mksnapshot.cc +++ b/deps/v8/src/mksnapshot.cc @@ -109,7 +109,7 @@ class PartialSnapshotSink : public i::SnapshotByteSink { if (j != 0) { fprintf(fp, ","); } - fprintf(fp, "%d", at(j)); + fprintf(fp, "%u", static_cast(at(j))); } } char at(int i) { return data_[i]; } diff --git a/deps/v8/src/objects-inl.h b/deps/v8/src/objects-inl.h index d6d65718e2..7c7f094006 100644 --- a/deps/v8/src/objects-inl.h +++ b/deps/v8/src/objects-inl.h @@ -4256,14 +4256,6 @@ bool JSObject::HasIndexedInterceptor() { } -bool JSObject::AllowsSetElementsLength() { - bool result = elements()->IsFixedArray() || - elements()->IsFixedDoubleArray(); - ASSERT(result == !HasExternalArrayElements()); - return result; -} - - MaybeObject* JSObject::EnsureWritableFastElements() { ASSERT(HasFastTypeElements()); FixedArray* elems = FixedArray::cast(elements()); @@ -4624,6 +4616,13 @@ void JSArray::set_length(Smi* length) { } +bool JSArray::AllowsSetElementsLength() { + bool result = elements()->IsFixedArray() || elements()->IsFixedDoubleArray(); + ASSERT(result == !HasExternalArrayElements()); + return result; +} + + MaybeObject* JSArray::SetContent(FixedArrayBase* storage) { MaybeObject* maybe_result = EnsureCanContainElements( storage, ALLOW_COPIED_DOUBLE_ELEMENTS); diff --git a/deps/v8/src/objects.cc b/deps/v8/src/objects.cc index 2a56797c2c..e1931887b8 100644 --- a/deps/v8/src/objects.cc +++ 
b/deps/v8/src/objects.cc @@ -4257,10 +4257,10 @@ void JSObject::LookupCallback(String* name, LookupResult* result) { // Search for a getter or setter in an elements dictionary and update its -// attributes. Returns either undefined if the element is read-only, or the -// getter/setter pair (fixed array) if there is an existing one, or the hole -// value if the element does not exist or is a normal non-getter/setter data -// element. +// attributes. Returns either undefined if the element is non-deletable, or +// the getter/setter pair (fixed array) if there is an existing one, or the +// hole value if the element does not exist or is a normal non-getter/setter +// data element. static Object* UpdateGetterSetterInDictionary(NumberDictionary* dictionary, uint32_t index, PropertyAttributes attributes, @@ -4269,7 +4269,8 @@ static Object* UpdateGetterSetterInDictionary(NumberDictionary* dictionary, if (entry != NumberDictionary::kNotFound) { Object* result = dictionary->ValueAt(entry); PropertyDetails details = dictionary->DetailsAt(entry); - if (details.IsReadOnly()) return heap->undefined_value(); + // TODO(mstarzinger): We should check for details.IsDontDelete() here once + // we only call into the runtime once to set both getter and setter. if (details.type() == CALLBACKS && result->IsFixedArray()) { if (details.attributes() != attributes) { dictionary->DetailsAtPut(entry, @@ -4353,7 +4354,8 @@ MaybeObject* JSObject::DefineGetterSetter(String* name, LookupResult result(heap->isolate()); LocalLookup(name, &result); if (result.IsProperty()) { - if (result.IsReadOnly()) return heap->undefined_value(); + // TODO(mstarzinger): We should check for result.IsDontDelete() here once + // we only call into the runtime once to set both getter and setter. if (result.type() == CALLBACKS) { Object* obj = result.GetCallbackObject(); // Need to preserve old getters/setters. @@ -8376,7 +8378,7 @@ void JSArray::Expand(int required_size) { } -MaybeObject* JSObject::SetElementsLength(Object* len) { +MaybeObject* JSArray::SetElementsLength(Object* len) { // We should never end in here with a pixel or external array. ASSERT(AllowsSetElementsLength()); return GetElementsAccessor()->SetLength(this, len); diff --git a/deps/v8/src/objects.h b/deps/v8/src/objects.h index 2b18e67c99..5346585bd3 100644 --- a/deps/v8/src/objects.h +++ b/deps/v8/src/objects.h @@ -1471,7 +1471,6 @@ class JSObject: public JSReceiver { inline bool HasExternalDoubleElements(); bool HasFastArgumentsElements(); bool HasDictionaryArgumentsElements(); - inline bool AllowsSetElementsLength(); inline NumberDictionary* element_dictionary(); // Gets slow elements. // Requires: HasFastElements(). @@ -1733,9 +1732,6 @@ class JSObject: public JSReceiver { bool HasRealElementProperty(uint32_t index); bool HasRealNamedCallbackProperty(String* key); - // Initializes the array to a certain length - MUST_USE_RESULT MaybeObject* SetElementsLength(Object* length); - // Get the header size for a JSObject. Used to compute the index of // internal fields as well as the number of internal fields. inline int GetHeaderSize(); @@ -7398,6 +7394,10 @@ class JSArray: public JSObject { // capacity is non-zero. MUST_USE_RESULT MaybeObject* Initialize(int capacity); + // Initializes the array to a certain length. + inline bool AllowsSetElementsLength(); + MUST_USE_RESULT MaybeObject* SetElementsLength(Object* length); + // Set the content of the array to the content of storage. 
inline MaybeObject* SetContent(FixedArrayBase* storage);
diff --git a/deps/v8/src/prettyprinter.cc b/deps/v8/src/prettyprinter.cc
index 37c76ceefe..a3a328c119 100644
--- a/deps/v8/src/prettyprinter.cc
+++ b/deps/v8/src/prettyprinter.cc
@@ -447,6 +447,7 @@ void PrettyPrinter::Print(const char* format, ...) {


 void PrettyPrinter::PrintStatements(ZoneList<Statement*>* statements) {
+  if (statements == NULL) return;
   for (int i = 0; i < statements->length(); i++) {
     if (i != 0) Print(" ");
     Visit(statements->at(i));
diff --git a/deps/v8/src/runtime.cc b/deps/v8/src/runtime.cc
index f3adc51ace..b0e1a057da 100644
--- a/deps/v8/src/runtime.cc
+++ b/deps/v8/src/runtime.cc
@@ -434,7 +434,8 @@ static Handle<Object> CreateObjectLiteralBoilerplate(

 static const int kSmiOnlyLiteralMinimumLength = 1024;

-static Handle<Object> CreateArrayLiteralBoilerplate(
+// static
+Handle<Object> Runtime::CreateArrayLiteralBoilerplate(
     Isolate* isolate,
     Handle<FixedArray> literals,
     Handle<FixedArray> elements) {
@@ -536,7 +537,8 @@ static Handle<Object> CreateLiteralBoilerplate(
                                             false,
                                             kHasNoFunctionLiteral);
     case CompileTimeValue::ARRAY_LITERAL:
-      return CreateArrayLiteralBoilerplate(isolate, literals, elements);
+      return Runtime::CreateArrayLiteralBoilerplate(
+          isolate, literals, elements);
     default:
       UNREACHABLE();
       return Handle<Object>::null();
@@ -606,7 +608,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_CreateArrayLiteral) {
   // Check if boilerplate exists. If not, create it first.
   Handle<Object> boilerplate(literals->get(literals_index), isolate);
   if (*boilerplate == isolate->heap()->undefined_value()) {
-    boilerplate = CreateArrayLiteralBoilerplate(isolate, literals, elements);
+    boilerplate =
+        Runtime::CreateArrayLiteralBoilerplate(isolate, literals, elements);
     if (boilerplate.is_null()) return Failure::Exception();
     // Update the functions literal and return the boilerplate.
     literals->set(literals_index, *boilerplate);
@@ -626,7 +629,8 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_CreateArrayLiteralShallow) {
   Handle<Object> boilerplate(literals->get(literals_index), isolate);
   if (*boilerplate == isolate->heap()->undefined_value()) {
     ASSERT(*elements != isolate->heap()->empty_fixed_array());
-    boilerplate = CreateArrayLiteralBoilerplate(isolate, literals, elements);
+    boilerplate =
+        Runtime::CreateArrayLiteralBoilerplate(isolate, literals, elements);
     if (boilerplate.is_null()) return Failure::Exception();
     // Update the functions literal and return the boilerplate.
literals->set(literals_index, *boilerplate); @@ -4244,27 +4248,14 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DefineOrRedefineAccessorProperty) { CONVERT_CHECKED(String, name, args[1]); CONVERT_CHECKED(Smi, flag_setter, args[2]); Object* fun = args[3]; - RUNTIME_ASSERT(fun->IsSpecFunction() || fun->IsUndefined()); CONVERT_CHECKED(Smi, flag_attr, args[4]); + int unchecked = flag_attr->value(); RUNTIME_ASSERT((unchecked & ~(READ_ONLY | DONT_ENUM | DONT_DELETE)) == 0); - RUNTIME_ASSERT(!obj->IsNull()); - LookupResult result(isolate); - obj->LocalLookupRealNamedProperty(name, &result); - PropertyAttributes attr = static_cast(unchecked); - // If an existing property is either FIELD, NORMAL or CONSTANT_FUNCTION - // delete it to avoid running into trouble in DefineAccessor, which - // handles this incorrectly if the property is readonly (does nothing) - if (result.IsProperty() && - (result.type() == FIELD || result.type() == NORMAL - || result.type() == CONSTANT_FUNCTION)) { - Object* ok; - { MaybeObject* maybe_ok = - obj->DeleteProperty(name, JSReceiver::NORMAL_DELETION); - if (!maybe_ok->ToObject(&ok)) return maybe_ok; - } - } + + RUNTIME_ASSERT(!obj->IsNull()); + RUNTIME_ASSERT(fun->IsSpecFunction() || fun->IsUndefined()); return obj->DefineAccessor(name, flag_setter->value() == 0, fun, attr); } @@ -4280,11 +4271,10 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_DefineOrRedefineDataProperty) { CONVERT_ARG_CHECKED(JSObject, js_object, 0); CONVERT_ARG_CHECKED(String, name, 1); Handle obj_value = args.at(2); - CONVERT_CHECKED(Smi, flag, args[3]); + int unchecked = flag->value(); RUNTIME_ASSERT((unchecked & ~(READ_ONLY | DONT_ENUM | DONT_DELETE)) == 0); - PropertyAttributes attr = static_cast(unchecked); // Check if this is an element. diff --git a/deps/v8/src/runtime.h b/deps/v8/src/runtime.h index b13662df0e..ff0ddbabe6 100644 --- a/deps/v8/src/runtime.h +++ b/deps/v8/src/runtime.h @@ -679,6 +679,12 @@ class Runtime : public AllStatic { // Helper functions used stubs. static void PerformGC(Object* result); + + // Used in runtime.cc and hydrogen's VisitArrayLiteral. + static Handle CreateArrayLiteralBoilerplate( + Isolate* isolate, + Handle literals, + Handle elements); }; diff --git a/deps/v8/src/spaces.cc b/deps/v8/src/spaces.cc index a5712a0f53..84b0f1bcb4 100644 --- a/deps/v8/src/spaces.cc +++ b/deps/v8/src/spaces.cc @@ -752,7 +752,7 @@ int PagedSpace::CountTotalPages() { void PagedSpace::ReleasePage(Page* page) { ASSERT(page->LiveBytes() == 0); - // Adjust list of unswept pages if the page is it's head or tail. + // Adjust list of unswept pages if the page is the head of the list. 
if (first_unswept_page_ == page) {
     first_unswept_page_ = page->next_page();
     if (first_unswept_page_ == anchor()) {
diff --git a/deps/v8/src/stub-cache.cc b/deps/v8/src/stub-cache.cc
index 0d0105c661..787e833f4f 100644
--- a/deps/v8/src/stub-cache.cc
+++ b/deps/v8/src/stub-cache.cc
@@ -877,7 +877,8 @@ void StubCache::Clear() {

 void StubCache::CollectMatchingMaps(SmallMapList* types,
                                     String* name,
-                                    Code::Flags flags) {
+                                    Code::Flags flags,
+                                    Handle<Context> global_context) {
   for (int i = 0; i < kPrimaryTableSize; i++) {
     if (primary_[i].key == name) {
       Map* map = primary_[i].value->FindFirstMap();
@@ -886,7 +887,8 @@ void StubCache::CollectMatchingMaps(SmallMapList* types,
       if (map == NULL) continue;

       int offset = PrimaryOffset(name, flags, map);
-      if (entry(primary_, offset) == &primary_[i]) {
+      if (entry(primary_, offset) == &primary_[i] &&
+          !TypeFeedbackOracle::CanRetainOtherContext(map, *global_context)) {
         types->Add(Handle<Map>(map));
       }
     }
@@ -909,7 +911,8 @@ void StubCache::CollectMatchingMaps(SmallMapList* types,

       // Lookup in secondary table and add matches.
       int offset = SecondaryOffset(name, flags, primary_offset);
-      if (entry(secondary_, offset) == &secondary_[i]) {
+      if (entry(secondary_, offset) == &secondary_[i] &&
+          !TypeFeedbackOracle::CanRetainOtherContext(map, *global_context)) {
         types->Add(Handle<Map>(map));
       }
     }
diff --git a/deps/v8/src/stub-cache.h b/deps/v8/src/stub-cache.h
index f55a36df33..720ad8b08e 100644
--- a/deps/v8/src/stub-cache.h
+++ b/deps/v8/src/stub-cache.h
@@ -248,7 +248,8 @@ class StubCache {
   // Collect all maps that match the name and flags.
   void CollectMatchingMaps(SmallMapList* types,
                            String* name,
-                           Code::Flags flags);
+                           Code::Flags flags,
+                           Handle<Context> global_context);

   // Generate code for probing the stub cache table.
   // Arguments extra and extra2 may be used to pass additional scratch
diff --git a/deps/v8/src/type-info.cc b/deps/v8/src/type-info.cc
index e722d14527..af8a8ae828 100644
--- a/deps/v8/src/type-info.cc
+++ b/deps/v8/src/type-info.cc
@@ -438,11 +438,45 @@ void TypeFeedbackOracle::CollectReceiverTypes(unsigned ast_id,
       Handle<Code>::cast(object)->ic_state() == MEGAMORPHIC) {
     types->Reserve(4);
     ASSERT(object->IsCode());
-    isolate_->stub_cache()->CollectMatchingMaps(types, *name, flags);
+    isolate_->stub_cache()->CollectMatchingMaps(types,
+                                                *name,
+                                                flags,
+                                                global_context_);
   }
 }

+// Check if a map originates from a given global context. We use this
+// information to filter out maps from different context to avoid
+// retaining objects from different tabs in Chrome via optimized code.
+bool TypeFeedbackOracle::CanRetainOtherContext(Map* map,
+                                               Context* global_context) {
+  Object* constructor = map->constructor();
+  ASSERT(constructor != NULL);
+  while (!constructor->IsJSFunction()) {
+    // If the constructor is not null or a JSFunction, we have to
+    // conservatively assume that it may retain a global context.
+    if (!constructor->IsNull()) return true;
+
+    // If both, constructor and prototype are null, we conclude
+    // that no global context will be retained by this map.
+ if (map->prototype()->IsNull()) return false; + + map = JSObject::cast(map->prototype())->map(); + constructor = map->constructor(); + } + JSFunction* function = JSFunction::cast(constructor); + return CanRetainOtherContext(function, global_context); +} + + +bool TypeFeedbackOracle::CanRetainOtherContext(JSFunction* function, + Context* global_context) { + return function->context()->global() != global_context->global() + && function->context()->global() != global_context->builtins(); +} + + static void AddMapIfMissing(Handle map, SmallMapList* list) { for (int i = 0; i < list->length(); ++i) { if (list->at(i).is_identical_to(map)) return; @@ -539,7 +573,12 @@ void TypeFeedbackOracle::ProcessRelocInfos(ZoneList* infos) { SetInfo(ast_id, Smi::FromInt(target->check_type())); } else { Object* map = target->FindFirstMap(); - SetInfo(ast_id, map == NULL ? static_cast(target) : map); + if (map == NULL) { + SetInfo(ast_id, static_cast(target)); + } else if (!CanRetainOtherContext(Map::cast(map), + *global_context_)) { + SetInfo(ast_id, map); + } } } else if (target->ic_state() == MEGAMORPHIC) { SetInfo(ast_id, target); @@ -565,7 +604,9 @@ void TypeFeedbackOracle::ProcessRelocInfos(ZoneList* infos) { if (target->major_key() == CodeStub::CallFunction && target->has_function_cache()) { Object* value = CallFunctionStub::GetCachedValue(reloc_entry.pc()); - if (value->IsJSFunction()) { + if (value->IsJSFunction() && + !CanRetainOtherContext(JSFunction::cast(value), + *global_context_)) { SetInfo(ast_id, value); } } diff --git a/deps/v8/src/type-info.h b/deps/v8/src/type-info.h index eba098737d..1519fcabb4 100644 --- a/deps/v8/src/type-info.h +++ b/deps/v8/src/type-info.h @@ -256,6 +256,10 @@ class TypeFeedbackOracle BASE_EMBEDDED { void CollectKeyedReceiverTypes(unsigned ast_id, SmallMapList* types); + static bool CanRetainOtherContext(Map* map, Context* global_context); + static bool CanRetainOtherContext(JSFunction* function, + Context* global_context); + CheckType GetCallCheckType(Call* expr); Handle GetPrototypeForPrimitiveCheck(CheckType check); diff --git a/deps/v8/src/v8.cc b/deps/v8/src/v8.cc index 634c5c3e7b..0354fc101a 100644 --- a/deps/v8/src/v8.cc +++ b/deps/v8/src/v8.cc @@ -57,14 +57,7 @@ static EntropySource entropy_source; bool V8::Initialize(Deserializer* des) { - // Setting --harmony implies all other harmony flags. - // TODO(rossberg): Is there a better place to put this? - if (FLAG_harmony) { - FLAG_harmony_typeof = true; - FLAG_harmony_scoping = true; - FLAG_harmony_proxies = true; - FLAG_harmony_collections = true; - } + FlagList::EnforceFlagImplications(); InitializeOncePerProcess(); diff --git a/deps/v8/src/version.cc b/deps/v8/src/version.cc index 8385bd7768..79ea5e866b 100644 --- a/deps/v8/src/version.cc +++ b/deps/v8/src/version.cc @@ -34,7 +34,7 @@ // cannot be changed without changing the SCons build script. #define MAJOR_VERSION 3 #define MINOR_VERSION 8 -#define BUILD_NUMBER 0 +#define BUILD_NUMBER 1 #define PATCH_LEVEL 0 // Use 1 for candidates and 0 otherwise. // (Boolean macro values are not supported by all preprocessors.) 
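The type-info.cc and stub-cache.cc changes above exist so that optimized code compiled in one global context never embeds maps or functions that belong to another context. The JavaScript sketch below is not part of the patch; it only illustrates the leak pattern that the new TypeFeedbackOracle::CanRetainOtherContext check guards against, and it mirrors the LeakGlobalContextViaMap cctest added in test-heap.cc further down. Running it requires embedder support for creating and entering two separate contexts, which is only hinted at in the comments.

// In context 1: create an object; its map's constructor chain points at
// context 1's global context.
var v = {x: 42};

// In context 2 (entered by the embedder, with context 1's `v` copied in as `o`):
function f() { return o.x; }
// Repeated monomorphic calls record o's map in the type feedback, and the
// loop eventually gets f optimized with that map embedded in generated code.
for (var i = 0; i < 1000000; ++i) f();
f();

// Without the CanRetainOtherContext filter, the optimized code for f keeps
// context 1's map -- and through its constructor, context 1's global context --
// alive even after the embedder disposes context 1.

With the filter in place, feedback whose map's constructor resolves to a foreign global context is simply dropped, so disposing the first context actually releases its global object, as the new cctests verify.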
diff --git a/deps/v8/src/x64/code-stubs-x64.cc b/deps/v8/src/x64/code-stubs-x64.cc index 6f3e0659fd..1d67bae60a 100644 --- a/deps/v8/src/x64/code-stubs-x64.cc +++ b/deps/v8/src/x64/code-stubs-x64.cc @@ -4434,7 +4434,7 @@ void StringCharAtGenerator::GenerateSlow( void StringAddStub::Generate(MacroAssembler* masm) { - Label string_add_runtime, call_builtin; + Label call_runtime, call_builtin; Builtins::JavaScript builtin_id = Builtins::ADD; // Load the two arguments. @@ -4443,14 +4443,14 @@ void StringAddStub::Generate(MacroAssembler* masm) { // Make sure that both arguments are strings if not known in advance. if (flags_ == NO_STRING_ADD_FLAGS) { - __ JumpIfSmi(rax, &string_add_runtime); + __ JumpIfSmi(rax, &call_runtime); __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, r8); - __ j(above_equal, &string_add_runtime); + __ j(above_equal, &call_runtime); // First argument is a a string, test second. - __ JumpIfSmi(rdx, &string_add_runtime); + __ JumpIfSmi(rdx, &call_runtime); __ CmpObjectType(rdx, FIRST_NONSTRING_TYPE, r9); - __ j(above_equal, &string_add_runtime); + __ j(above_equal, &call_runtime); } else { // Here at least one of the arguments is definitely a string. // We convert the one that is not known to be a string. @@ -4518,7 +4518,7 @@ void StringAddStub::Generate(MacroAssembler* masm) { // Check that both strings are non-external ascii strings. __ JumpIfBothInstanceTypesAreNotSequentialAscii(r8, r9, rbx, rcx, - &string_add_runtime); + &call_runtime); // Get the two characters forming the sub string. __ movzxbq(rbx, FieldOperand(rax, SeqAsciiString::kHeaderSize)); @@ -4533,8 +4533,18 @@ void StringAddStub::Generate(MacroAssembler* masm) { __ ret(2 * kPointerSize); __ bind(&make_two_character_string); - __ Set(rbx, 2); - __ jmp(&make_flat_ascii_string); + __ Set(rdi, 2); + __ AllocateAsciiString(rax, rdi, r8, r9, r11, &call_runtime); + // rbx - first byte: first character + // rbx - second byte: *maybe* second character + // Make sure that the second byte of rbx contains the second character. + __ movzxbq(rcx, FieldOperand(rdx, SeqAsciiString::kHeaderSize)); + __ shll(rcx, Immediate(kBitsPerByte)); + __ orl(rbx, rcx); + // Write both characters to the new string. + __ movw(FieldOperand(rax, SeqAsciiString::kHeaderSize), rbx); + __ IncrementCounter(counters->string_add_native(), 1); + __ ret(2 * kPointerSize); __ bind(&longer_than_two); // Check if resulting string will be flat. @@ -4543,7 +4553,7 @@ void StringAddStub::Generate(MacroAssembler* masm) { // Handle exceptionally long strings in the runtime system. STATIC_ASSERT((String::kMaxLength & 0x80000000) == 0); __ SmiCompare(rbx, Smi::FromInt(String::kMaxLength)); - __ j(above, &string_add_runtime); + __ j(above, &call_runtime); // If result is not supposed to be flat, allocate a cons string object. If // both strings are ascii the result is an ascii cons string. @@ -4561,7 +4571,7 @@ void StringAddStub::Generate(MacroAssembler* masm) { __ j(zero, &non_ascii); __ bind(&ascii_data); // Allocate an acsii cons string. - __ AllocateAsciiConsString(rcx, rdi, no_reg, &string_add_runtime); + __ AllocateAsciiConsString(rcx, rdi, no_reg, &call_runtime); __ bind(&allocated); // Fill the fields of the cons string. __ movq(FieldOperand(rcx, ConsString::kLengthOffset), rbx); @@ -4586,111 +4596,103 @@ void StringAddStub::Generate(MacroAssembler* masm) { __ cmpb(r8, Immediate(kAsciiStringTag | kAsciiDataHintTag)); __ j(equal, &ascii_data); // Allocate a two byte cons string. 
- __ AllocateTwoByteConsString(rcx, rdi, no_reg, &string_add_runtime); + __ AllocateTwoByteConsString(rcx, rdi, no_reg, &call_runtime); __ jmp(&allocated); - // Handle creating a flat result. First check that both strings are not - // external strings. + // We cannot encounter sliced strings or cons strings here since: + STATIC_ASSERT(SlicedString::kMinLength >= String::kMinNonFlatLength); + // Handle creating a flat result from either external or sequential strings. + // Locate the first characters' locations. // rax: first string // rbx: length of resulting flat string as smi // rdx: second string // r8: instance type of first string // r9: instance type of first string + Label first_prepared, second_prepared; + Label first_is_sequential, second_is_sequential; __ bind(&string_add_flat_result); - __ SmiToInteger32(rbx, rbx); - __ movl(rcx, r8); - __ and_(rcx, Immediate(kStringRepresentationMask)); - __ cmpl(rcx, Immediate(kExternalStringTag)); - __ j(equal, &string_add_runtime); - __ movl(rcx, r9); - __ and_(rcx, Immediate(kStringRepresentationMask)); - __ cmpl(rcx, Immediate(kExternalStringTag)); - __ j(equal, &string_add_runtime); - // We cannot encounter sliced strings here since: - STATIC_ASSERT(SlicedString::kMinLength >= String::kMinNonFlatLength); - // Now check if both strings are ascii strings. - // rax: first string - // rbx: length of resulting flat string - // rdx: second string - // r8: instance type of first string - // r9: instance type of second string + + __ SmiToInteger32(r14, FieldOperand(rax, SeqString::kLengthOffset)); + // r14: length of first string + STATIC_ASSERT(kSeqStringTag == 0); + __ testb(r8, Immediate(kStringRepresentationMask)); + __ j(zero, &first_is_sequential, Label::kNear); + // Rule out short external string and load string resource. + STATIC_ASSERT(kShortExternalStringTag != 0); + __ testb(r8, Immediate(kShortExternalStringMask)); + __ j(not_zero, &call_runtime); + __ movq(rcx, FieldOperand(rax, ExternalString::kResourceDataOffset)); + __ jmp(&first_prepared, Label::kNear); + __ bind(&first_is_sequential); + STATIC_ASSERT(SeqAsciiString::kHeaderSize == SeqTwoByteString::kHeaderSize); + __ lea(rcx, FieldOperand(rax, SeqAsciiString::kHeaderSize)); + __ bind(&first_prepared); + + // Check whether both strings have same encoding. + __ xorl(r8, r9); + __ testb(r8, Immediate(kStringEncodingMask)); + __ j(not_zero, &call_runtime); + + __ SmiToInteger32(r15, FieldOperand(rdx, SeqString::kLengthOffset)); + // r15: length of second string + STATIC_ASSERT(kSeqStringTag == 0); + __ testb(r9, Immediate(kStringRepresentationMask)); + __ j(zero, &second_is_sequential, Label::kNear); + // Rule out short external string and load string resource. + STATIC_ASSERT(kShortExternalStringTag != 0); + __ testb(r9, Immediate(kShortExternalStringMask)); + __ j(not_zero, &call_runtime); + __ movq(rdx, FieldOperand(rdx, ExternalString::kResourceDataOffset)); + __ jmp(&second_prepared, Label::kNear); + __ bind(&second_is_sequential); + STATIC_ASSERT(SeqAsciiString::kHeaderSize == SeqTwoByteString::kHeaderSize); + __ lea(rdx, FieldOperand(rdx, SeqAsciiString::kHeaderSize)); + __ bind(&second_prepared); + Label non_ascii_string_add_flat_result; - STATIC_ASSERT((kStringEncodingMask & kAsciiStringTag) != 0); - STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0); - __ testl(r8, Immediate(kStringEncodingMask)); + // r9: instance type of second string + // First string and second string have the same encoding. 
+ STATIC_ASSERT(kTwoByteStringTag == 0); + __ SmiToInteger32(rbx, rbx); + __ testb(r9, Immediate(kStringEncodingMask)); __ j(zero, &non_ascii_string_add_flat_result); - __ testl(r9, Immediate(kStringEncodingMask)); - __ j(zero, &string_add_runtime); __ bind(&make_flat_ascii_string); // Both strings are ascii strings. As they are short they are both flat. - __ AllocateAsciiString(rcx, rbx, rdi, r14, r11, &string_add_runtime); - // rcx: result string - __ movq(rbx, rcx); + __ AllocateAsciiString(rax, rbx, rdi, r8, r9, &call_runtime); + // rax: result string // Locate first character of result. - __ addq(rcx, Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag)); - // Locate first character of first argument - __ SmiToInteger32(rdi, FieldOperand(rax, String::kLengthOffset)); - __ addq(rax, Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag)); - // rax: first char of first argument - // rbx: result string - // rcx: first character of result - // rdx: second string - // rdi: length of first argument - StringHelper::GenerateCopyCharacters(masm, rcx, rax, rdi, true); - // Locate first character of second argument. - __ SmiToInteger32(rdi, FieldOperand(rdx, String::kLengthOffset)); - __ addq(rdx, Immediate(SeqAsciiString::kHeaderSize - kHeapObjectTag)); - // rbx: result string - // rcx: next character of result - // rdx: first char of second argument - // rdi: length of second argument - StringHelper::GenerateCopyCharacters(masm, rcx, rdx, rdi, true); - __ movq(rax, rbx); + __ lea(rbx, FieldOperand(rax, SeqAsciiString::kHeaderSize)); + // rcx: first char of first string + // rbx: first character of result + // r14: length of first string + StringHelper::GenerateCopyCharacters(masm, rbx, rcx, r14, true); + // rbx: next character of result + // rdx: first char of second string + // r15: length of second string + StringHelper::GenerateCopyCharacters(masm, rbx, rdx, r15, true); __ IncrementCounter(counters->string_add_native(), 1); __ ret(2 * kPointerSize); - // Handle creating a flat two byte result. - // rax: first string - known to be two byte - // rbx: length of resulting flat string - // rdx: second string - // r8: instance type of first string - // r9: instance type of first string __ bind(&non_ascii_string_add_flat_result); - STATIC_ASSERT((kStringEncodingMask & kAsciiStringTag) != 0); - STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0); - __ and_(r9, Immediate(kStringEncodingMask)); - __ j(not_zero, &string_add_runtime); - // Both strings are two byte strings. As they are short they are both - // flat. - __ AllocateTwoByteString(rcx, rbx, rdi, r14, r11, &string_add_runtime); - // rcx: result string - __ movq(rbx, rcx); + // Both strings are ascii strings. As they are short they are both flat. + __ AllocateTwoByteString(rax, rbx, rdi, r8, r9, &call_runtime); + // rax: result string // Locate first character of result. - __ addq(rcx, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag)); - // Locate first character of first argument. - __ SmiToInteger32(rdi, FieldOperand(rax, String::kLengthOffset)); - __ addq(rax, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag)); - // rax: first char of first argument - // rbx: result string - // rcx: first character of result - // rdx: second argument - // rdi: length of first argument - StringHelper::GenerateCopyCharacters(masm, rcx, rax, rdi, false); - // Locate first character of second argument. 
- __ SmiToInteger32(rdi, FieldOperand(rdx, String::kLengthOffset)); - __ addq(rdx, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag)); - // rbx: result string - // rcx: next character of result - // rdx: first char of second argument - // rdi: length of second argument - StringHelper::GenerateCopyCharacters(masm, rcx, rdx, rdi, false); - __ movq(rax, rbx); + __ lea(rbx, FieldOperand(rax, SeqTwoByteString::kHeaderSize)); + // rcx: first char of first string + // rbx: first character of result + // r14: length of first string + StringHelper::GenerateCopyCharacters(masm, rbx, rcx, r14, false); + // rbx: next character of result + // rdx: first char of second string + // r15: length of second string + StringHelper::GenerateCopyCharacters(masm, rbx, rdx, r15, false); __ IncrementCounter(counters->string_add_native(), 1); __ ret(2 * kPointerSize); // Just jump to runtime to add the two strings. - __ bind(&string_add_runtime); + __ bind(&call_runtime); __ TailCallRuntime(Runtime::kStringAdd, 2, 1); if (call_builtin.is_linked()) { @@ -5040,8 +5042,12 @@ void SubStringStub::Generate(MacroAssembler* masm) { __ SmiSub(rcx, rcx, rdx); // Overflow doesn't happen. __ cmpq(FieldOperand(rax, String::kLengthOffset), rcx); - Label return_rax; - __ j(equal, &return_rax); + Label not_original_string; + __ j(not_equal, ¬_original_string, Label::kNear); + Counters* counters = masm->isolate()->counters(); + __ IncrementCounter(counters->sub_string_native(), 1); + __ ret(kArgumentsSize); + __ bind(¬_original_string); // Special handling of sub-strings of length 1 and 2. One character strings // are handled in the runtime system (looked up in the single character // cache). Two character strings are looked for in the symbol cache. @@ -5060,68 +5066,77 @@ void SubStringStub::Generate(MacroAssembler* masm) { // Get the two characters forming the sub string. __ SmiToInteger32(rdx, rdx); // From index is no longer smi. __ movzxbq(rbx, FieldOperand(rax, rdx, times_1, SeqAsciiString::kHeaderSize)); - __ movzxbq(rcx, + __ movzxbq(rdi, FieldOperand(rax, rdx, times_1, SeqAsciiString::kHeaderSize + 1)); // Try to lookup two character string in symbol table. Label make_two_character_string; StringHelper::GenerateTwoCharacterSymbolTableProbe( - masm, rbx, rcx, rax, rdx, rdi, r14, &make_two_character_string); + masm, rbx, rdi, r9, r11, r14, r15, &make_two_character_string); + __ IncrementCounter(counters->sub_string_native(), 1); __ ret(3 * kPointerSize); __ bind(&make_two_character_string); // Setup registers for allocating the two character string. - __ movq(rax, Operand(rsp, kStringOffset)); - __ movq(rbx, FieldOperand(rax, HeapObject::kMapOffset)); + __ movzxwq(rbx, FieldOperand(rax, rdx, times_1, SeqAsciiString::kHeaderSize)); + __ AllocateAsciiString(rax, rcx, r11, r14, r15, &runtime); + __ movw(FieldOperand(rax, SeqAsciiString::kHeaderSize), rbx); + __ IncrementCounter(counters->sub_string_native(), 1); + __ ret(3 * kPointerSize); + + __ bind(&result_longer_than_two); + // rax: string + // rbx: instance type + // rcx: sub string length + // rdx: from index (smi) + // Deal with different string types: update the index if necessary + // and put the underlying string into edi. + Label underlying_unpacked, sliced_string, seq_or_external_string; + // If the string is not indirect, it can only be sequential or external. 
+ STATIC_ASSERT(kIsIndirectStringMask == (kSlicedStringTag & kConsStringTag)); + STATIC_ASSERT(kIsIndirectStringMask != 0); + __ testb(rbx, Immediate(kIsIndirectStringMask)); + __ j(zero, &seq_or_external_string, Label::kNear); + + __ testb(rbx, Immediate(kSlicedNotConsMask)); + __ j(not_zero, &sliced_string, Label::kNear); + // Cons string. Check whether it is flat, then fetch first part. + // Flat cons strings have an empty second part. + __ CompareRoot(FieldOperand(rax, ConsString::kSecondOffset), + Heap::kEmptyStringRootIndex); + __ j(not_equal, &runtime); + __ movq(rdi, FieldOperand(rax, ConsString::kFirstOffset)); + // Update instance type. + __ movq(rbx, FieldOperand(rdi, HeapObject::kMapOffset)); __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset)); - __ Set(rcx, 2); + __ jmp(&underlying_unpacked, Label::kNear); + + __ bind(&sliced_string); + // Sliced string. Fetch parent and correct start index by offset. + __ addq(rdx, FieldOperand(rax, SlicedString::kOffsetOffset)); + __ movq(rdi, FieldOperand(rax, SlicedString::kParentOffset)); + // Update instance type. + __ movq(rbx, FieldOperand(rdi, HeapObject::kMapOffset)); + __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset)); + __ jmp(&underlying_unpacked, Label::kNear); + + __ bind(&seq_or_external_string); + // Sequential or external string. Just move string to the correct register. + __ movq(rdi, rax); + + __ bind(&underlying_unpacked); if (FLAG_string_slices) { Label copy_routine; + // rdi: underlying subject string + // rbx: instance type of underlying subject string + // rdx: adjusted start index (smi) + // rcx: length // If coming from the make_two_character_string path, the string // is too short to be sliced anyways. - STATIC_ASSERT(2 < SlicedString::kMinLength); - __ jmp(©_routine); - __ bind(&result_longer_than_two); - - // rax: string - // rbx: instance type - // rcx: sub string length - // rdx: from index (smi) - Label allocate_slice, sliced_string, seq_or_external_string; __ cmpq(rcx, Immediate(SlicedString::kMinLength)); // Short slice. Copy instead of slicing. __ j(less, ©_routine); - // If the string is not indirect, it can only be sequential or external. - STATIC_ASSERT(kIsIndirectStringMask == (kSlicedStringTag & kConsStringTag)); - STATIC_ASSERT(kIsIndirectStringMask != 0); - __ testb(rbx, Immediate(kIsIndirectStringMask)); - __ j(zero, &seq_or_external_string, Label::kNear); - - __ testb(rbx, Immediate(kSlicedNotConsMask)); - __ j(not_zero, &sliced_string, Label::kNear); - // Cons string. Check whether it is flat, then fetch first part. - __ CompareRoot(FieldOperand(rax, ConsString::kSecondOffset), - Heap::kEmptyStringRootIndex); - __ j(not_equal, &runtime); - __ movq(rdi, FieldOperand(rax, ConsString::kFirstOffset)); - __ jmp(&allocate_slice, Label::kNear); - - __ bind(&sliced_string); - // Sliced string. Fetch parent and correct start index by offset. - __ addq(rdx, FieldOperand(rax, SlicedString::kOffsetOffset)); - __ movq(rdi, FieldOperand(rax, SlicedString::kParentOffset)); - __ jmp(&allocate_slice, Label::kNear); - - __ bind(&seq_or_external_string); - // Sequential or external string. Just move string to the correct register. - __ movq(rdi, rax); - - __ bind(&allocate_slice); - // edi: underlying subject string - // ebx: instance type of original subject string - // edx: offset - // ecx: length // Allocate new sliced string. At this point we do not reload the instance // type including the string encoding because we simply rely on the info // provided by the original string. 
It does not matter if the original @@ -5132,10 +5147,10 @@ void SubStringStub::Generate(MacroAssembler* masm) { STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0); __ testb(rbx, Immediate(kStringEncodingMask)); __ j(zero, &two_byte_slice, Label::kNear); - __ AllocateAsciiSlicedString(rax, rbx, no_reg, &runtime); + __ AllocateAsciiSlicedString(rax, rbx, r14, &runtime); __ jmp(&set_slice_header, Label::kNear); __ bind(&two_byte_slice); - __ AllocateTwoByteSlicedString(rax, rbx, no_reg, &runtime); + __ AllocateTwoByteSlicedString(rax, rbx, r14, &runtime); __ bind(&set_slice_header); __ movq(FieldOperand(rax, SlicedString::kOffsetOffset), rdx); __ Integer32ToSmi(rcx, rcx); @@ -5143,82 +5158,85 @@ void SubStringStub::Generate(MacroAssembler* masm) { __ movq(FieldOperand(rax, SlicedString::kParentOffset), rdi); __ movq(FieldOperand(rax, SlicedString::kHashFieldOffset), Immediate(String::kEmptyHashField)); - __ jmp(&return_rax); + __ IncrementCounter(counters->sub_string_native(), 1); + __ ret(kArgumentsSize); __ bind(©_routine); - } else { - __ bind(&result_longer_than_two); } - // rax: string - // rbx: instance type - // rcx: result string length - // Check for flat ascii string - Label non_ascii_flat; - __ JumpIfInstanceTypeIsNotSequentialAscii(rbx, rbx, &non_ascii_flat); + // rdi: underlying subject string + // rbx: instance type of underlying subject string + // rdx: adjusted start index (smi) + // rcx: length + // The subject string can only be external or sequential string of either + // encoding at this point. + Label two_byte_sequential, sequential_string; + STATIC_ASSERT(kExternalStringTag != 0); + STATIC_ASSERT(kSeqStringTag == 0); + __ testb(rbx, Immediate(kExternalStringTag)); + __ j(zero, &sequential_string); + + // Handle external string. + // Rule out short external strings. + STATIC_CHECK(kShortExternalStringTag != 0); + __ testb(rbx, Immediate(kShortExternalStringMask)); + __ j(not_zero, &runtime); + __ movq(rdi, FieldOperand(rdi, ExternalString::kResourceDataOffset)); + // Move the pointer so that offset-wise, it looks like a sequential string. + STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqAsciiString::kHeaderSize); + __ subq(rdi, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag)); + + __ bind(&sequential_string); + STATIC_ASSERT((kAsciiStringTag & kStringEncodingMask) != 0); + __ testb(rbx, Immediate(kStringEncodingMask)); + __ j(zero, &two_byte_sequential); // Allocate the result. - __ AllocateAsciiString(rax, rcx, rbx, rdx, rdi, &runtime); + __ AllocateAsciiString(rax, rcx, r11, r14, r15, &runtime); // rax: result string // rcx: result string length - __ movq(rdx, rsi); // esi used by following code. - // Locate first character of result. - __ lea(rdi, FieldOperand(rax, SeqAsciiString::kHeaderSize)); - // Load string argument and locate character of sub string start. - __ movq(rsi, Operand(rsp, kStringOffset)); - __ movq(rbx, Operand(rsp, kFromOffset)); - { - SmiIndex smi_as_index = masm->SmiToIndex(rbx, rbx, times_1); - __ lea(rsi, Operand(rsi, smi_as_index.reg, smi_as_index.scale, + __ movq(r14, rsi); // esi used by following code. + { // Locate character of sub string start. + SmiIndex smi_as_index = masm->SmiToIndex(rdx, rdx, times_1); + __ lea(rsi, Operand(rdi, smi_as_index.reg, smi_as_index.scale, SeqAsciiString::kHeaderSize - kHeapObjectTag)); } + // Locate first character of result. 
+ __ lea(rdi, FieldOperand(rax, SeqAsciiString::kHeaderSize)); // rax: result string // rcx: result length - // rdx: original value of rsi // rdi: first character of result // rsi: character of sub string start + // r14: original value of rsi StringHelper::GenerateCopyCharactersREP(masm, rdi, rsi, rcx, true); - __ movq(rsi, rdx); // Restore rsi. - Counters* counters = masm->isolate()->counters(); + __ movq(rsi, r14); // Restore rsi. __ IncrementCounter(counters->sub_string_native(), 1); __ ret(kArgumentsSize); - __ bind(&non_ascii_flat); - // rax: string - // rbx: instance type & kStringRepresentationMask | kStringEncodingMask - // rcx: result string length - // Check for sequential two byte string - __ cmpb(rbx, Immediate(kSeqStringTag | kTwoByteStringTag)); - __ j(not_equal, &runtime); - + __ bind(&two_byte_sequential); // Allocate the result. - __ AllocateTwoByteString(rax, rcx, rbx, rdx, rdi, &runtime); + __ AllocateTwoByteString(rax, rcx, r11, r14, r15, &runtime); // rax: result string // rcx: result string length - __ movq(rdx, rsi); // esi used by following code. - // Locate first character of result. - __ lea(rdi, FieldOperand(rax, SeqTwoByteString::kHeaderSize)); - // Load string argument and locate character of sub string start. - __ movq(rsi, Operand(rsp, kStringOffset)); - __ movq(rbx, Operand(rsp, kFromOffset)); - { - SmiIndex smi_as_index = masm->SmiToIndex(rbx, rbx, times_2); - __ lea(rsi, Operand(rsi, smi_as_index.reg, smi_as_index.scale, + __ movq(r14, rsi); // esi used by following code. + { // Locate character of sub string start. + SmiIndex smi_as_index = masm->SmiToIndex(rdx, rdx, times_2); + __ lea(rsi, Operand(rdi, smi_as_index.reg, smi_as_index.scale, SeqAsciiString::kHeaderSize - kHeapObjectTag)); } + // Locate first character of result. + __ lea(rdi, FieldOperand(rax, SeqTwoByteString::kHeaderSize)); // rax: result string // rcx: result length - // rdx: original value of rsi // rdi: first character of result // rsi: character of sub string start + // r14: original value of rsi StringHelper::GenerateCopyCharactersREP(masm, rdi, rsi, rcx, false); - __ movq(rsi, rdx); // Restore esi. - - __ bind(&return_rax); + __ movq(rsi, r14); // Restore esi. __ IncrementCounter(counters->sub_string_native(), 1); __ ret(kArgumentsSize); diff --git a/deps/v8/src/x64/ic-x64.cc b/deps/v8/src/x64/ic-x64.cc index b3a94227af..020446008f 100644 --- a/deps/v8/src/x64/ic-x64.cc +++ b/deps/v8/src/x64/ic-x64.cc @@ -1397,11 +1397,10 @@ void StoreIC::GenerateArrayLength(MacroAssembler* masm) { // -- rsp[0] : return address // ----------------------------------- // - // This accepts as a receiver anything JSObject::SetElementsLength accepts - // (currently anything except for external and pixel arrays which means - // anything with elements of FixedArray type.), but currently is restricted - // to JSArray. - // Value must be a number, but only smis are accepted as the most common case. + // This accepts as a receiver anything JSArray::SetElementsLength accepts + // (currently anything except for external arrays which means anything with + // elements of FixedArray type). Value must be a number, but only smis are + // accepted as the most common case. Label miss; @@ -1423,6 +1422,13 @@ void StoreIC::GenerateArrayLength(MacroAssembler* masm) { __ CmpObjectType(scratch, FIXED_ARRAY_TYPE, scratch); __ j(not_equal, &miss); + // Check that the array has fast properties, otherwise the length + // property might have been redefined. 
+ __ movq(scratch, FieldOperand(receiver, JSArray::kPropertiesOffset)); + __ CompareRoot(FieldOperand(scratch, FixedArray::kMapOffset), + Heap::kHashTableMapRootIndex); + __ j(equal, &miss); + // Check that value is a smi. __ JumpIfNotSmi(value, &miss); diff --git a/deps/v8/src/x64/lithium-codegen-x64.cc b/deps/v8/src/x64/lithium-codegen-x64.cc index 293a1db615..a96d140d49 100644 --- a/deps/v8/src/x64/lithium-codegen-x64.cc +++ b/deps/v8/src/x64/lithium-codegen-x64.cc @@ -2069,7 +2069,14 @@ void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) { __ movq(result, ContextOperand(context, instr->slot_index())); if (instr->hydrogen()->RequiresHoleCheck()) { __ CompareRoot(result, Heap::kTheHoleValueRootIndex); - DeoptimizeIf(equal, instr->environment()); + if (instr->hydrogen()->DeoptimizesOnHole()) { + DeoptimizeIf(equal, instr->environment()); + } else { + Label is_not_hole; + __ j(not_equal, &is_not_hole, Label::kNear); + __ LoadRoot(result, Heap::kUndefinedValueRootIndex); + __ bind(&is_not_hole); + } } } @@ -2077,12 +2084,20 @@ void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) { void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) { Register context = ToRegister(instr->context()); Register value = ToRegister(instr->value()); + Operand target = ContextOperand(context, instr->slot_index()); + + Label skip_assignment; if (instr->hydrogen()->RequiresHoleCheck()) { __ CompareRoot(target, Heap::kTheHoleValueRootIndex); - DeoptimizeIf(equal, instr->environment()); + if (instr->hydrogen()->DeoptimizesOnHole()) { + DeoptimizeIf(equal, instr->environment()); + } else { + __ j(not_equal, &skip_assignment); + } } __ movq(target, value); + if (instr->hydrogen()->NeedsWriteBarrier()) { HType type = instr->hydrogen()->value()->type(); SmiCheck check_needed = @@ -2097,6 +2112,8 @@ void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) { EMIT_REMEMBERED_SET, check_needed); } + + __ bind(&skip_assignment); } diff --git a/deps/v8/test/cctest/cctest.status b/deps/v8/test/cctest/cctest.status index 2de0afba17..6d36aa6740 100644 --- a/deps/v8/test/cctest/cctest.status +++ b/deps/v8/test/cctest/cctest.status @@ -52,6 +52,10 @@ test-profile-generator/RecordStackTraceAtStartProfiling: PASS || FAIL # We do not yet shrink weak maps after they have been emptied by the GC test-weakmaps/Shrinking: FAIL +# TODO(1823): Fails without snapshot. Temporarily disabled until fixed. 
+test-heap/LeakGlobalContextViaMap: SKIP +test-heap/LeakGlobalContextViaFunction: SKIP + ############################################################################## [ $arch == arm ] diff --git a/deps/v8/test/cctest/test-hashing.cc b/deps/v8/test/cctest/test-hashing.cc index 9c342a9efe..df1ab20013 100644 --- a/deps/v8/test/cctest/test-hashing.cc +++ b/deps/v8/test/cctest/test-hashing.cc @@ -106,6 +106,7 @@ void generate(MacroAssembler* assm, i::Vector string) { } StringHelper::GenerateHashGetHash(assm, v0); __ jr(ra); + __ nop(); #endif } @@ -125,7 +126,7 @@ void check(i::Vector string) { Handle(HEAP->undefined_value()))->ToObjectChecked()); CHECK(code->IsCode()); - HASH_FUNCTION hash = FUNCTION_CAST(Code::cast(code)->entry()); + HASH_FUNCTION hash = FUNCTION_CAST(code->entry()); Handle v8_string = FACTORY->NewStringFromAscii(string); v8_string->set_hash_field(String::kEmptyHashField); #ifdef USE_SIMULATOR diff --git a/deps/v8/test/cctest/test-heap.cc b/deps/v8/test/cctest/test-heap.cc index 014eefb5be..0e09ee38e1 100644 --- a/deps/v8/test/cctest/test-heap.cc +++ b/deps/v8/test/cctest/test-heap.cc @@ -1318,3 +1318,83 @@ TEST(IdleNotificationAdvancesIncrementalMarking) { intptr_t new_size = HEAP->SizeOfObjects(); CHECK(no_idle_work || new_size < old_size); } + + +static int NumberOfGlobalObjects() { + int count = 0; + HeapIterator iterator; + for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) { + if (obj->IsGlobalObject()) count++; + } + return count; +} + + +// Test that we don't embed maps from foreign contexts into +// optimized code. +TEST(LeakGlobalContextViaMap) { + v8::HandleScope outer_scope; + v8::Persistent ctx1 = v8::Context::New(); + v8::Persistent ctx2 = v8::Context::New(); + ctx1->Enter(); + + HEAP->CollectAllAvailableGarbage(); + CHECK_EQ(4, NumberOfGlobalObjects()); + + { + v8::HandleScope inner_scope; + CompileRun("var v = {x: 42}"); + v8::Local v = ctx1->Global()->Get(v8_str("v")); + ctx2->Enter(); + ctx2->Global()->Set(v8_str("o"), v); + v8::Local res = CompileRun( + "function f() { return o.x; }" + "for (var i = 0; i < 1000000; ++i) f();" + "f();"); + CHECK_EQ(42, res->Int32Value()); + ctx2->Global()->Set(v8_str("o"), v8::Int32::New(0)); + ctx2->Exit(); + ctx1->Exit(); + ctx1.Dispose(); + } + HEAP->CollectAllAvailableGarbage(); + CHECK_EQ(2, NumberOfGlobalObjects()); + ctx2.Dispose(); + HEAP->CollectAllAvailableGarbage(); + CHECK_EQ(0, NumberOfGlobalObjects()); +} + + +// Test that we don't embed functions from foreign contexts into +// optimized code. 
+TEST(LeakGlobalContextViaFunction) { + v8::HandleScope outer_scope; + v8::Persistent ctx1 = v8::Context::New(); + v8::Persistent ctx2 = v8::Context::New(); + ctx1->Enter(); + + HEAP->CollectAllAvailableGarbage(); + CHECK_EQ(4, NumberOfGlobalObjects()); + + { + v8::HandleScope inner_scope; + CompileRun("var v = function() { return 42; }"); + v8::Local v = ctx1->Global()->Get(v8_str("v")); + ctx2->Enter(); + ctx2->Global()->Set(v8_str("o"), v); + v8::Local res = CompileRun( + "function f(x) { return x(); }" + "for (var i = 0; i < 1000000; ++i) f(o);" + "f(o);"); + CHECK_EQ(42, res->Int32Value()); + ctx2->Global()->Set(v8_str("o"), v8::Int32::New(0)); + ctx2->Exit(); + ctx1->Exit(); + ctx1.Dispose(); + } + HEAP->CollectAllAvailableGarbage(); + CHECK_EQ(2, NumberOfGlobalObjects()); + ctx2.Dispose(); + HEAP->CollectAllAvailableGarbage(); + CHECK_EQ(0, NumberOfGlobalObjects()); +} diff --git a/deps/v8/test/cctest/test-regexp.cc b/deps/v8/test/cctest/test-regexp.cc index b778478833..3070e16446 100644 --- a/deps/v8/test/cctest/test-regexp.cc +++ b/deps/v8/test/cctest/test-regexp.cc @@ -836,7 +836,8 @@ TEST(MacroAssemblerNativeSimpleUC16) { Handle code = Handle::cast(code_object); int captures[4] = {42, 37, 87, 117}; - const uc16 input_data[6] = {'f', 'o', 'o', 'f', 'o', '\xa0'}; + const uc16 input_data[6] = {'f', 'o', 'o', 'f', 'o', + static_cast('\xa0')}; Handle input = factory->NewStringFromTwoByte(Vector(input_data, 6)); Handle seq_input = Handle::cast(input); @@ -856,7 +857,8 @@ TEST(MacroAssemblerNativeSimpleUC16) { CHECK_EQ(-1, captures[2]); CHECK_EQ(-1, captures[3]); - const uc16 input_data2[9] = {'b', 'a', 'r', 'b', 'a', 'r', 'b', 'a', '\xa0'}; + const uc16 input_data2[9] = {'b', 'a', 'r', 'b', 'a', 'r', 'b', 'a', + static_cast('\xa0')}; input = factory->NewStringFromTwoByte(Vector(input_data2, 9)); seq_input = Handle::cast(input); start_adr = seq_input->GetCharsAddress(); diff --git a/deps/v8/test/mjsunit/array-literal-transitions.js b/deps/v8/test/mjsunit/array-literal-transitions.js index 4ddf2cb63d..f657525eb6 100644 --- a/deps/v8/test/mjsunit/array-literal-transitions.js +++ b/deps/v8/test/mjsunit/array-literal-transitions.js @@ -144,9 +144,9 @@ if (support_smi_only_arrays) { var array = deopt_array(false); assertTrue(2 != %GetOptimizationStatus(deopt_array)); deopt_array(true); - assertTrue(1 != %GetOptimizationStatus(deopt_array)); + assertTrue(2 != %GetOptimizationStatus(deopt_array)); array = deopt_array(false); - assertTrue(1 != %GetOptimizationStatus(deopt_array)); + assertTrue(2 != %GetOptimizationStatus(deopt_array)); // Check that unexpected changes in the objects stored into the boilerplate // also force a deopt. @@ -201,3 +201,10 @@ if (support_smi_only_arrays) { assertEquals(1, array[1]); assertEquals(foo, array[2]); } + +(function literals_after_osr() { + var color = [0]; + // Trigger OSR. + while (%GetOptimizationStatus(literals_after_osr) == 2) {} + return [color[0]]; +})(); diff --git a/deps/v8/test/mjsunit/d8-os.js b/deps/v8/test/mjsunit/d8-os.js index 5640326856..8292ab9261 100644 --- a/deps/v8/test/mjsunit/d8-os.js +++ b/deps/v8/test/mjsunit/d8-os.js @@ -54,6 +54,8 @@ function str_error(str) { if (this.os && os.system) { + // Ensure that we have a valid working directory. + os.chdir("/tmp"); try { // Delete the dir if it is lying around from last time. os.system("ls", [TEST_DIR]); @@ -143,42 +145,43 @@ if (this.os && os.system) { assertEquals("baz\n", os.system("echo", ["baz"])); //} } + + // Too few args. 
+ arg_error("os.umask();"); + arg_error("os.system();"); + arg_error("os.mkdirp();"); + arg_error("os.chdir();"); + arg_error("os.setenv();"); + arg_error("os.rmdir();"); + + // Too many args. + arg_error("os.setenv('FOO=bar');"); + arg_error("os.umask(0, 0);"); + arg_error("os.system('ls', [], -1, -1, -1);"); + arg_error("os.mkdirp('foo', 0, 0)"); + arg_error("os.chdir('foo', 'bar')"); + arg_error("os.rmdir('foo', 'bar');"); + + // Wrong kind of args. + arg_error("os.umask([]);"); + arg_error("os.system('ls', 'foo');"); + arg_error("os.system('ls', 123);"); + arg_error("os.system('ls', [], 'foo');"); + arg_error("os.system('ls', [], -1, 'foo');"); + arg_error("os.mkdirp('foo', 'bar');"); + + // Test broken toString(). + str_error("os.system(e);"); + str_error("os.system('ls', [e]);"); + str_error("os.system('ls', ['.', e]);"); + str_error("os.system('ls', [e, '.']);"); + str_error("os.mkdirp(e);"); + str_error("os.setenv(e, 'goo');"); + str_error("os.setenv('goo', e);"); + str_error("os.chdir(e);"); + str_error("os.rmdir(e);"); + } finally { os.system("rm", ["-r", TEST_DIR]); } - - // Too few args. - arg_error("os.umask();"); - arg_error("os.system();"); - arg_error("os.mkdirp();"); - arg_error("os.chdir();"); - arg_error("os.setenv();"); - arg_error("os.rmdir();"); - - // Too many args. - arg_error("os.setenv('FOO=bar');"); - arg_error("os.umask(0, 0);"); - arg_error("os.system('ls', [], -1, -1, -1);"); - arg_error("os.mkdirp('foo', 0, 0)"); - arg_error("os.chdir('foo', 'bar')"); - arg_error("os.rmdir('foo', 'bar');"); - - // Wrong kind of args. - arg_error("os.umask([]);"); - arg_error("os.system('ls', 'foo');"); - arg_error("os.system('ls', 123);"); - arg_error("os.system('ls', [], 'foo');"); - arg_error("os.system('ls', [], -1, 'foo');"); - arg_error("os.mkdirp('foo', 'bar');"); - - // Test broken toString(). - str_error("os.system(e);"); - str_error("os.system('ls', [e]);"); - str_error("os.system('ls', ['.', e]);"); - str_error("os.system('ls', [e, '.']);"); - str_error("os.mkdirp(e);"); - str_error("os.setenv(e, 'goo');"); - str_error("os.setenv('goo', e);"); - str_error("os.chdir(e);"); - str_error("os.rmdir(e);"); } diff --git a/deps/v8/test/mjsunit/function-named-self-reference.js b/deps/v8/test/mjsunit/function-named-self-reference.js new file mode 100644 index 0000000000..5b03b094b7 --- /dev/null +++ b/deps/v8/test/mjsunit/function-named-self-reference.js @@ -0,0 +1,45 @@ +// Copyright 2011 the V8 project authors. All rights reserved. +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following +// disclaimer in the documentation and/or other materials provided +// with the distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived +// from this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// Flags: --allow-natives-syntax + +var fn = function fn(val) { + if (val) return val; + + %OptimizeFunctionOnNextCall(fn); + + function run(val) { + var res = fn((val + 1) << 1); + + return res; + } + + return run(0); +} + +var res = fn(); +assertEquals(res, 2); diff --git a/deps/v8/test/mjsunit/regress/regress-crbug-100859.js b/deps/v8/test/mjsunit/regress/regress-crbug-100859.js new file mode 100644 index 0000000000..6824426271 --- /dev/null +++ b/deps/v8/test/mjsunit/regress/regress-crbug-100859.js @@ -0,0 +1,39 @@ +// Copyright 2011 the V8 project authors. All rights reserved. +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following +// disclaimer in the documentation and/or other materials provided +// with the distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived +// from this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// This used to trigger a crash because of an unhandled stack overflow. 
+function setx() {
+  setx(typeof new Uint16Array('x') === 'object');
+}
+var exception = false;
+try {
+  setx();
+} catch (ex) {
+  assertTrue(ex instanceof RangeError);
+  exception = true;
+}
+assertTrue(exception);
diff --git a/deps/v8/test/mjsunit/string-external-cached.js b/deps/v8/test/mjsunit/string-external-cached.js
index 0a95830d0b..6e24285331 100644
--- a/deps/v8/test/mjsunit/string-external-cached.js
+++ b/deps/v8/test/mjsunit/string-external-cached.js
@@ -86,6 +86,29 @@ function test() {
   assertEquals("DEFG", ascii_cons.substr(3, 4));
   assertEquals("DEFG", twobyte_cons.substr(4, 4));
   }
+
+  // Test adding external strings
+  var short_ascii = "E=";
+  var long_ascii = "MCsquared";
+  var short_twobyte = "E\u1234";
+  var long_twobyte = "MCsquare\u1234";
+  try {  // String can only be externalized once
+    externalizeString(short_ascii, false);
+    externalizeString(long_ascii, false);
+    externalizeString(short_twobyte, true);
+    externalizeString(long_twobyte, true);
+    assertTrue(isAsciiString(short_ascii) && isAsciiString(long_ascii));
+    assertFalse(isAsciiString(short_twobyte) || isAsciiString(long_twobyte));
+  } catch (ex) { }
+  assertEquals("E=MCsquared", short_ascii + long_ascii);
+  assertTrue(isAsciiString(short_ascii + long_ascii));
+  assertEquals("MCsquaredE=", long_ascii + short_ascii);
+  assertEquals("E\u1234MCsquare\u1234", short_twobyte + long_twobyte);
+  assertFalse(isAsciiString(short_twobyte + long_twobyte));
+  assertEquals("E=MCsquared", "E=" + long_ascii);
+  assertEquals("E\u1234MCsquared", short_twobyte + "MCsquared");
+  assertEquals("E\u1234MCsquared", short_twobyte + long_ascii);
+  assertFalse(isAsciiString(short_twobyte + long_ascii));
 }

 // Run the test many times to ensure IC-s don't break things.
diff --git a/deps/v8/test/test262/test262.status b/deps/v8/test/test262/test262.status
index c7d363d0db..5fa0ba8518 100644
--- a/deps/v8/test/test262/test262.status
+++ b/deps/v8/test/test262/test262.status
@@ -42,13 +42,11 @@ S10.4.2.1_A1: FAIL
 # V8 Bug: http://code.google.com/p/v8/issues/detail?id=1530
 S15.3.3.1_A4: FAIL

-# V8 Bug: http://code.google.com/p/v8/issues/detail?id=1756
-15.2.3.6-4-167: FAIL || PASS
-15.2.3.6-4-181: FAIL || PASS
-15.2.3.7-6-a-163: FAIL || PASS
-15.2.3.7-6-a-164: FAIL || PASS
-15.2.3.7-6-a-176: FAIL || PASS
-15.2.3.7-6-a-177: FAIL || PASS
+# V8 Bug: http://code.google.com/p/v8/issues/detail?id=1475
+15.2.3.6-4-405: FAIL
+15.2.3.6-4-410: FAIL
+15.2.3.6-4-415: FAIL
+15.2.3.6-4-420: FAIL

 # V8 Bug: http://code.google.com/p/v8/issues/detail?id=1772
 15.2.3.6-4-292-1: FAIL
@@ -207,37 +205,6 @@ S15.1.1.3_A2_T2: FAIL_OK
 # undefined
 S15.4.4.2_A2_T1: FAIL_OK
 S15.4.4.3_A2_T1: FAIL_OK
-######################### UNANALYZED FAILURES ##########################
-
-# Bug? ES5 Attributes - Updating indexed data property 'P' whose attributes are
-# [[Writable]]: false, [[Enumerable]]: true, [[Configurable]]: true to an
-# accessor property, 'A' is an Array object (8.12.9 - step 9.b.i)
-15.2.3.6-4-360-1: FAIL
-# Bug? ES5 Attributes - Updating indexed data property 'P' whose attributes are
-# [[Writable]]: false, [[Enumerable]]: true, [[Configurable]]: true to an
-# accessor property, 'O' is an Arguments object (8.12.9 - step 9.b.i)
-15.2.3.6-4-360-6: FAIL
-# Bug? ES5 Attributes - Updating indexed data property 'P' whose attributes are
-# [[Writable]]: false, [[Enumerable]]: true, [[Configurable]]: true to an
-# accessor property, 'O' is the global object (8.12.9 - step 9.b.i)
-15.2.3.6-4-360-7: FAIL
-# Bug?
ES5 Attributes - Failed to add a property to an object when the object's -# object has a property with same name and [[Writable]] attribute is set to -# false (Number instance) -15.2.3.6-4-405: FAIL -# Bug? ES5 Attributes - Failed to add a property to an object when the object's -# prototype has a property with the same name and [[Writable]] set to false -# (JSON) -15.2.3.6-4-410: FAIL -# Bug? ES5 Attributes - Failed to add properties to an object when the object's -# prototype has properties with the same name and [[Writable]] set to false -# (Object.create) -15.2.3.6-4-415: FAIL -# Bug? ES5 Attributes - Failed to add a property to an object when the object's -# prototype has a property with the same name and [[Writable]] set to -# false(Function.prototype.bind) -15.2.3.6-4-420: FAIL - ############################ SKIPPED TESTS ############################# # These tests take a looong time to run in debug mode. diff --git a/deps/v8/test/test262/testcfg.py b/deps/v8/test/test262/testcfg.py index 52127cdd2f..aefda196a3 100644 --- a/deps/v8/test/test262/testcfg.py +++ b/deps/v8/test/test262/testcfg.py @@ -55,7 +55,6 @@ class Test262TestCase(test.TestCase): def GetCommand(self): result = self.context.GetVmCommand(self, self.mode) - result += ['-e', 'var window = this'] result += self.framework result.append(self.filename) return result diff --git a/deps/v8/tools/grokdump.py b/deps/v8/tools/grokdump.py index 6bc49c68a8..0c7e125e4b 100755 --- a/deps/v8/tools/grokdump.py +++ b/deps/v8/tools/grokdump.py @@ -52,6 +52,7 @@ Examples: $ %prog 12345678-1234-1234-1234-123456789abcd-full.dmp """ + DEBUG=False @@ -233,6 +234,80 @@ MINIDUMP_CONTEXT_X86 = Descriptor([ MD_CONTEXT_X86_EXTENDED_REGISTERS)) ]) +MD_CONTEXT_AMD64 = 0x00100000 +MD_CONTEXT_AMD64_CONTROL = (MD_CONTEXT_AMD64 | 0x00000001) +MD_CONTEXT_AMD64_INTEGER = (MD_CONTEXT_AMD64 | 0x00000002) +MD_CONTEXT_AMD64_SEGMENTS = (MD_CONTEXT_AMD64 | 0x00000004) +MD_CONTEXT_AMD64_FLOATING_POINT = (MD_CONTEXT_AMD64 | 0x00000008) +MD_CONTEXT_AMD64_DEBUG_REGISTERS = (MD_CONTEXT_AMD64 | 0x00000010) + +MINIDUMP_CONTEXT_AMD64 = Descriptor([ + ("p1_home", ctypes.c_uint64), + ("p2_home", ctypes.c_uint64), + ("p3_home", ctypes.c_uint64), + ("p4_home", ctypes.c_uint64), + ("p5_home", ctypes.c_uint64), + ("p6_home", ctypes.c_uint64), + ("context_flags", ctypes.c_uint32), + ("mx_csr", ctypes.c_uint32), + # MD_CONTEXT_AMD64_CONTROL. + ("cs", EnableOnFlag(ctypes.c_uint16, MD_CONTEXT_AMD64_CONTROL)), + # MD_CONTEXT_AMD64_SEGMENTS + ("ds", EnableOnFlag(ctypes.c_uint16, MD_CONTEXT_AMD64_SEGMENTS)), + ("es", EnableOnFlag(ctypes.c_uint16, MD_CONTEXT_AMD64_SEGMENTS)), + ("fs", EnableOnFlag(ctypes.c_uint16, MD_CONTEXT_AMD64_SEGMENTS)), + ("gs", EnableOnFlag(ctypes.c_uint16, MD_CONTEXT_AMD64_SEGMENTS)), + # MD_CONTEXT_AMD64_CONTROL. + ("ss", EnableOnFlag(ctypes.c_uint16, MD_CONTEXT_AMD64_CONTROL)), + ("eflags", EnableOnFlag(ctypes.c_uint32, MD_CONTEXT_AMD64_CONTROL)), + # MD_CONTEXT_AMD64_DEBUG_REGISTERS. + ("dr0", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_DEBUG_REGISTERS)), + ("dr1", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_DEBUG_REGISTERS)), + ("dr2", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_DEBUG_REGISTERS)), + ("dr3", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_DEBUG_REGISTERS)), + ("dr6", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_DEBUG_REGISTERS)), + ("dr7", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_DEBUG_REGISTERS)), + # MD_CONTEXT_AMD64_INTEGER. 
+ ("rax", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_INTEGER)), + ("rcx", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_INTEGER)), + ("rdx", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_INTEGER)), + ("rbx", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_INTEGER)), + # MD_CONTEXT_AMD64_CONTROL. + ("rsp", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_CONTROL)), + # MD_CONTEXT_AMD64_INTEGER. + ("rbp", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_INTEGER)), + ("rsi", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_INTEGER)), + ("rdi", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_INTEGER)), + ("r8", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_INTEGER)), + ("r9", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_INTEGER)), + ("r10", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_INTEGER)), + ("r11", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_INTEGER)), + ("r12", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_INTEGER)), + ("r13", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_INTEGER)), + ("r14", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_INTEGER)), + ("r15", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_INTEGER)), + # MD_CONTEXT_AMD64_CONTROL. + ("rip", EnableOnFlag(ctypes.c_uint64, MD_CONTEXT_AMD64_CONTROL)), + # MD_CONTEXT_AMD64_FLOATING_POINT + ("sse_registers", EnableOnFlag(ctypes.c_uint8 * (16 * 26), + MD_CONTEXT_AMD64_FLOATING_POINT)), + ("vector_registers", EnableOnFlag(ctypes.c_uint8 * (16 * 26), + MD_CONTEXT_AMD64_FLOATING_POINT)), + ("vector_control", EnableOnFlag(ctypes.c_uint64, + MD_CONTEXT_AMD64_FLOATING_POINT)), + # MD_CONTEXT_AMD64_DEBUG_REGISTERS. + ("debug_control", EnableOnFlag(ctypes.c_uint64, + MD_CONTEXT_AMD64_DEBUG_REGISTERS)), + ("last_branch_to_rip", EnableOnFlag(ctypes.c_uint64, + MD_CONTEXT_AMD64_DEBUG_REGISTERS)), + ("last_branch_from_rip", EnableOnFlag(ctypes.c_uint64, + MD_CONTEXT_AMD64_DEBUG_REGISTERS)), + ("last_exception_to_rip", EnableOnFlag(ctypes.c_uint64, + MD_CONTEXT_AMD64_DEBUG_REGISTERS)), + ("last_exception_from_rip", EnableOnFlag(ctypes.c_uint64, + MD_CONTEXT_AMD64_DEBUG_REGISTERS)) +]) + MINIDUMP_MEMORY_DESCRIPTOR = Descriptor([ ("start", ctypes.c_uint64), ("memory", MINIDUMP_LOCATION_DESCRIPTOR.ctype) @@ -269,6 +344,12 @@ MINIDUMP_THREAD_LIST = Descriptor([ ("threads", lambda t: MINIDUMP_THREAD.ctype * t.thread_count) ]) +MINIDUMP_RAW_SYSTEM_INFO = Descriptor([ + ("processor_architecture", ctypes.c_uint16) +]) + +MD_CPU_ARCHITECTURE_X86 = 0 +MD_CPU_ARCHITECTURE_AMD64 = 9 class MinidumpReader(object): """Minidump (.dmp) reader.""" @@ -288,20 +369,34 @@ class MinidumpReader(object): for _ in xrange(self.header.stream_count): directories.append(MINIDUMP_DIRECTORY.Read(self.minidump, offset)) offset += MINIDUMP_DIRECTORY.size + self.arch = None self.exception = None self.exception_context = None self.memory_list = None self.memory_list64 = None self.thread_map = {} + + # Find MDRawSystemInfo stream and determine arch. + for d in directories: + if d.stream_type == MD_SYSTEM_INFO_STREAM: + system_info = MINIDUMP_RAW_SYSTEM_INFO.Read( + self.minidump, d.location.rva) + self.arch = system_info.processor_architecture + assert self.arch in [MD_CPU_ARCHITECTURE_AMD64, MD_CPU_ARCHITECTURE_X86] + assert not self.arch is None + for d in directories: DebugPrint(d) - # TODO(vitalyr): extract system info including CPU features. 
if d.stream_type == MD_EXCEPTION_STREAM: self.exception = MINIDUMP_EXCEPTION_STREAM.Read( self.minidump, d.location.rva) DebugPrint(self.exception) - self.exception_context = MINIDUMP_CONTEXT_X86.Read( - self.minidump, self.exception.thread_context.rva) + if self.arch == MD_CPU_ARCHITECTURE_X86: + self.exception_context = MINIDUMP_CONTEXT_X86.Read( + self.minidump, self.exception.thread_context.rva) + elif self.arch == MD_CPU_ARCHITECTURE_AMD64: + self.exception_context = MINIDUMP_CONTEXT_AMD64.Read( + self.minidump, self.exception.thread_context.rva) DebugPrint(self.exception_context) elif d.stream_type == MD_THREAD_LIST_STREAM: thread_list = MINIDUMP_THREAD_LIST.Read(self.minidump, d.location.rva) @@ -335,6 +430,16 @@ class MinidumpReader(object): location = self.FindLocation(address) return ctypes.c_uint32.from_buffer(self.minidump, location).value + def ReadU64(self, address): + location = self.FindLocation(address) + return ctypes.c_uint64.from_buffer(self.minidump, location).value + + def ReadUIntPtr(self, address): + if self.arch == MD_CPU_ARCHITECTURE_AMD64: + return self.ReadU64(address) + elif self.arch == MD_CPU_ARCHITECTURE_X86: + return self.ReadU32(address) + def ReadBytes(self, address, size): location = self.FindLocation(address) return self.minidump[location:location + size] @@ -355,10 +460,15 @@ class MinidumpReader(object): def GetDisasmLines(self, address, size): location = self.FindLocation(address) if location is None: return [] + arch = None + if self.arch == MD_CPU_ARCHITECTURE_X86: + arch = "ia32" + elif self.arch == MD_CPU_ARCHITECTURE_AMD64: + arch = "x64" return disasm.GetDisasmLines(self.minidump_name, location, size, - "ia32", + arch, False) @@ -366,6 +476,33 @@ class MinidumpReader(object): self.minidump.close() self.minidump_file.close() + def ExceptionIP(self): + if self.arch == MD_CPU_ARCHITECTURE_AMD64: + return self.exception_context.rip + elif self.arch == MD_CPU_ARCHITECTURE_X86: + return self.exception_context.eip + + def ExceptionSP(self): + if self.arch == MD_CPU_ARCHITECTURE_AMD64: + return self.exception_context.rsp + elif self.arch == MD_CPU_ARCHITECTURE_X86: + return self.exception_context.rbp + + def FormatIntPtr(self, value): + if self.arch == MD_CPU_ARCHITECTURE_AMD64: + return "%016x" % value + elif self.arch == MD_CPU_ARCHITECTURE_X86: + return "%08x" % value + + def PointerSize(self): + if self.arch == MD_CPU_ARCHITECTURE_AMD64: + return 8 + elif self.arch == MD_CPU_ARCHITECTURE_X86: + return 4 + + def Register(self, name): + return self.exception_context.__getattribute__(name) + # List of V8 instance types. Obtained by adding the code below to any .cc file. 
# @@ -501,34 +638,36 @@ class HeapObject(object): p.Print(str(self)) def __str__(self): - return "HeapObject(%08x, %s)" % (self.address, - INSTANCE_TYPES[self.map.instance_type]) + return "HeapObject(%s, %s)" % (self.heap.reader.FormatIntPtr(self.address), + INSTANCE_TYPES[self.map.instance_type]) def ObjectField(self, offset): - field_value = self.heap.reader.ReadU32(self.address + offset) + field_value = self.heap.reader.ReadUIntPtr(self.address + offset) return self.heap.FindObjectOrSmi(field_value) def SmiField(self, offset): - field_value = self.heap.reader.ReadU32(self.address + offset) + field_value = self.heap.reader.ReadUIntPtr(self.address + offset) assert (field_value & 1) == 0 return field_value / 2 class Map(HeapObject): - INSTANCE_TYPE_OFFSET = 8 + def InstanceTypeOffset(): + return self.heap.PointerSize() + self.heap.IntSize() def __init__(self, heap, map, address): HeapObject.__init__(self, heap, map, address) self.instance_type = \ - heap.reader.ReadU8(self.address + Map.INSTANCE_TYPE_OFFSET) + heap.reader.ReadU8(self.address + self.InstanceTypeOffset()) class String(HeapObject): - LENGTH_OFFSET = 4 + def LengthOffset(self): + return self.heap.PointerSize() def __init__(self, heap, map, address): HeapObject.__init__(self, heap, map, address) - self.length = self.SmiField(String.LENGTH_OFFSET) + self.length = self.SmiField(self.LengthOffset()) def GetChars(self): return "?string?" @@ -541,11 +680,12 @@ class String(HeapObject): class SeqString(String): - CHARS_OFFSET = 12 + def CharsOffset(self): + return self.heap.PointerSize() * 3 def __init__(self, heap, map, address): String.__init__(self, heap, map, address) - self.chars = heap.reader.ReadBytes(self.address + SeqString.CHARS_OFFSET, + self.chars = heap.reader.ReadBytes(self.address + self.CharsOffset(), self.length) def GetChars(self): @@ -553,6 +693,7 @@ class SeqString(String): class ExternalString(String): + # TODO(vegorov) fix ExternalString for X64 architecture RESOURCE_OFFSET = 12 WEBKIT_RESOUCE_STRING_IMPL_OFFSET = 4 @@ -582,24 +723,28 @@ class ExternalString(String): class ConsString(String): - LEFT_OFFSET = 12 - RIGHT_OFFSET = 16 + def LeftOffset(self): + return self.heap.PointerSize() * 3 + + def RightOffset(self): + return self.heap.PointerSize() * 4 def __init__(self, heap, map, address): String.__init__(self, heap, map, address) - self.left = self.ObjectField(ConsString.LEFT_OFFSET) - self.right = self.ObjectField(ConsString.RIGHT_OFFSET) + self.left = self.ObjectField(self.LeftOffset()) + self.right = self.ObjectField(self.RightOffset()) def GetChars(self): return self.left.GetChars() + self.right.GetChars() class Oddball(HeapObject): - TO_STRING_OFFSET = 4 + def ToStringOffset(self): + return self.heap.PointerSize() def __init__(self, heap, map, address): HeapObject.__init__(self, heap, map, address) - self.to_string = self.ObjectField(Oddball.TO_STRING_OFFSET) + self.to_string = self.ObjectField(self.ToStringOffset()) def Print(self, p): p.Print(str(self)) @@ -609,19 +754,23 @@ class Oddball(HeapObject): class FixedArray(HeapObject): - LENGTH_OFFSET = 4 - ELEMENTS_OFFSET = 8 + def LengthOffset(self): + return self.heap.PointerSize() + + def ElementsOffset(self): + return self.heap.PointerSize() * 2 def __init__(self, heap, map, address): HeapObject.__init__(self, heap, map, address) - self.length = self.SmiField(FixedArray.LENGTH_OFFSET) + self.length = self.SmiField(self.LengthOffset()) def Print(self, p): - p.Print("FixedArray(%08x) {" % self.address) + p.Print("FixedArray(%s) {" % 
def Print(self, p): - p.Print("FixedArray(%08x) {" % self.address) + p.Print("FixedArray(%s) {" % self.heap.reader.FormatIntPtr(self.address)) p.Indent() p.Print("length: %d" % self.length) + base_offset = self.ElementsOffset() for i in xrange(self.length): - offset = FixedArray.ELEMENTS_OFFSET + 4 * i + offset = base_offset + 4 * i p.Print("[%08d] = %s" % (i, self.ObjectField(offset))) p.Dedent() p.Print("}") @@ -631,19 +780,22 @@ class FixedArray(HeapObject): class JSFunction(HeapObject): - CODE_ENTRY_OFFSET = 12 - SHARED_OFFSET = 20 + def CodeEntryOffset(self): + return 3 * self.heap.PointerSize() + + def SharedOffset(self): + return 5 * self.heap.PointerSize() def __init__(self, heap, map, address): HeapObject.__init__(self, heap, map, address) code_entry = \ - heap.reader.ReadU32(self.address + JSFunction.CODE_ENTRY_OFFSET) - self.code = heap.FindObject(code_entry - Code.ENTRY_OFFSET + 1) - self.shared = self.ObjectField(JSFunction.SHARED_OFFSET) + heap.reader.ReadU32(self.address + self.CodeEntryOffset()) + self.code = heap.FindObject(code_entry - Code.HeaderSize(heap) + 1) + self.shared = self.ObjectField(self.SharedOffset()) def Print(self, p): source = "\n".join(" %s" % line for line in self._GetSource().split("\n")) - p.Print("JSFunction(%08x) {" % self.address) + p.Print("JSFunction(%s) {" % self.heap.reader.FormatIntPtr(self.address)) p.Indent() p.Print("inferred name: %s" % self.shared.inferred_name) if self.shared.script.Is(Script) and self.shared.script.name.Is(String): @@ -662,7 +814,8 @@ class JSFunction(HeapObject): inferred_name = "" if self.shared.Is(SharedFunctionInfo): inferred_name = self.shared.inferred_name - return "JSFunction(%08x, %s)" % (self.address, inferred_name) + return "JSFunction(%s, %s)" % \ + (self.heap.reader.FormatIntPtr(self.address), inferred_name) def _GetSource(self): source = "?source?" @@ -675,47 +828,75 @@ class JSFunction(HeapObject): class SharedFunctionInfo(HeapObject): - CODE_OFFSET = 2 * 4 - SCRIPT_OFFSET = 7 * 4 - INFERRED_NAME_OFFSET = 9 * 4 - START_POSITION_AND_TYPE_OFFSET = 17 * 4 - END_POSITION_OFFSET = 18 * 4 + def CodeOffset(self): + return 2 * self.heap.PointerSize() + + def ScriptOffset(self): + return 7 * self.heap.PointerSize() + + def InferredNameOffset(self): + return 9 * self.heap.PointerSize() + + def EndPositionOffset(self): + return 12 * self.heap.PointerSize() + 4 * self.heap.IntSize() + + def StartPositionAndTypeOffset(self): + return 12 * self.heap.PointerSize() + 5 * self.heap.IntSize() def __init__(self, heap, map, address): HeapObject.__init__(self, heap, map, address) - self.code = self.ObjectField(SharedFunctionInfo.CODE_OFFSET) - self.script = self.ObjectField(SharedFunctionInfo.SCRIPT_OFFSET) - self.inferred_name = \ - self.ObjectField(SharedFunctionInfo.INFERRED_NAME_OFFSET) - start_position_and_type = \ - self.SmiField(SharedFunctionInfo.START_POSITION_AND_TYPE_OFFSET) - self.start_position = start_position_and_type >> 2 - self.end_position = self.SmiField(SharedFunctionInfo.END_POSITION_OFFSET) + self.code = self.ObjectField(self.CodeOffset()) + self.script = self.ObjectField(self.ScriptOffset()) + self.inferred_name = self.ObjectField(self.InferredNameOffset()) + if heap.PointerSize() == 8: + start_position_and_type = \ + heap.reader.ReadU32(self.address + self.StartPositionAndTypeOffset()) + self.start_position = start_position_and_type >> 2 + pseudo_smi_end_position = \ + heap.reader.ReadU32(self.address + self.EndPositionOffset()) + self.end_position = pseudo_smi_end_position >> 2 + else: + start_position_and_type = \ + self.SmiField(self.StartPositionAndTypeOffset()) + self.start_position = start_position_and_type >> 2 + self.end_position = \ + self.SmiField(self.EndPositionOffset())
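On 64-bit dumps the position fields above are plain 32-bit "pseudo smi" slots, so the constructor reads them with ReadU32 and shifts by two, whereas on ia32 it goes through SmiField first. A worked sketch of the two decodings with an invented position value; DecodeStartPosition is illustrative only.

def DecodeStartPosition(raw, pointer_size):
  if pointer_size == 8:
    # x64: 32-bit pseudo smi; shift out the two low type bits.
    return raw >> 2
  # ia32: a real smi; untag it first, then drop the type bits.
  assert (raw & 1) == 0
  return (raw >> 1) >> 2

assert DecodeStartPosition(7 << 2, 8) == 7         # position 7, x64 encoding
assert DecodeStartPosition((7 << 2) << 1, 4) == 7  # position 7, ia32 smi encoding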
class Script(HeapObject): - SOURCE_OFFSET = 4 - NAME_OFFSET = 8 + def SourceOffset(self): + return self.heap.PointerSize() + + def NameOffset(self): + return self.SourceOffset() + self.heap.PointerSize() def __init__(self, heap, map, address): HeapObject.__init__(self, heap, map, address) - self.source = self.ObjectField(Script.SOURCE_OFFSET) - self.name = self.ObjectField(Script.NAME_OFFSET) + self.source = self.ObjectField(self.SourceOffset()) + self.name = self.ObjectField(self.NameOffset()) class Code(HeapObject): - INSTRUCTION_SIZE_OFFSET = 4 - ENTRY_OFFSET = 32 + CODE_ALIGNMENT_MASK = (1 << 5) - 1 + + def InstructionSizeOffset(self): + return self.heap.PointerSize() + + @staticmethod + def HeaderSize(heap): + return (heap.PointerSize() + heap.IntSize() + \ + 4 * heap.PointerSize() + 3 * heap.IntSize() + \ + Code.CODE_ALIGNMENT_MASK) & ~Code.CODE_ALIGNMENT_MASK def __init__(self, heap, map, address): HeapObject.__init__(self, heap, map, address) - self.entry = self.address + Code.ENTRY_OFFSET + self.entry = self.address + Code.HeaderSize(heap) self.instruction_size = \ - heap.reader.ReadU32(self.address + Code.INSTRUCTION_SIZE_OFFSET) + heap.reader.ReadU32(self.address + self.InstructionSizeOffset()) def Print(self, p): lines = self.heap.reader.GetDisasmLines(self.entry, self.instruction_size) - p.Print("Code(%08x) {" % self.address) + p.Print("Code(%s) {" % self.heap.reader.FormatIntPtr(self.address)) p.Indent() p.Print("instruction_size: %d" % self.instruction_size) p.PrintLines(self._FormatLine(line) for line in lines) @@ -767,7 +948,7 @@ class V8Heap(object): if (tagged_address & 1) != 1: return None address = tagged_address - 1 if not self.reader.IsValidAddress(address): return None - map_tagged_address = self.reader.ReadU32(address) + map_tagged_address = self.reader.ReadUIntPtr(address) if tagged_address == map_tagged_address: # Meta map? meta_map = Map(self, None, address) @@ -785,9 +966,19 @@ class V8Heap(object): self.objects[tagged_address] = object return object + def PointerSize(self): + return self.reader.PointerSize() + + EIP_PROXIMITY = 64 +CONTEXT_FOR_ARCH = { + MD_CPU_ARCHITECTURE_AMD64: + ['rax', 'rbx', 'rcx', 'rdx', 'rdi', 'rsi', 'rbp', 'rsp', 'rip'], + MD_CPU_ARCHITECTURE_X86: + ['eax', 'ebx', 'ecx', 'edx', 'edi', 'esi', 'ebp', 'esp', 'eip'] +} def AnalyzeMinidump(options, minidump_name): reader = MinidumpReader(options, minidump_name) @@ -800,40 +991,35 @@ def AnalyzeMinidump(options, minidump_name): print "  thread id: %d" % exception_thread.id print "  code: %08X" % reader.exception.exception.code print "  context:" - print "  eax: %08x" % reader.exception_context.eax - print "  ebx: %08x" % reader.exception_context.ebx - print "  ecx: %08x" % reader.exception_context.ecx - print "  edx: %08x" % reader.exception_context.edx - print "  edi: %08x" % reader.exception_context.edi - print "  esi: %08x" % reader.exception_context.esi - print "  ebp: %08x" % reader.exception_context.ebp - print "  esp: %08x" % reader.exception_context.esp - print "  eip: %08x" % reader.exception_context.eip + for r in CONTEXT_FOR_ARCH[reader.arch]: + print "  %s: %s" % (r, reader.FormatIntPtr(reader.Register(r))) # TODO(vitalyr): decode eflags.
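Code.HeaderSize above rounds the raw header byte count up to V8's 32-byte code alignment with the usual mask trick. The same arithmetic in isolation; the sizes fed to AlignUp are arbitrary examples, not V8's actual header layouts.

CODE_ALIGNMENT = 32
CODE_ALIGNMENT_MASK = CODE_ALIGNMENT - 1

def AlignUp(size):
  # (size + mask) & ~mask rounds up to the next multiple of the alignment.
  return (size + CODE_ALIGNMENT_MASK) & ~CODE_ALIGNMENT_MASK

assert AlignUp(32) == 32   # already aligned
assert AlignUp(36) == 64
assert AlignUp(87) == 96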
print " eflags: %s" % bin(reader.exception_context.eflags)[2:] print + stack_top = reader.ExceptionSP() stack_bottom = exception_thread.stack.start + \ exception_thread.stack.memory.data_size - stack_map = {reader.exception_context.eip: -1} - for slot in xrange(reader.exception_context.esp, stack_bottom, 4): - maybe_address = reader.ReadU32(slot) + stack_map = {reader.ExceptionIP(): -1} + for slot in xrange(stack_top, stack_bottom, reader.PointerSize()): + maybe_address = reader.ReadUIntPtr(slot) if not maybe_address in stack_map: stack_map[maybe_address] = slot heap = V8Heap(reader, stack_map) print "Disassembly around exception.eip:" - start = reader.exception_context.eip - EIP_PROXIMITY + start = reader.ExceptionIP() - EIP_PROXIMITY lines = reader.GetDisasmLines(start, 2 * EIP_PROXIMITY) for line in lines: print FormatDisasmLine(start, heap, line) print print "Annotated stack (from exception.esp to bottom):" - for slot in xrange(reader.exception_context.esp, stack_bottom, 4): - maybe_address = reader.ReadU32(slot) + for slot in xrange(stack_top, stack_bottom, reader.PointerSize()): + maybe_address = reader.ReadUIntPtr(slot) heap_object = heap.FindObject(maybe_address) - print "%08x: %08x" % (slot, maybe_address) + print "%s: %s" % (reader.FormatIntPtr(slot), + reader.FormatIntPtr(maybe_address)) if heap_object: heap_object.Print(Printer()) print