Browse Source

Upgrade V8 to 3.1.8.5

v0.7.4-release
Ryan Dahl 14 years ago
parent
commit
c8ee19a618
  1. 1
      deps/v8/SConstruct
  2. 5
      deps/v8/src/arm/deoptimizer-arm.cc
  3. 1
      deps/v8/src/arm/lithium-codegen-arm.cc
  4. 1
      deps/v8/src/assembler.cc
  5. 3
      deps/v8/src/assembler.h
  6. 7
      deps/v8/src/deoptimizer.h
  7. 9
      deps/v8/src/hydrogen-instructions.h
  8. 74
      deps/v8/src/ia32/deoptimizer-ia32.cc
  9. 40
      deps/v8/src/ia32/lithium-codegen-ia32.cc
  10. 2
      deps/v8/src/version.cc
  11. 5
      deps/v8/src/x64/deoptimizer-x64.cc
  12. 18
      deps/v8/src/x64/lithium-codegen-x64.cc
  13. 65
      deps/v8/test/mjsunit/compiler/regress-loadfield.js
  14. 52
      deps/v8/test/mjsunit/regress/regress-lazy-deopt-reloc.js

1
deps/v8/SConstruct

@ -302,6 +302,7 @@ V8_EXTRA_FLAGS = {
'gcc': { 'gcc': {
'all': { 'all': {
'WARNINGFLAGS': ['-Wall', 'WARNINGFLAGS': ['-Wall',
'-Werror',
'-W', '-W',
'-Wno-unused-parameter', '-Wno-unused-parameter',
'-Wnon-virtual-dtor'] '-Wnon-virtual-dtor']

5
deps/v8/src/arm/deoptimizer-arm.cc

@ -44,6 +44,11 @@ int Deoptimizer::patch_size() {
} }
void Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(Handle<Code> code) {
// Nothing to do. No new relocation information is written for lazy
// deoptimization on ARM.
}
void Deoptimizer::DeoptimizeFunction(JSFunction* function) { void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
AssertNoAllocation no_allocation; AssertNoAllocation no_allocation;

1
deps/v8/src/arm/lithium-codegen-arm.cc

@ -75,6 +75,7 @@ void LCodeGen::FinishCode(Handle<Code> code) {
code->set_stack_slots(StackSlotCount()); code->set_stack_slots(StackSlotCount());
code->set_safepoint_table_offset(safepoints_.GetCodeOffset()); code->set_safepoint_table_offset(safepoints_.GetCodeOffset());
PopulateDeoptimizationData(code); PopulateDeoptimizationData(code);
Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(code);
} }

1
deps/v8/src/assembler.cc

@ -139,6 +139,7 @@ const int kPCJumpTag = (1 << kExtraTagBits) - 1;
const int kSmallPCDeltaBits = kBitsPerByte - kTagBits; const int kSmallPCDeltaBits = kBitsPerByte - kTagBits;
const int kSmallPCDeltaMask = (1 << kSmallPCDeltaBits) - 1; const int kSmallPCDeltaMask = (1 << kSmallPCDeltaBits) - 1;
const int RelocInfo::kMaxSmallPCDelta = kSmallPCDeltaMask;
const int kVariableLengthPCJumpTopTag = 1; const int kVariableLengthPCJumpTopTag = 1;
const int kChunkBits = 7; const int kChunkBits = 7;

3
deps/v8/src/assembler.h

@ -192,6 +192,9 @@ class RelocInfo BASE_EMBEDDED {
// The maximum size for a call instruction including pc-jump. // The maximum size for a call instruction including pc-jump.
static const int kMaxCallSize = 6; static const int kMaxCallSize = 6;
// The maximum pc delta that will use the short encoding.
static const int kMaxSmallPCDelta;
enum Mode { enum Mode {
// Please note the order is important (see IsCodeTarget, IsGCRelocMode). // Please note the order is important (see IsCodeTarget, IsGCRelocMode).
CONSTRUCT_CALL, // code target that is a call to a JavaScript constructor. CONSTRUCT_CALL, // code target that is a call to a JavaScript constructor.

7
deps/v8/src/deoptimizer.h

@ -110,6 +110,13 @@ class Deoptimizer : public Malloced {
int fp_to_sp_delta); int fp_to_sp_delta);
static Deoptimizer* Grab(); static Deoptimizer* Grab();
// Makes sure that there is enough room in the relocation
// information of a code object to perform lazy deoptimization
// patching. If there is not enough room a new relocation
// information object is allocated and comments are added until it
// is big enough.
static void EnsureRelocSpaceForLazyDeoptimization(Handle<Code> code);
// Deoptimize the function now. Its current optimized code will never be run // Deoptimize the function now. Its current optimized code will never be run
// again and any activations of the optimized code will get deoptimized when // again and any activations of the optimized code will get deoptimized when
// execution returns. // execution returns.

9
deps/v8/src/hydrogen-instructions.h

@ -1423,8 +1423,9 @@ class HJSArrayLength: public HUnaryOperation {
// object. It is guaranteed to be 32 bit integer, but it can be // object. It is guaranteed to be 32 bit integer, but it can be
// represented as either a smi or heap number. // represented as either a smi or heap number.
set_representation(Representation::Tagged()); set_representation(Representation::Tagged());
SetFlag(kDependsOnArrayLengths);
SetFlag(kUseGVN); SetFlag(kUseGVN);
SetFlag(kDependsOnArrayLengths);
SetFlag(kDependsOnMaps);
} }
virtual Representation RequiredInputRepresentation(int index) const { virtual Representation RequiredInputRepresentation(int index) const {
@ -1442,8 +1443,8 @@ class HFixedArrayLength: public HUnaryOperation {
public: public:
explicit HFixedArrayLength(HValue* value) : HUnaryOperation(value) { explicit HFixedArrayLength(HValue* value) : HUnaryOperation(value) {
set_representation(Representation::Tagged()); set_representation(Representation::Tagged());
SetFlag(kDependsOnArrayLengths);
SetFlag(kUseGVN); SetFlag(kUseGVN);
SetFlag(kDependsOnArrayLengths);
} }
virtual Representation RequiredInputRepresentation(int index) const { virtual Representation RequiredInputRepresentation(int index) const {
@ -2268,6 +2269,7 @@ class HCompareJSObjectEq: public HBinaryOperation {
: HBinaryOperation(left, right) { : HBinaryOperation(left, right) {
set_representation(Representation::Tagged()); set_representation(Representation::Tagged());
SetFlag(kUseGVN); SetFlag(kUseGVN);
SetFlag(kDependsOnMaps);
} }
virtual bool EmitAtUses() const { virtual bool EmitAtUses() const {
@ -2943,6 +2945,7 @@ class HLoadNamedField: public HUnaryOperation {
offset_(offset) { offset_(offset) {
set_representation(Representation::Tagged()); set_representation(Representation::Tagged());
SetFlag(kUseGVN); SetFlag(kUseGVN);
SetFlag(kDependsOnMaps);
if (is_in_object) { if (is_in_object) {
SetFlag(kDependsOnInobjectFields); SetFlag(kDependsOnInobjectFields);
} else { } else {
@ -3269,6 +3272,7 @@ class HStringCharCodeAt: public HBinaryOperation {
: HBinaryOperation(string, index) { : HBinaryOperation(string, index) {
set_representation(Representation::Integer32()); set_representation(Representation::Integer32());
SetFlag(kUseGVN); SetFlag(kUseGVN);
SetFlag(kDependsOnMaps);
} }
virtual Representation RequiredInputRepresentation(int index) const { virtual Representation RequiredInputRepresentation(int index) const {
@ -3296,6 +3300,7 @@ class HStringLength: public HUnaryOperation {
explicit HStringLength(HValue* string) : HUnaryOperation(string) { explicit HStringLength(HValue* string) : HUnaryOperation(string) {
set_representation(Representation::Tagged()); set_representation(Representation::Tagged());
SetFlag(kUseGVN); SetFlag(kUseGVN);
SetFlag(kDependsOnMaps);
} }
virtual Representation RequiredInputRepresentation(int index) const { virtual Representation RequiredInputRepresentation(int index) const {

74
deps/v8/src/ia32/deoptimizer-ia32.cc

@ -55,6 +55,80 @@ static void ZapCodeRange(Address start, Address end) {
} }
void Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(Handle<Code> code) {
HandleScope scope;
// Compute the size of relocation information needed for the code
// patching in Deoptimizer::DeoptimizeFunction.
int min_reloc_size = 0;
Address prev_reloc_address = code->instruction_start();
Address code_start_address = code->instruction_start();
SafepointTable table(*code);
for (unsigned i = 0; i < table.length(); ++i) {
Address curr_reloc_address = code_start_address + table.GetPcOffset(i);
ASSERT_GE(curr_reloc_address, prev_reloc_address);
SafepointEntry safepoint_entry = table.GetEntry(i);
int deoptimization_index = safepoint_entry.deoptimization_index();
if (deoptimization_index != Safepoint::kNoDeoptimizationIndex) {
// The gap code is needed to get to the state expected at the
// bailout and we need to skip the call opcode to get to the
// address that needs reloc.
curr_reloc_address += safepoint_entry.gap_code_size() + 1;
int pc_delta = curr_reloc_address - prev_reloc_address;
// We use RUNTIME_ENTRY reloc info which has a size of 2 bytes
// if encodable with small pc delta encoding and up to 6 bytes
// otherwise.
if (pc_delta <= RelocInfo::kMaxSmallPCDelta) {
min_reloc_size += 2;
} else {
min_reloc_size += 6;
}
prev_reloc_address = curr_reloc_address;
}
}
// If the relocation information is not big enough we create a new
// relocation info object that is padded with comments to make it
// big enough for lazy deoptimization.
int reloc_length = code->relocation_info()->length();
if (min_reloc_size > reloc_length) {
int comment_reloc_size = RelocInfo::kMinRelocCommentSize;
// Padding needed.
int min_padding = min_reloc_size - reloc_length;
// Number of comments needed to take up at least that much space.
int additional_comments =
(min_padding + comment_reloc_size - 1) / comment_reloc_size;
// Actual padding size.
int padding = additional_comments * comment_reloc_size;
// Allocate new relocation info and copy old relocation to the end
// of the new relocation info array because relocation info is
// written and read backwards.
Handle<ByteArray> new_reloc =
Factory::NewByteArray(reloc_length + padding, TENURED);
memcpy(new_reloc->GetDataStartAddress() + padding,
code->relocation_info()->GetDataStartAddress(),
reloc_length);
// Create a relocation writer to write the comments in the padding
// space. Use position 0 for everything to ensure short encoding.
RelocInfoWriter reloc_info_writer(
new_reloc->GetDataStartAddress() + padding, 0);
intptr_t comment_string
= reinterpret_cast<intptr_t>(RelocInfo::kFillerCommentString);
RelocInfo rinfo(0, RelocInfo::COMMENT, comment_string);
for (int i = 0; i < additional_comments; ++i) {
#ifdef DEBUG
byte* pos_before = reloc_info_writer.pos();
#endif
reloc_info_writer.Write(&rinfo);
ASSERT(RelocInfo::kMinRelocCommentSize ==
pos_before - reloc_info_writer.pos());
}
// Replace relocation information on the code object.
code->set_relocation_info(*new_reloc);
}
}
void Deoptimizer::DeoptimizeFunction(JSFunction* function) { void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
AssertNoAllocation no_allocation; AssertNoAllocation no_allocation;

40
deps/v8/src/ia32/lithium-codegen-ia32.cc

@ -31,6 +31,7 @@
#include "ia32/lithium-codegen-ia32.h" #include "ia32/lithium-codegen-ia32.h"
#include "code-stubs.h" #include "code-stubs.h"
#include "deoptimizer.h"
#include "stub-cache.h" #include "stub-cache.h"
namespace v8 { namespace v8 {
@ -43,20 +44,13 @@ class SafepointGenerator : public PostCallGenerator {
public: public:
SafepointGenerator(LCodeGen* codegen, SafepointGenerator(LCodeGen* codegen,
LPointerMap* pointers, LPointerMap* pointers,
int deoptimization_index, int deoptimization_index)
bool ensure_reloc_space = false)
: codegen_(codegen), : codegen_(codegen),
pointers_(pointers), pointers_(pointers),
deoptimization_index_(deoptimization_index), deoptimization_index_(deoptimization_index) {}
ensure_reloc_space_(ensure_reloc_space) { }
virtual ~SafepointGenerator() { } virtual ~SafepointGenerator() { }
virtual void Generate() { virtual void Generate() {
// Ensure that we have enough space in the reloc info to patch
// this with calls when doing deoptimization.
if (ensure_reloc_space_) {
codegen_->EnsureRelocSpaceForDeoptimization();
}
codegen_->RecordSafepoint(pointers_, deoptimization_index_); codegen_->RecordSafepoint(pointers_, deoptimization_index_);
} }
@ -64,7 +58,6 @@ class SafepointGenerator : public PostCallGenerator {
LCodeGen* codegen_; LCodeGen* codegen_;
LPointerMap* pointers_; LPointerMap* pointers_;
int deoptimization_index_; int deoptimization_index_;
bool ensure_reloc_space_;
}; };
@ -78,7 +71,6 @@ bool LCodeGen::GenerateCode() {
return GeneratePrologue() && return GeneratePrologue() &&
GenerateBody() && GenerateBody() &&
GenerateDeferredCode() && GenerateDeferredCode() &&
GenerateRelocPadding() &&
GenerateSafepointTable(); GenerateSafepointTable();
} }
@ -88,6 +80,7 @@ void LCodeGen::FinishCode(Handle<Code> code) {
code->set_stack_slots(StackSlotCount()); code->set_stack_slots(StackSlotCount());
code->set_safepoint_table_offset(safepoints_.GetCodeOffset()); code->set_safepoint_table_offset(safepoints_.GetCodeOffset());
PopulateDeoptimizationData(code); PopulateDeoptimizationData(code);
Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(code);
} }
@ -385,22 +378,6 @@ void LCodeGen::WriteTranslation(LEnvironment* environment,
} }
void LCodeGen::EnsureRelocSpaceForDeoptimization() {
// Since we patch the reloc info with RUNTIME_ENTRY calls every patch
// site will take up 2 bytes + any pc-jumps.
// We are conservative and always reserve 6 bytes in case where a
// simple pc-jump is not enough.
uint32_t pc_delta =
masm()->pc_offset() - deoptimization_reloc_size.last_pc_offset;
if (is_uintn(pc_delta, 6)) {
deoptimization_reloc_size.min_size += 2;
} else {
deoptimization_reloc_size.min_size += 6;
}
deoptimization_reloc_size.last_pc_offset = masm()->pc_offset();
}
void LCodeGen::AddToTranslation(Translation* translation, void LCodeGen::AddToTranslation(Translation* translation,
LOperand* op, LOperand* op,
bool is_tagged) { bool is_tagged) {
@ -454,7 +431,6 @@ void LCodeGen::CallCode(Handle<Code> code,
} }
__ call(code, mode); __ call(code, mode);
EnsureRelocSpaceForDeoptimization();
RegisterLazyDeoptimization(instr); RegisterLazyDeoptimization(instr);
// Signal that we don't inline smi code before these stubs in the // Signal that we don't inline smi code before these stubs in the
@ -479,6 +455,7 @@ void LCodeGen::CallRuntime(Runtime::Function* fun,
__ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
} }
__ CallRuntime(fun, argc); __ CallRuntime(fun, argc);
RegisterLazyDeoptimization(instr); RegisterLazyDeoptimization(instr);
} }
@ -2299,8 +2276,7 @@ void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
RegisterEnvironmentForDeoptimization(env); RegisterEnvironmentForDeoptimization(env);
SafepointGenerator safepoint_generator(this, SafepointGenerator safepoint_generator(this,
pointers, pointers,
env->deoptimization_index(), env->deoptimization_index());
true);
v8::internal::ParameterCount actual(eax); v8::internal::ParameterCount actual(eax);
__ InvokeFunction(function, actual, CALL_FUNCTION, &safepoint_generator); __ InvokeFunction(function, actual, CALL_FUNCTION, &safepoint_generator);
} }
@ -2372,7 +2348,6 @@ void LCodeGen::CallKnownFunction(Handle<JSFunction> function,
__ CallSelf(); __ CallSelf();
} else { } else {
__ call(FieldOperand(edi, JSFunction::kCodeEntryOffset)); __ call(FieldOperand(edi, JSFunction::kCodeEntryOffset));
EnsureRelocSpaceForDeoptimization();
} }
// Setup deoptimization. // Setup deoptimization.
@ -3835,8 +3810,7 @@ void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) {
// builtin) // builtin)
SafepointGenerator safepoint_generator(this, SafepointGenerator safepoint_generator(this,
pointers, pointers,
env->deoptimization_index(), env->deoptimization_index());
true);
__ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset)); __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
__ push(Immediate(Smi::FromInt(strict_mode_flag()))); __ push(Immediate(Smi::FromInt(strict_mode_flag())));
__ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, &safepoint_generator); __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, &safepoint_generator);

2
deps/v8/src/version.cc

@ -35,7 +35,7 @@
#define MAJOR_VERSION 3 #define MAJOR_VERSION 3
#define MINOR_VERSION 1 #define MINOR_VERSION 1
#define BUILD_NUMBER 8 #define BUILD_NUMBER 8
#define PATCH_LEVEL 3 #define PATCH_LEVEL 5
#define CANDIDATE_VERSION false #define CANDIDATE_VERSION false
// Define SONAME to have the SCons build put a specific SONAME into the // Define SONAME to have the SCons build put a specific SONAME into the

5
deps/v8/src/x64/deoptimizer-x64.cc

@ -101,6 +101,11 @@ class SafepointTableDeoptimiztionEntryIterator {
}; };
void Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(Handle<Code> code) {
// TODO(1276): Implement.
}
void Deoptimizer::DeoptimizeFunction(JSFunction* function) { void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
AssertNoAllocation no_allocation; AssertNoAllocation no_allocation;

18
deps/v8/src/x64/lithium-codegen-x64.cc

@ -43,20 +43,16 @@ class SafepointGenerator : public PostCallGenerator {
public: public:
SafepointGenerator(LCodeGen* codegen, SafepointGenerator(LCodeGen* codegen,
LPointerMap* pointers, LPointerMap* pointers,
int deoptimization_index, int deoptimization_index)
bool ensure_reloc_space = false)
: codegen_(codegen), : codegen_(codegen),
pointers_(pointers), pointers_(pointers),
deoptimization_index_(deoptimization_index), deoptimization_index_(deoptimization_index) { }
ensure_reloc_space_(ensure_reloc_space) { }
virtual ~SafepointGenerator() { } virtual ~SafepointGenerator() { }
virtual void Generate() { virtual void Generate() {
// Ensure that we have enough space in the reloc info to patch // Ensure that we have enough space in the reloc info to patch
// this with calls when doing deoptimization. // this with calls when doing deoptimization.
if (ensure_reloc_space_) { codegen_->masm()->RecordComment(RelocInfo::kFillerCommentString, true);
codegen_->masm()->RecordComment(RelocInfo::kFillerCommentString, true);
}
codegen_->RecordSafepoint(pointers_, deoptimization_index_); codegen_->RecordSafepoint(pointers_, deoptimization_index_);
} }
@ -64,7 +60,6 @@ class SafepointGenerator : public PostCallGenerator {
LCodeGen* codegen_; LCodeGen* codegen_;
LPointerMap* pointers_; LPointerMap* pointers_;
int deoptimization_index_; int deoptimization_index_;
bool ensure_reloc_space_;
}; };
@ -87,6 +82,7 @@ void LCodeGen::FinishCode(Handle<Code> code) {
code->set_stack_slots(StackSlotCount()); code->set_stack_slots(StackSlotCount());
code->set_safepoint_table_offset(safepoints_.GetCodeOffset()); code->set_safepoint_table_offset(safepoints_.GetCodeOffset());
PopulateDeoptimizationData(code); PopulateDeoptimizationData(code);
Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(code);
} }
@ -2220,8 +2216,7 @@ void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
RegisterEnvironmentForDeoptimization(env); RegisterEnvironmentForDeoptimization(env);
SafepointGenerator safepoint_generator(this, SafepointGenerator safepoint_generator(this,
pointers, pointers,
env->deoptimization_index(), env->deoptimization_index());
true);
v8::internal::ParameterCount actual(rax); v8::internal::ParameterCount actual(rax);
__ InvokeFunction(function, actual, CALL_FUNCTION, &safepoint_generator); __ InvokeFunction(function, actual, CALL_FUNCTION, &safepoint_generator);
} }
@ -3597,8 +3592,7 @@ void LCodeGen::DoDeleteProperty(LDeleteProperty* instr) {
// builtin) // builtin)
SafepointGenerator safepoint_generator(this, SafepointGenerator safepoint_generator(this,
pointers, pointers,
env->deoptimization_index(), env->deoptimization_index());
true);
__ Push(Smi::FromInt(strict_mode_flag())); __ Push(Smi::FromInt(strict_mode_flag()));
__ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, &safepoint_generator); __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION, &safepoint_generator);
} }

65
deps/v8/test/mjsunit/compiler/regress-loadfield.js

@ -0,0 +1,65 @@
// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Regression test for GVN on field loads.
function bar() {}
// Make sure there is a transition on adding "bar" inobject property.
var b = new bar();
b.bar = "bar";
function test(a) {
var b = new Array(10);
for (var i = 0; i < 10; i++) {
b[i] = new bar();
}
for (var i = 0; i < 10; i++) {
b[i].bar = a.foo;
}
}
// Create an object with fast backing store properties.
var a = {};
a.p1 = "";
a.p2 = "";
a.p3 = "";
a.p4 = "";
a.p5 = "";
a.p6 = "";
a.p7 = "";
a.p8 = "";
a.p9 = "";
a.p10 = "";
a.p11 = "";
a.foo = "foo";
for (var i = 0; i < 100000; i++) {
test(a);
}
test("");

52
deps/v8/test/mjsunit/regress/regress-lazy-deopt-reloc.js

@ -0,0 +1,52 @@
// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Do not generate debug code since that will space things differently
// in the generated code.
// Flags: --allow-natives-syntax --expose-gc --nodebug-code
// Regression test for issue where we did not pad the relocation
// information enough to have room for lazy deoptimization.
function kaboom() {
var a = function () {},
b = function () {},
c, d = function () { var d = []; },
e = function () { var e = {}; };
c = function () { d(); b(); };
return function (x, y) {
c();
a();
return function f() { }({});
};
}
kaboom();
%DeoptimizeFunction(kaboom);
gc();
Loading…
Cancel
Save