
v8: upgrade to 3.24.35.22

v0.11.13-release
Author: Trevor Norris, 11 years ago
Commit: 8d6fa72d97
36 changed files (lines changed per file in parentheses):

1. deps/v8/include/v8.h (1)
2. deps/v8/src/api.cc (15)
3. deps/v8/src/arm/lithium-arm.cc (44)
4. deps/v8/src/arm/lithium-codegen-arm.cc (11)
5. deps/v8/src/arm/lithium-codegen-arm.h (1)
6. deps/v8/src/arraybuffer.js (5)
7. deps/v8/src/code-stubs-hydrogen.cc (2)
8. deps/v8/src/code-stubs.h (11)
9. deps/v8/src/deoptimizer.cc (24)
10. deps/v8/src/heap.cc (3)
11. deps/v8/src/hydrogen-bce.cc (20)
12. deps/v8/src/hydrogen-instructions.cc (2)
13. deps/v8/src/hydrogen-instructions.h (6)
14. deps/v8/src/hydrogen.cc (9)
15. deps/v8/src/ia32/lithium-codegen-ia32.cc (7)
16. deps/v8/src/ia32/lithium-ia32.cc (45)
17. deps/v8/src/ia32/lithium-ia32.h (6)
18. deps/v8/src/mips/lithium-codegen-mips.cc (10)
19. deps/v8/src/mips/lithium-codegen-mips.h (1)
20. deps/v8/src/mips/lithium-mips.cc (44)
21. deps/v8/src/mips/lithium-mips.h (6)
22. deps/v8/src/runtime.cc (50)
23. deps/v8/src/runtime.h (3)
24. deps/v8/src/safepoint-table.h (3)
25. deps/v8/src/serialize.h (2)
26. deps/v8/src/typedarray.js (38)
27. deps/v8/src/version.cc (2)
28. deps/v8/src/x64/lithium-codegen-x64.cc (11)
29. deps/v8/src/x64/lithium-codegen-x64.h (1)
30. deps/v8/src/x64/lithium-x64.cc (44)
31. deps/v8/src/x64/lithium-x64.h (6)
32. deps/v8/test/cctest/test-api.cc (45)
33. deps/v8/test/mjsunit/regress/regress-350863.js (45)
34. deps/v8/test/mjsunit/regress/regress-352982.js (51)
35. deps/v8/test/mjsunit/regress/regress-353004.js (74)
36. deps/v8/test/mjsunit/regress/regress-crbug-352929.js (33)

deps/v8/include/v8.h (1 line changed)

@@ -131,7 +131,6 @@ template<class T, class P> class WeakCallbackObject;
class FunctionTemplate;
class ObjectTemplate;
class Data;
template<typename T> class FunctionCallbackInfo;
template<typename T> class PropertyCallbackInfo;
class StackTrace;
class StackFrame;

deps/v8/src/api.cc (15 lines changed)

@@ -5805,20 +5805,7 @@ void v8::ArrayBuffer::Neuter() {
"Only externalized ArrayBuffers can be neutered");
LOG_API(obj->GetIsolate(), "v8::ArrayBuffer::Neuter()");
ENTER_V8(isolate);
for (i::Handle<i::Object> view_obj(obj->weak_first_view(), isolate);
!view_obj->IsUndefined();) {
i::Handle<i::JSArrayBufferView> view(i::JSArrayBufferView::cast(*view_obj));
if (view->IsJSTypedArray()) {
i::JSTypedArray::cast(*view)->Neuter();
} else if (view->IsJSDataView()) {
i::JSDataView::cast(*view)->Neuter();
} else {
UNREACHABLE();
}
view_obj = i::handle(view->weak_next(), isolate);
}
obj->Neuter();
i::Runtime::NeuterArrayBuffer(obj);
}
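
A note on the change above: the view-walking loop moves out of the API layer into a shared Runtime::NeuterArrayBuffer helper (added in runtime.cc below), so the same logic also backs the new %ArrayBufferNeuter runtime function. A minimal sketch of the observable behavior, assuming d8 with --allow-natives-syntax and the mjsunit harness:

var buf = new ArrayBuffer(16);
var view = new Uint8Array(buf);
%ArrayBufferNeuter(buf);       // walks the weak_first_view/weak_next list above
assertEquals(0, view.length);  // every attached view is neutered with its buffer
assertEquals(0, buf.byteLength);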

deps/v8/src/arm/lithium-arm.cc (44 lines changed)

@@ -614,15 +614,6 @@ LInstruction* LChunkBuilder::MarkAsCall(LInstruction* instr,
instr->MarkAsCall();
instr = AssignPointerMap(instr);
if (hinstr->HasObservableSideEffects()) {
ASSERT(hinstr->next()->IsSimulate());
HSimulate* sim = HSimulate::cast(hinstr->next());
ASSERT(instruction_pending_deoptimization_environment_ == NULL);
ASSERT(pending_deoptimization_ast_id_.IsNone());
instruction_pending_deoptimization_environment_ = instr;
pending_deoptimization_ast_id_ = sim->ast_id();
}
// If instruction does not have side-effects lazy deoptimization
// after the call will try to deoptimize to the point before the call.
// Thus we still need to attach environment to this call even if
@@ -906,6 +897,26 @@ void LChunkBuilder::VisitInstruction(HInstruction* current) {
instr = AssignEnvironment(instr);
}
chunk_->AddInstruction(instr, current_block_);
if (instr->IsCall()) {
HValue* hydrogen_value_for_lazy_bailout = current;
LInstruction* instruction_needing_environment = NULL;
if (current->HasObservableSideEffects()) {
HSimulate* sim = HSimulate::cast(current->next());
instruction_needing_environment = instr;
sim->ReplayEnvironment(current_block_->last_environment());
hydrogen_value_for_lazy_bailout = sim;
}
LInstruction* bailout = AssignEnvironment(new(zone()) LLazyBailout());
bailout->set_hydrogen_value(hydrogen_value_for_lazy_bailout);
chunk_->AddInstruction(bailout, current_block_);
if (instruction_needing_environment != NULL) {
// Store the lazy deopt environment with the instruction if needed.
// Right now it is only used for LInstanceOfKnownGlobal.
instruction_needing_environment->
SetDeferredLazyDeoptimizationEnvironment(bailout->environment());
}
}
}
current_instruction_ = old_current;
}
@@ -2378,21 +2389,6 @@ LInstruction* LChunkBuilder::DoIsConstructCallAndBranch(
LInstruction* LChunkBuilder::DoSimulate(HSimulate* instr) {
instr->ReplayEnvironment(current_block_->last_environment());
// If there is an instruction pending deoptimization environment create a
// lazy bailout instruction to capture the environment.
if (pending_deoptimization_ast_id_ == instr->ast_id()) {
LInstruction* result = new(zone()) LLazyBailout;
result = AssignEnvironment(result);
// Store the lazy deopt environment with the instruction if needed. Right
// now it is only used for LInstanceOfKnownGlobal.
instruction_pending_deoptimization_environment_->
SetDeferredLazyDeoptimizationEnvironment(result->environment());
instruction_pending_deoptimization_environment_ = NULL;
pending_deoptimization_ast_id_ = BailoutId::None();
return result;
}
return NULL;
}
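
The two removals above, together with the new block in VisitInstruction, change when lazy-bailout points are emitted: instead of deferring the LLazyBailout to the next HSimulate, the builder now plants one immediately after every call instruction (the same restructuring is repeated for ia32, mips, and x64 below). A hedged sketch of why this matters, assuming d8 with --allow-natives-syntax and --expose-gc (cf. regress-352982.js below):

function f(a) {
  gc();          // a call that can mark all optimized code for lazy deopt
  return a + 1;  // reached through the lazy-deopt point right after the call
}
f(1);
%OptimizeFunctionOnNextCall(f);
f(1);  // if gc() requests deoptimization, f can now deopt just after the call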

deps/v8/src/arm/lithium-codegen-arm.cc (11 lines changed)

@@ -269,6 +269,13 @@ void LCodeGen::GenerateOsrPrologue() {
}
void LCodeGen::GenerateBodyInstructionPre(LInstruction* instr) {
if (!instr->IsLazyBailout() && !instr->IsGap()) {
safepoints_.BumpLastLazySafepointIndex();
}
}
bool LCodeGen::GenerateDeferredCode() {
ASSERT(is_generating());
if (deferred_.length() > 0) {
@@ -2112,7 +2119,6 @@ void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
// is in the correct position.
Assembler::BlockConstPoolScope block_const_pool(masm());
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
__ nop(); // Signals no inlined code.
}
@@ -4402,7 +4408,8 @@ void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) {
this, Safepoint::kWithRegistersAndDoubles);
__ Move(r0, object_reg);
__ Move(r1, to_map);
TransitionElementsKindStub stub(from_kind, to_kind);
bool is_js_array = from_map->instance_type() == JS_ARRAY_TYPE;
TransitionElementsKindStub stub(from_kind, to_kind, is_js_array);
__ CallStub(&stub);
RecordSafepointWithRegistersAndDoubles(
instr->pointer_map(), 0, Safepoint::kNoLazyDeopt);

deps/v8/src/arm/lithium-codegen-arm.h (1 line changed)

@@ -191,6 +191,7 @@ class LCodeGen: public LCodeGenBase {
// Code generation passes. Returns true if code generation should
// continue.
void GenerateBodyInstructionPre(LInstruction* instr) V8_OVERRIDE;
bool GeneratePrologue();
bool GenerateDeferredCode();
bool GenerateDeoptJumpTable();

deps/v8/src/arraybuffer.js (5 lines changed)

@@ -56,6 +56,9 @@ function ArrayBufferSlice(start, end) {
}
var relativeStart = TO_INTEGER(start);
if (!IS_UNDEFINED(end)) {
end = TO_INTEGER(end);
}
var first;
var byte_length = %ArrayBufferGetByteLength(this);
if (relativeStart < 0) {
@@ -63,7 +66,7 @@
} else {
first = MathMin(relativeStart, byte_length);
}
var relativeEnd = IS_UNDEFINED(end) ? byte_length : TO_INTEGER(end);
var relativeEnd = IS_UNDEFINED(end) ? byte_length : end;
var fin;
if (relativeEnd < 0) {
fin = MathMax(byte_length + relativeEnd, 0);
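
The point of this reordering: TO_INTEGER(end) can run arbitrary user code via valueOf, so it is now evaluated before the buffer's byte length is read and the bounds are computed. The pattern it defends against, taken from regress-353004.js below (d8 with --allow-natives-syntax and the mjsunit harness assumed):

var buffer = new ArrayBuffer(100 * 1024 * 1024);
var copy = buffer.slice(0, {valueOf: function() {
  %ArrayBufferNeuter(buffer);  // side effect during coercion of `end`
  return 100 * 1024 * 1024;
}});
assertEquals(0, copy.byteLength);  // byte_length is read after `end` is coerced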

deps/v8/src/code-stubs-hydrogen.cc (2 lines changed)

@@ -618,7 +618,7 @@ HValue* CodeStubGraphBuilder<TransitionElementsKindStub>::BuildCodeStub() {
GetParameter(1),
casted_stub()->from_kind(),
casted_stub()->to_kind(),
true);
casted_stub()->is_js_array());
return GetParameter(0);
}

deps/v8/src/code-stubs.h (11 lines changed)

@@ -2009,9 +2009,11 @@ class KeyedStoreFastElementStub : public HydrogenCodeStub {
class TransitionElementsKindStub : public HydrogenCodeStub {
public:
TransitionElementsKindStub(ElementsKind from_kind,
ElementsKind to_kind) {
ElementsKind to_kind,
bool is_js_array) {
bit_field_ = FromKindBits::encode(from_kind) |
ToKindBits::encode(to_kind);
ToKindBits::encode(to_kind) |
IsJSArrayBits::encode(is_js_array);
}
ElementsKind from_kind() const {
@@ -2022,6 +2024,10 @@ class TransitionElementsKindStub : public HydrogenCodeStub {
return ToKindBits::decode(bit_field_);
}
bool is_js_array() const {
return IsJSArrayBits::decode(bit_field_);
}
virtual Handle<Code> GenerateCode(Isolate* isolate);
virtual void InitializeInterfaceDescriptor(
@@ -2031,6 +2037,7 @@ class TransitionElementsKindStub : public HydrogenCodeStub {
private:
class FromKindBits: public BitField<ElementsKind, 8, 8> {};
class ToKindBits: public BitField<ElementsKind, 0, 8> {};
class IsJSArrayBits: public BitField<bool, 16, 1> {};
uint32_t bit_field_;
Major MajorKey() { return TransitionElementsKind; }
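
Previously the generated stub hard-coded the receiver as a JS array (the `true` replaced in code-stubs-hydrogen.cc above); the stub key now carries an explicit is_js_array bit, computed by each lithium backend from the receiver map's instance type. A hedged JS sketch of the two cases the bit distinguishes; whether a given store actually compiles through this stub depends on the optimizer:

var arr = [1, 2, 3];
arr[0] = 1.5;            // elements-kind transition on a JSArray
var obj = {0: 1, 1: 2};
obj[0] = 1.5;            // the same transition on a non-array receiver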

deps/v8/src/deoptimizer.cc (24 lines changed)

@@ -393,9 +393,33 @@ void Deoptimizer::DeoptimizeMarkedCodeForContext(Context* context) {
element = next;
}
#ifdef DEBUG
// Make sure all activations of optimized code can deopt at their current PC.
for (StackFrameIterator it(isolate, isolate->thread_local_top());
!it.done(); it.Advance()) {
StackFrame::Type type = it.frame()->type();
if (type == StackFrame::OPTIMIZED) {
Code* code = it.frame()->LookupCode();
if (FLAG_trace_deopt) {
JSFunction* function =
static_cast<OptimizedFrame*>(it.frame())->function();
CodeTracer::Scope scope(isolate->GetCodeTracer());
PrintF(scope.file(), "[deoptimizer patches for lazy deopt: ");
function->PrintName(scope.file());
PrintF(scope.file(),
" / %" V8PRIxPTR "]\n", reinterpret_cast<intptr_t>(function));
}
SafepointEntry safepoint = code->GetSafepointEntry(it.frame()->pc());
int deopt_index = safepoint.deoptimization_index();
CHECK(deopt_index != Safepoint::kNoDeoptimizationIndex);
}
}
#endif
// TODO(titzer): we need a handle scope only because of the macro assembler,
// which is only used in EnsureCodeForDeoptimizationEntry.
HandleScope scope(isolate);
// Now patch all the codes for deoptimization.
for (int i = 0; i < codes.length(); i++) {
// It is finally time to die, code object.

deps/v8/src/heap.cc (3 lines changed)

@@ -590,6 +590,9 @@ void Heap::GarbageCollectionEpilogue() {
if (FLAG_code_stats) ReportCodeStatistics("After GC");
#endif
if (FLAG_deopt_every_n_garbage_collections > 0) {
// TODO(jkummerow/ulan/jarin): This is not safe! We can't assume that
// the topmost optimized frame can be deoptimized safely, because it
// might not have a lazy bailout point right after its current PC.
if (++gcs_since_last_deopt_ == FLAG_deopt_every_n_garbage_collections) {
Deoptimizer::DeoptimizeAll(isolate());
gcs_since_last_deopt_ = 0;

deps/v8/src/hydrogen-bce.cc (20 lines changed)

@@ -132,6 +132,24 @@ class BoundsCheckBbData: public ZoneObject {
bool HasSingleCheck() { return lower_check_ == upper_check_; }
void UpdateUpperOffsets(HBoundsCheck* check, int32_t offset) {
BoundsCheckBbData* data = FatherInDominatorTree();
while (data != NULL && data->UpperCheck() == check) {
ASSERT(data->upper_offset_ <= offset);
data->upper_offset_ = offset;
data = data->FatherInDominatorTree();
}
}
void UpdateLowerOffsets(HBoundsCheck* check, int32_t offset) {
BoundsCheckBbData* data = FatherInDominatorTree();
while (data != NULL && data->LowerCheck() == check) {
ASSERT(data->lower_offset_ > offset);
data->lower_offset_ = offset;
data = data->FatherInDominatorTree();
}
}
// The goal of this method is to modify either upper_offset_ or
// lower_offset_ so that also new_offset is covered (the covered
// range grows).
@@ -156,6 +174,7 @@ class BoundsCheckBbData: public ZoneObject {
upper_check_ = new_check;
} else {
TightenCheck(upper_check_, new_check);
UpdateUpperOffsets(upper_check_, upper_offset_);
}
} else if (new_offset < lower_offset_) {
lower_offset_ = new_offset;
@@ -164,6 +183,7 @@ class BoundsCheckBbData: public ZoneObject {
lower_check_ = new_check;
} else {
TightenCheck(lower_check_, new_check);
UpdateLowerOffsets(lower_check_, lower_offset_);
}
} else {
// Should never have called CoverCheck() in this case.
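
The fix: when CoverCheck() tightens an existing bounds check, ancestors in the dominator tree that recorded the same check must have their cached offsets updated, otherwise a later, wider access could be treated as already covered and its check elided. The shape of the miscompile, condensed from regress-crbug-352929.js below:

function fun(base, cond) {
  array[base - 1] = 1;
  array[base - 2] = 2;        // tightens the dominating lower check
  if (cond) {
    array[base - 4] = 3;
  } else {
    array[base - 100] = 777;  // must not be assumed covered
  }
}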

deps/v8/src/hydrogen-instructions.cc (2 lines changed)

@@ -2413,6 +2413,7 @@ void HSimulate::PrintDataTo(StringStream* stream) {
void HSimulate::ReplayEnvironment(HEnvironment* env) {
if (done_with_replay_) return;
ASSERT(env != NULL);
env->set_ast_id(ast_id());
env->Drop(pop_count());
@@ -2424,6 +2425,7 @@ void HSimulate::ReplayEnvironment(HEnvironment* env) {
env->Push(value);
}
}
done_with_replay_ = true;
}

deps/v8/src/hydrogen-instructions.h (6 lines changed)

@@ -1799,7 +1799,8 @@ class HSimulate V8_FINAL : public HInstruction {
values_(2, zone),
assigned_indexes_(2, zone),
zone_(zone),
removable_(removable) {}
removable_(removable),
done_with_replay_(false) {}
~HSimulate() {}
virtual void PrintDataTo(StringStream* stream) V8_OVERRIDE;
@@ -1882,7 +1883,8 @@ class HSimulate V8_FINAL : public HInstruction {
ZoneList<HValue*> values_;
ZoneList<int> assigned_indexes_;
Zone* zone_;
RemovableSimulate removable_;
RemovableSimulate removable_ : 2;
bool done_with_replay_ : 1;
#ifdef DEBUG
Handle<JSFunction> closure_;

deps/v8/src/hydrogen.cc (9 lines changed)

@@ -9764,6 +9764,15 @@ HInstruction* HOptimizedGraphBuilder::BuildFastLiteral(
HInstruction* object = Add<HAllocate>(object_size_constant, type,
pretenure_flag, instance_type, site_context->current());
// If allocation folding reaches Page::kMaxRegularHeapObjectSize the
// elements array may not get folded into the object. Hence, we set the
// elements pointer to empty fixed array and let store elimination remove
// this store in the folding case.
HConstant* empty_fixed_array = Add<HConstant>(
isolate()->factory()->empty_fixed_array());
Add<HStoreNamedField>(object, HObjectAccess::ForElementsPointer(),
empty_fixed_array, INITIALIZING_STORE);
BuildEmitObjectHeader(boilerplate_object, object);
Handle<FixedArrayBase> elements(boilerplate_object->elements());

deps/v8/src/ia32/lithium-codegen-ia32.cc (7 lines changed)

@@ -390,6 +390,9 @@ void LCodeGen::GenerateOsrPrologue() {
void LCodeGen::GenerateBodyInstructionPre(LInstruction* instr) {
if (!instr->IsLazyBailout() && !instr->IsGap()) {
safepoints_.BumpLastLazySafepointIndex();
}
if (!CpuFeatures::IsSupported(SSE2)) FlushX87StackIfNecessary(instr);
}
@@ -2274,7 +2277,6 @@ void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
BinaryOpICStub stub(instr->op(), NO_OVERWRITE);
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
__ nop(); // Signals no inlined code.
}
@@ -4696,7 +4698,8 @@ void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) {
__ mov(eax, object_reg);
}
__ mov(ebx, to_map);
TransitionElementsKindStub stub(from_kind, to_kind);
bool is_js_array = from_map->instance_type() == JS_ARRAY_TYPE;
TransitionElementsKindStub stub(from_kind, to_kind, is_js_array);
__ CallStub(&stub);
RecordSafepointWithRegisters(
instr->pointer_map(), 0, Safepoint::kNoLazyDeopt);

deps/v8/src/ia32/lithium-ia32.cc (45 lines changed)

@@ -679,15 +679,6 @@ LInstruction* LChunkBuilder::MarkAsCall(LInstruction* instr,
instr->MarkAsCall();
instr = AssignPointerMap(instr);
if (hinstr->HasObservableSideEffects()) {
ASSERT(hinstr->next()->IsSimulate());
HSimulate* sim = HSimulate::cast(hinstr->next());
ASSERT(instruction_pending_deoptimization_environment_ == NULL);
ASSERT(pending_deoptimization_ast_id_.IsNone());
instruction_pending_deoptimization_environment_ = instr;
pending_deoptimization_ast_id_ = sim->ast_id();
}
// If instruction does not have side-effects lazy deoptimization
// after the call will try to deoptimize to the point before the call.
// Thus we still need to attach environment to this call even if
@@ -980,6 +971,26 @@ void LChunkBuilder::VisitInstruction(HInstruction* current) {
chunk_->AddInstruction(clobber, current_block_);
}
chunk_->AddInstruction(instr, current_block_);
if (instr->IsCall()) {
HValue* hydrogen_value_for_lazy_bailout = current;
LInstruction* instruction_needing_environment = NULL;
if (current->HasObservableSideEffects()) {
HSimulate* sim = HSimulate::cast(current->next());
instruction_needing_environment = instr;
sim->ReplayEnvironment(current_block_->last_environment());
hydrogen_value_for_lazy_bailout = sim;
}
LInstruction* bailout = AssignEnvironment(new(zone()) LLazyBailout());
bailout->set_hydrogen_value(hydrogen_value_for_lazy_bailout);
chunk_->AddInstruction(bailout, current_block_);
if (instruction_needing_environment != NULL) {
// Store the lazy deopt environment with the instruction if needed.
// Right now it is only used for LInstanceOfKnownGlobal.
instruction_needing_environment->
SetDeferredLazyDeoptimizationEnvironment(bailout->environment());
}
}
}
current_instruction_ = old_current;
}
@@ -2486,22 +2497,6 @@ LInstruction* LChunkBuilder::DoIsConstructCallAndBranch(
LInstruction* LChunkBuilder::DoSimulate(HSimulate* instr) {
instr->ReplayEnvironment(current_block_->last_environment());
// If there is an instruction pending deoptimization environment create a
// lazy bailout instruction to capture the environment.
if (!pending_deoptimization_ast_id_.IsNone()) {
ASSERT(pending_deoptimization_ast_id_ == instr->ast_id());
LLazyBailout* lazy_bailout = new(zone()) LLazyBailout;
LInstruction* result = AssignEnvironment(lazy_bailout);
// Store the lazy deopt environment with the instruction if needed. Right
// now it is only used for LInstanceOfKnownGlobal.
instruction_pending_deoptimization_environment_->
SetDeferredLazyDeoptimizationEnvironment(result->environment());
instruction_pending_deoptimization_environment_ = NULL;
pending_deoptimization_ast_id_ = BailoutId::None();
return result;
}
return NULL;
}

deps/v8/src/ia32/lithium-ia32.h (6 lines changed)

@@ -2591,9 +2591,7 @@ class LChunkBuilder V8_FINAL : public LChunkBuilderBase {
current_instruction_(NULL),
current_block_(NULL),
next_block_(NULL),
allocator_(allocator),
instruction_pending_deoptimization_environment_(NULL),
pending_deoptimization_ast_id_(BailoutId::None()) { }
allocator_(allocator) { }
// Build the sequence for the graph.
LPlatformChunk* Build();
@@ -2736,8 +2734,6 @@ class LChunkBuilder V8_FINAL : public LChunkBuilderBase {
HBasicBlock* current_block_;
HBasicBlock* next_block_;
LAllocator* allocator_;
LInstruction* instruction_pending_deoptimization_environment_;
BailoutId pending_deoptimization_ast_id_;
DISALLOW_COPY_AND_ASSIGN(LChunkBuilder);
};

deps/v8/src/mips/lithium-codegen-mips.cc (10 lines changed)

@@ -259,6 +259,13 @@ void LCodeGen::GenerateOsrPrologue() {
}
void LCodeGen::GenerateBodyInstructionPre(LInstruction* instr) {
if (!instr->IsLazyBailout() && !instr->IsGap()) {
safepoints_.BumpLastLazySafepointIndex();
}
}
bool LCodeGen::GenerateDeferredCode() {
ASSERT(is_generating());
if (deferred_.length() > 0) {
@@ -4336,7 +4343,8 @@ void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) {
this, Safepoint::kWithRegistersAndDoubles);
__ mov(a0, object_reg);
__ li(a1, Operand(to_map));
TransitionElementsKindStub stub(from_kind, to_kind);
bool is_js_array = from_map->instance_type() == JS_ARRAY_TYPE;
TransitionElementsKindStub stub(from_kind, to_kind, is_js_array);
__ CallStub(&stub);
RecordSafepointWithRegistersAndDoubles(
instr->pointer_map(), 0, Safepoint::kNoLazyDeopt);

deps/v8/src/mips/lithium-codegen-mips.h (1 line changed)

@@ -191,6 +191,7 @@ class LCodeGen: public LCodeGenBase {
// Code generation passes. Returns true if code generation should
// continue.
void GenerateBodyInstructionPre(LInstruction* instr) V8_OVERRIDE;
bool GeneratePrologue();
bool GenerateDeferredCode();
bool GenerateDeoptJumpTable();

deps/v8/src/mips/lithium-mips.cc (44 lines changed)

@@ -619,15 +619,6 @@ LInstruction* LChunkBuilder::MarkAsCall(LInstruction* instr,
instr->MarkAsCall();
instr = AssignPointerMap(instr);
if (hinstr->HasObservableSideEffects()) {
ASSERT(hinstr->next()->IsSimulate());
HSimulate* sim = HSimulate::cast(hinstr->next());
ASSERT(instruction_pending_deoptimization_environment_ == NULL);
ASSERT(pending_deoptimization_ast_id_.IsNone());
instruction_pending_deoptimization_environment_ = instr;
pending_deoptimization_ast_id_ = sim->ast_id();
}
// If instruction does not have side-effects lazy deoptimization
// after the call will try to deoptimize to the point before the call.
// Thus we still need to attach environment to this call even if
@@ -914,6 +905,26 @@ void LChunkBuilder::VisitInstruction(HInstruction* current) {
instr = AssignEnvironment(instr);
}
chunk_->AddInstruction(instr, current_block_);
if (instr->IsCall()) {
HValue* hydrogen_value_for_lazy_bailout = current;
LInstruction* instruction_needing_environment = NULL;
if (current->HasObservableSideEffects()) {
HSimulate* sim = HSimulate::cast(current->next());
instruction_needing_environment = instr;
sim->ReplayEnvironment(current_block_->last_environment());
hydrogen_value_for_lazy_bailout = sim;
}
LInstruction* bailout = AssignEnvironment(new(zone()) LLazyBailout());
bailout->set_hydrogen_value(hydrogen_value_for_lazy_bailout);
chunk_->AddInstruction(bailout, current_block_);
if (instruction_needing_environment != NULL) {
// Store the lazy deopt environment with the instruction if needed.
// Right now it is only used for LInstanceOfKnownGlobal.
instruction_needing_environment->
SetDeferredLazyDeoptimizationEnvironment(bailout->environment());
}
}
}
current_instruction_ = old_current;
}
@@ -2307,21 +2318,6 @@ LInstruction* LChunkBuilder::DoIsConstructCallAndBranch(
LInstruction* LChunkBuilder::DoSimulate(HSimulate* instr) {
instr->ReplayEnvironment(current_block_->last_environment());
// If there is an instruction pending deoptimization environment create a
// lazy bailout instruction to capture the environment.
if (pending_deoptimization_ast_id_ == instr->ast_id()) {
LInstruction* result = new(zone()) LLazyBailout;
result = AssignEnvironment(result);
// Store the lazy deopt environment with the instruction if needed. Right
// now it is only used for LInstanceOfKnownGlobal.
instruction_pending_deoptimization_environment_->
SetDeferredLazyDeoptimizationEnvironment(result->environment());
instruction_pending_deoptimization_environment_ = NULL;
pending_deoptimization_ast_id_ = BailoutId::None();
return result;
}
return NULL;
}

deps/v8/src/mips/lithium-mips.h (6 lines changed)

@@ -2554,9 +2554,7 @@ class LChunkBuilder V8_FINAL : public LChunkBuilderBase {
current_block_(NULL),
next_block_(NULL),
allocator_(allocator),
position_(RelocInfo::kNoPosition),
instruction_pending_deoptimization_environment_(NULL),
pending_deoptimization_ast_id_(BailoutId::None()) { }
position_(RelocInfo::kNoPosition) { }
// Build the sequence for the graph.
LPlatformChunk* Build();
@@ -2691,8 +2689,6 @@ class LChunkBuilder V8_FINAL : public LChunkBuilderBase {
HBasicBlock* next_block_;
LAllocator* allocator_;
int position_;
LInstruction* instruction_pending_deoptimization_environment_;
BailoutId pending_deoptimization_ast_id_;
DISALLOW_COPY_AND_ASSIGN(LChunkBuilder);
};

deps/v8/src/runtime.cc (50 lines changed)

@@ -796,6 +796,24 @@ bool Runtime::SetupArrayBufferAllocatingData(
}
void Runtime::NeuterArrayBuffer(Handle<JSArrayBuffer> array_buffer) {
Isolate* isolate = array_buffer->GetIsolate();
for (Handle<Object> view_obj(array_buffer->weak_first_view(), isolate);
!view_obj->IsUndefined();) {
Handle<JSArrayBufferView> view(JSArrayBufferView::cast(*view_obj));
if (view->IsJSTypedArray()) {
JSTypedArray::cast(*view)->Neuter();
} else if (view->IsJSDataView()) {
JSDataView::cast(*view)->Neuter();
} else {
UNREACHABLE();
}
view_obj = handle(view->weak_next(), isolate);
}
array_buffer->Neuter();
}
RUNTIME_FUNCTION(MaybeObject*, Runtime_ArrayBufferInitialize) {
HandleScope scope(isolate);
ASSERT(args.length() == 2);
@@ -849,7 +867,9 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_ArrayBufferSliceImpl) {
if (target_length == 0) return isolate->heap()->undefined_value();
ASSERT(NumberToSize(isolate, source->byte_length()) - target_length >= start);
size_t source_byte_length = NumberToSize(isolate, source->byte_length());
CHECK(start <= source_byte_length);
CHECK(source_byte_length - start >= target_length);
uint8_t* source_data = reinterpret_cast<uint8_t*>(source->backing_store());
uint8_t* target_data = reinterpret_cast<uint8_t*>(target->backing_store());
CopyBytes(target_data, source_data + start, target_length);
@@ -867,6 +887,19 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_ArrayBufferIsView) {
}
RUNTIME_FUNCTION(MaybeObject*, Runtime_ArrayBufferNeuter) {
HandleScope scope(isolate);
CONVERT_ARG_HANDLE_CHECKED(JSArrayBuffer, array_buffer, 0);
ASSERT(!array_buffer->is_external());
void* backing_store = array_buffer->backing_store();
size_t byte_length = NumberToSize(isolate, array_buffer->byte_length());
array_buffer->set_is_external(true);
Runtime::NeuterArrayBuffer(array_buffer);
V8::ArrayBufferAllocator()->Free(backing_store, byte_length);
return isolate->heap()->undefined_value();
}
void Runtime::ArrayIdToTypeAndSize(
int arrayId, ExternalArrayType* array_type, size_t* element_size) {
switch (arrayId) {
@@ -910,7 +943,12 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_TypedArrayInitialize) {
size_t byte_offset = NumberToSize(isolate, *byte_offset_object);
size_t byte_length = NumberToSize(isolate, *byte_length_object);
ASSERT(byte_length % element_size == 0);
size_t array_buffer_byte_length =
NumberToSize(isolate, buffer->byte_length());
CHECK(byte_offset <= array_buffer_byte_length);
CHECK(array_buffer_byte_length - byte_offset >= byte_length);
CHECK_EQ(0, static_cast<int>(byte_length % element_size));
size_t length = byte_length / element_size;
if (length > static_cast<unsigned>(Smi::kMaxValue)) {
@@ -1645,6 +1683,14 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_SetPrototype) {
ASSERT(args.length() == 2);
CONVERT_ARG_HANDLE_CHECKED(JSObject, obj, 0);
CONVERT_ARG_HANDLE_CHECKED(Object, prototype, 1);
if (obj->IsAccessCheckNeeded() &&
!isolate->MayNamedAccessWrapper(obj,
isolate->factory()->proto_string(),
v8::ACCESS_SET)) {
isolate->ReportFailedAccessCheck(*obj, v8::ACCESS_SET);
RETURN_IF_SCHEDULED_EXCEPTION(isolate);
return isolate->heap()->undefined_value();
}
if (FLAG_harmony_observation && obj->map()->is_observed()) {
Handle<Object> old_value(
GetPrototypeSkipHiddenPrototypes(isolate, *obj), isolate);
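
The Runtime_SetPrototype hunk closes an access-check bypass: setting or reading an object's prototype now performs the named access check first. The patterns it covers, from TEST(Regress354123) below, where `friend` is an object with access-check callbacks installed (hypothetical standalone shape; the test runs these via CompileRun):

friend.__proto__ = {};              // now access-checked
Object.setPrototypeOf(friend, {});  // now access-checked
var f = Object.getOwnPropertyDescriptor(Object.prototype, '__proto__').set;
f.call(friend, {});                 // checked even with a hijacked setter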

deps/v8/src/runtime.h (3 lines changed)

@@ -365,6 +365,7 @@ namespace internal {
F(ArrayBufferGetByteLength, 1, 1)\
F(ArrayBufferSliceImpl, 3, 1) \
F(ArrayBufferIsView, 1, 1) \
F(ArrayBufferNeuter, 1, 1) \
\
F(TypedArrayInitialize, 5, 1) \
F(TypedArrayInitializeFromArrayLike, 4, 1) \
@@ -827,6 +828,8 @@ class Runtime : public AllStatic {
size_t allocated_length,
bool initialize = true);
static void NeuterArrayBuffer(Handle<JSArrayBuffer> array_buffer);
static void FreeArrayBuffer(
Isolate* isolate,
JSArrayBuffer* phantom_array_buffer);

deps/v8/src/safepoint-table.h (3 lines changed)

@@ -219,6 +219,9 @@ class SafepointTableBuilder BASE_EMBEDDED {
// Record deoptimization index for lazy deoptimization for the last
// outstanding safepoints.
void RecordLazyDeoptimizationIndex(int index);
void BumpLastLazySafepointIndex() {
last_lazy_safepoint_ = deopt_index_list_.length();
}
// Emit the safepoint table after the body. The number of bits per
// entry must be enough to hold all the pointer indexes.

deps/v8/src/serialize.h (2 lines changed)

@@ -60,7 +60,7 @@ const int kReferenceTypeShift = kReferenceIdBits;
const int kDebugRegisterBits = 4;
const int kDebugIdShift = kDebugRegisterBits;
const int kDeoptTableSerializeEntryCount = 8;
const int kDeoptTableSerializeEntryCount = 12;
// ExternalReferenceTable is a helper class that defines the relationship
// between external references and their encodings. It is used to build

deps/v8/src/typedarray.js (38 lines changed)

@@ -49,12 +49,20 @@ endmacro
macro TYPED_ARRAY_CONSTRUCTOR(ARRAY_ID, NAME, ELEMENT_SIZE)
function NAMEConstructByArrayBuffer(obj, buffer, byteOffset, length) {
if (!IS_UNDEFINED(byteOffset)) {
byteOffset =
ToPositiveInteger(byteOffset, "invalid_typed_array_length");
}
if (!IS_UNDEFINED(length)) {
length = ToPositiveInteger(length, "invalid_typed_array_length");
}
var bufferByteLength = %ArrayBufferGetByteLength(buffer);
var offset;
if (IS_UNDEFINED(byteOffset)) {
offset = 0;
} else {
offset = ToPositiveInteger(byteOffset, "invalid_typed_array_length");
offset = byteOffset;
if (offset % ELEMENT_SIZE !== 0) {
throw MakeRangeError("invalid_typed_array_alignment",
@@ -75,7 +83,7 @@ macro TYPED_ARRAY_CONSTRUCTOR(ARRAY_ID, NAME, ELEMENT_SIZE)
newByteLength = bufferByteLength - offset;
newLength = newByteLength / ELEMENT_SIZE;
} else {
var newLength = ToPositiveInteger(length, "invalid_typed_array_length");
var newLength = length;
newByteLength = newLength * ELEMENT_SIZE;
}
if ((offset + newByteLength > bufferByteLength)
@@ -99,6 +107,7 @@ macro TYPED_ARRAY_CONSTRUCTOR(ARRAY_ID, NAME, ELEMENT_SIZE)
function NAMEConstructByArrayLike(obj, arrayLike) {
var length = arrayLike.length;
var l = ToPositiveInteger(length, "invalid_typed_array_length");
if (l > %MaxSmi()) {
throw MakeRangeError("invalid_typed_array_length");
}
@@ -148,15 +157,19 @@ function TypedArrayGetLength() {
function CreateSubArray(elementSize, constructor) {
return function(begin, end) {
var srcLength = %TypedArrayGetLength(this);
var beginInt = TO_INTEGER(begin);
if (!IS_UNDEFINED(end)) {
end = TO_INTEGER(end);
}
var srcLength = %TypedArrayGetLength(this);
if (beginInt < 0) {
beginInt = MathMax(0, srcLength + beginInt);
} else {
beginInt = MathMin(srcLength, beginInt);
}
var endInt = IS_UNDEFINED(end) ? srcLength : TO_INTEGER(end);
var endInt = IS_UNDEFINED(end) ? srcLength : end;
if (endInt < 0) {
endInt = MathMax(0, srcLength + endInt);
} else {
@@ -317,14 +330,23 @@ function DataViewConstructor(buffer, byteOffset, byteLength) { // length = 3
if (!IS_ARRAYBUFFER(buffer)) {
throw MakeTypeError('data_view_not_array_buffer', []);
}
if (!IS_UNDEFINED(byteOffset)) {
byteOffset = ToPositiveInteger(byteOffset, 'invalid_data_view_offset');
}
if (!IS_UNDEFINED(byteLength)) {
byteLength = TO_INTEGER(byteLength);
}
var bufferByteLength = %ArrayBufferGetByteLength(buffer);
var offset = IS_UNDEFINED(byteOffset) ?
0 : ToPositiveInteger(byteOffset, 'invalid_data_view_offset');
var offset = IS_UNDEFINED(byteOffset) ? 0 : byteOffset;
if (offset > bufferByteLength) {
throw MakeRangeError('invalid_data_view_offset');
}
var length = IS_UNDEFINED(byteLength) ?
bufferByteLength - offset : TO_INTEGER(byteLength);
var length = IS_UNDEFINED(byteLength)
? bufferByteLength - offset
: byteLength;
if (length < 0 || offset + length > bufferByteLength) {
throw new MakeRangeError('invalid_data_view_length');
}
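
Same coercion-ordering hardening as in arraybuffer.js: byteOffset, length, and byteLength are converted to integers up front, before %ArrayBufferGetByteLength is consulted, so a valueOf that neuters the buffer can no longer run after the bounds checks. From regress-353004.js below (d8 with --allow-natives-syntax and the mjsunit harness assumed):

var buffer = new ArrayBuffer(100 * 1024);
var array = new Uint8Array(buffer, {valueOf: function() {
  %ArrayBufferNeuter(buffer);  // runs before the buffer length is read
  return 0;
}});
assertEquals(0, array.length);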

deps/v8/src/version.cc (2 lines changed)

@@ -35,7 +35,7 @@
#define MAJOR_VERSION 3
#define MINOR_VERSION 24
#define BUILD_NUMBER 35
#define PATCH_LEVEL 17
#define PATCH_LEVEL 22
// Use 1 for candidates and 0 otherwise.
// (Boolean macro values are not supported by all preprocessors.)
#define IS_CANDIDATE_VERSION 0

deps/v8/src/x64/lithium-codegen-x64.cc (11 lines changed)

@@ -273,6 +273,13 @@ void LCodeGen::GenerateOsrPrologue() {
}
void LCodeGen::GenerateBodyInstructionPre(LInstruction* instr) {
if (!instr->IsLazyBailout() && !instr->IsGap()) {
safepoints_.BumpLastLazySafepointIndex();
}
}
bool LCodeGen::GenerateJumpTable() {
Label needs_frame;
if (jump_table_.length() > 0) {
@@ -1870,7 +1877,6 @@ void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
BinaryOpICStub stub(instr->op(), NO_OVERWRITE);
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
__ nop(); // Signals no inlined code.
}
@@ -4289,7 +4295,8 @@ void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) {
__ movp(rax, object_reg);
}
__ Move(rbx, to_map);
TransitionElementsKindStub stub(from_kind, to_kind);
bool is_js_array = from_map->instance_type() == JS_ARRAY_TYPE;
TransitionElementsKindStub stub(from_kind, to_kind, is_js_array);
__ CallStub(&stub);
RecordSafepointWithRegisters(
instr->pointer_map(), 0, Safepoint::kNoLazyDeopt);

deps/v8/src/x64/lithium-codegen-x64.h (1 line changed)

@@ -159,6 +159,7 @@ class LCodeGen: public LCodeGenBase {
// Code generation passes. Returns true if code generation should
// continue.
void GenerateBodyInstructionPre(LInstruction* instr) V8_OVERRIDE;
bool GeneratePrologue();
bool GenerateDeferredCode();
bool GenerateJumpTable();

deps/v8/src/x64/lithium-x64.cc (44 lines changed)

@@ -630,15 +630,6 @@ LInstruction* LChunkBuilder::MarkAsCall(LInstruction* instr,
instr->MarkAsCall();
instr = AssignPointerMap(instr);
if (hinstr->HasObservableSideEffects()) {
ASSERT(hinstr->next()->IsSimulate());
HSimulate* sim = HSimulate::cast(hinstr->next());
ASSERT(instruction_pending_deoptimization_environment_ == NULL);
ASSERT(pending_deoptimization_ast_id_.IsNone());
instruction_pending_deoptimization_environment_ = instr;
pending_deoptimization_ast_id_ = sim->ast_id();
}
// If instruction does not have side-effects lazy deoptimization
// after the call will try to deoptimize to the point before the call.
// Thus we still need to attach environment to this call even if
@@ -916,6 +907,26 @@ void LChunkBuilder::VisitInstruction(HInstruction* current) {
instr = AssignEnvironment(instr);
}
chunk_->AddInstruction(instr, current_block_);
if (instr->IsCall()) {
HValue* hydrogen_value_for_lazy_bailout = current;
LInstruction* instruction_needing_environment = NULL;
if (current->HasObservableSideEffects()) {
HSimulate* sim = HSimulate::cast(current->next());
instruction_needing_environment = instr;
sim->ReplayEnvironment(current_block_->last_environment());
hydrogen_value_for_lazy_bailout = sim;
}
LInstruction* bailout = AssignEnvironment(new(zone()) LLazyBailout());
bailout->set_hydrogen_value(hydrogen_value_for_lazy_bailout);
chunk_->AddInstruction(bailout, current_block_);
if (instruction_needing_environment != NULL) {
// Store the lazy deopt environment with the instruction if needed.
// Right now it is only used for LInstanceOfKnownGlobal.
instruction_needing_environment->
SetDeferredLazyDeoptimizationEnvironment(bailout->environment());
}
}
}
current_instruction_ = old_current;
}
@@ -2334,21 +2345,6 @@ LInstruction* LChunkBuilder::DoIsConstructCallAndBranch(
LInstruction* LChunkBuilder::DoSimulate(HSimulate* instr) {
instr->ReplayEnvironment(current_block_->last_environment());
// If there is an instruction pending deoptimization environment create a
// lazy bailout instruction to capture the environment.
if (pending_deoptimization_ast_id_ == instr->ast_id()) {
LLazyBailout* lazy_bailout = new(zone()) LLazyBailout;
LInstruction* result = AssignEnvironment(lazy_bailout);
// Store the lazy deopt environment with the instruction if needed. Right
// now it is only used for LInstanceOfKnownGlobal.
instruction_pending_deoptimization_environment_->
SetDeferredLazyDeoptimizationEnvironment(result->environment());
instruction_pending_deoptimization_environment_ = NULL;
pending_deoptimization_ast_id_ = BailoutId::None();
return result;
}
return NULL;
}

deps/v8/src/x64/lithium-x64.h (6 lines changed)

@@ -2510,9 +2510,7 @@ class LChunkBuilder V8_FINAL : public LChunkBuilderBase {
current_instruction_(NULL),
current_block_(NULL),
next_block_(NULL),
allocator_(allocator),
instruction_pending_deoptimization_environment_(NULL),
pending_deoptimization_ast_id_(BailoutId::None()) { }
allocator_(allocator) { }
// Build the sequence for the graph.
LPlatformChunk* Build();
@@ -2648,8 +2646,6 @@ class LChunkBuilder V8_FINAL : public LChunkBuilderBase {
HBasicBlock* current_block_;
HBasicBlock* next_block_;
LAllocator* allocator_;
LInstruction* instruction_pending_deoptimization_environment_;
BailoutId pending_deoptimization_ast_id_;
DISALLOW_COPY_AND_ASSIGN(LChunkBuilder);
};

deps/v8/test/cctest/test-api.cc (45 lines changed)

@@ -21965,3 +21965,48 @@ TEST(TestFunctionCallOptimization) {
checker.Run(true, false);
checker.Run(false, false);
}
TEST(Regress354123) {
LocalContext current;
v8::Isolate* isolate = current->GetIsolate();
v8::HandleScope scope(isolate);
v8::Handle<v8::ObjectTemplate> templ = v8::ObjectTemplate::New(isolate);
templ->SetAccessCheckCallbacks(NamedAccessCounter, IndexedAccessCounter);
current->Global()->Set(v8_str("friend"), templ->NewInstance());
// Test access using __proto__ from the prototype chain.
named_access_count = 0;
CompileRun("friend.__proto__ = {};");
CHECK_EQ(2, named_access_count);
CompileRun("friend.__proto__;");
CHECK_EQ(4, named_access_count);
// Test access using __proto__ as a hijacked function (A).
named_access_count = 0;
CompileRun("var p = Object.prototype;"
"var f = Object.getOwnPropertyDescriptor(p, '__proto__').set;"
"f.call(friend, {});");
CHECK_EQ(1, named_access_count);
CompileRun("var p = Object.prototype;"
"var f = Object.getOwnPropertyDescriptor(p, '__proto__').get;"
"f.call(friend);");
CHECK_EQ(2, named_access_count);
// Test access using __proto__ as a hijacked function (B).
named_access_count = 0;
CompileRun("var f = Object.prototype.__lookupSetter__('__proto__');"
"f.call(friend, {});");
CHECK_EQ(1, named_access_count);
CompileRun("var f = Object.prototype.__lookupGetter__('__proto__');"
"f.call(friend);");
CHECK_EQ(2, named_access_count);
// Test access using Object.setPrototypeOf reflective method.
named_access_count = 0;
CompileRun("Object.setPrototypeOf(friend, {});");
CHECK_EQ(1, named_access_count);
CompileRun("Object.getPrototypeOf(friend);");
CHECK_EQ(2, named_access_count);
}

deps/v8/test/mjsunit/regress/regress-350863.js (45 lines changed, new file)

@@ -0,0 +1,45 @@
// Copyright 2014 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Flags: --allow-natives-syntax
var __v_7 = { };
function __f_8(base, condition) {
__v_7[base + 3] = 0;
__v_7[base + 4] = 0;
if (condition) {
__v_7[base + 0] = 0;
__v_7[base + 5] = 0;
} else {
__v_7[base + 0] = 0;
__v_7[base + 18] = 0;
}
}
__f_8(1, true);
__f_8(1, false);
%OptimizeFunctionOnNextCall(__f_8);
__f_8(5, false);

deps/v8/test/mjsunit/regress/regress-352982.js (51 lines changed, new file)

@@ -0,0 +1,51 @@
// Copyright 2014 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Flags: --allow-natives-syntax --expose-gc
function __f_4(i1) {
return __v_3[i1] * __v_3[0];
}
function __f_3(i1) {
__f_4(i1);
__f_4(i1 + 16);
__f_4(i1 + 32);
%OptimizeFunctionOnNextCall(__f_4);
var x = __f_4(i1 + 993);
return x;
}
function __f_5() {
__v_3[0] = +__v_3[0];
gc();
__f_3(0) | 0;
__v_3 = /\u23a1|x/;
return 0;
}
var __v_3 = new Float32Array(1000);
__f_5();
__f_5();
__f_5();

deps/v8/test/mjsunit/regress/regress-353004.js (74 lines changed, new file)

@@ -0,0 +1,74 @@
// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// Flags: --allow-natives-syntax
var buffer1 = new ArrayBuffer(100 * 1024);
var array1 = new Uint8Array(buffer1, {valueOf : function() {
%ArrayBufferNeuter(buffer1);
return 0;
}});
assertEquals(0, array1.length);
var buffer2 = new ArrayBuffer(100 * 1024);
assertThrows(function() {
var array2 = new Uint8Array(buffer2, 0, {valueOf : function() {
%ArrayBufferNeuter(buffer2);
return 100 * 1024;
}});
}, RangeError);
var buffer3 = new ArrayBuffer(100 * 1024 * 1024);
var dataView1 = new DataView(buffer3, {valueOf : function() {
%ArrayBufferNeuter(buffer3);
return 0;
}});
assertEquals(0, dataView1.byteLength);
var buffer4 = new ArrayBuffer(100 * 1024);
assertThrows(function() {
var dataView2 = new DataView(buffer4, 0, {valueOf : function() {
%ArrayBufferNeuter(buffer4);
return 100 * 1024 * 1024;
}});
}, RangeError);
var buffer5 = new ArrayBuffer(100 * 1024);
var buffer6 = buffer5.slice({valueOf : function() {
%ArrayBufferNeuter(buffer5);
return 0;
}}, 100 * 1024 * 1024);
assertEquals(0, buffer6.byteLength);
var buffer7 = new ArrayBuffer(100 * 1024 * 1024);
var buffer8 = buffer7.slice(0, {valueOf : function() {
%ArrayBufferNeuter(buffer7);
return 100 * 1024 * 1024;
}});
assertEquals(0, buffer8.byteLength);
var buffer9 = new ArrayBuffer(1024);
var array9 = new Uint8Array(buffer9);
var array10 = array9.subarray({valueOf : function() {
%ArrayBufferNeuter(buffer9);
return 0;
}}, 1024);
assertEquals(0, array9.length);
assertEquals(0, array10.length);
var buffer11 = new ArrayBuffer(1024);
var array11 = new Uint8Array(buffer11);
var array12 = array11.subarray(0, {valueOf : function() {
%ArrayBufferNeuter(buffer11);
return 1024;
}});
assertEquals(0, array11.length);
assertEquals(0, array12.length);

deps/v8/test/mjsunit/regress/regress-crbug-352929.js (33 lines changed, new file)

@@ -0,0 +1,33 @@
// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
// Flags: --allow-natives-syntax
var dummy = new Int32Array(100);
array = new Int32Array(100);
var dummy2 = new Int32Array(100);
array[-17] = 0;
function fun(base,cond) {
array[base - 1] = 1;
array[base - 2] = 2;
if (cond) {
array[base - 4] = 3;
array[base - 5] = 4;
} else {
array[base - 6] = 5;
array[base - 100] = 777;
}
}
fun(5,true);
fun(7,false);
%OptimizeFunctionOnNextCall(fun);
fun(7,false);
for (var i = 0; i < dummy.length; i++) {
assertEquals(0, dummy[i]);
}
for (var i = 0; i < dummy2.length; i++) {
assertEquals(0, dummy2[i]);
}