Browse Source

deps: upgrade to V8 4.7.80.32

Pick up the latest fixes on the V8 4.7 branch
https://github.com/v8/v8/compare/4.7.80.25...4.7.80.32

PR-URL: https://github.com/nodejs/node/pull/4699
Reviewed-By: cjihrig - Colin Ihrig <cjihrig@gmail.com>
Reviewed-By: trevnorris - Trevor Norris <trev.norris@gmail.com>
Reviewed-By: jasnell - James M Snell <jasnell@gmail.com>
Reviewed-By: targos - Michaël Zasso <mic.besace@gmail.com>
process-exit-stdio-flushing
Ali Ijaz Sheikh 9 years ago
parent
commit
384b20362c
  1. 2
      deps/v8/include/v8-version.h
  2. 3
      deps/v8/src/code-stubs-hydrogen.cc
  3. 125
      deps/v8/src/elements.cc
  4. 2
      deps/v8/src/flag-definitions.h
  5. 38
      deps/v8/src/func-name-inferrer.h
  6. 11
      deps/v8/src/heap/heap.cc
  7. 76
      deps/v8/src/ic/ic.cc
  8. 6
      deps/v8/src/objects-debug.cc
  9. 16
      deps/v8/src/objects.cc
  10. 17
      deps/v8/src/parser.cc
  11. 17
      deps/v8/src/preparser.h
  12. 48
      deps/v8/test/cctest/test-api.cc
  13. 3
      deps/v8/test/mjsunit/mjsunit.status
  14. 10008
      deps/v8/test/mjsunit/regress/regress-4595.js

2
deps/v8/include/v8-version.h

@@ -11,7 +11,7 @@
#define V8_MAJOR_VERSION 4
#define V8_MINOR_VERSION 7
#define V8_BUILD_NUMBER 80
#define V8_PATCH_LEVEL 25
#define V8_PATCH_LEVEL 32
// Use 1 for candidates and 0 otherwise.
// (Boolean macro values are not supported by all preprocessors.)

3
deps/v8/src/code-stubs-hydrogen.cc

@@ -396,8 +396,7 @@ HValue* CodeStubGraphBuilder<TypeofStub>::BuildCodeStub() {
// Is it an undetectable object?
IfBuilder is_undetectable(this);
is_undetectable.If<HCompareNumericAndBranch>(
bit_field_masked, Add<HConstant>(1 << Map::kIsUndetectable),
Token::EQ);
bit_field_masked, graph()->GetConstant0(), Token::NE);
is_undetectable.Then();
{
// typeof an undetectable object is 'undefined'.

125
deps/v8/src/elements.cc

@@ -691,11 +691,12 @@ class ElementsAccessorBase : public ElementsAccessor {
}
virtual void SetLength(Handle<JSArray> array, uint32_t length) final {
ElementsAccessorSubclass::SetLengthImpl(array, length,
ElementsAccessorSubclass::SetLengthImpl(array->GetIsolate(), array, length,
handle(array->elements()));
}
static void SetLengthImpl(Handle<JSArray> array, uint32_t length,
static void SetLengthImpl(Isolate* isolate, Handle<JSArray> array,
uint32_t length,
Handle<FixedArrayBase> backing_store) {
DCHECK(!array->SetLengthWouldNormalize(length));
DCHECK(IsFastElementsKind(array->GetElementsKind()));
@@ -712,6 +713,7 @@ class ElementsAccessorBase : public ElementsAccessor {
// Check whether the backing store should be shrunk.
uint32_t capacity = backing_store->length();
old_length = Min(old_length, capacity);
if (length == 0) {
array->initialize_elements();
} else if (length <= capacity) {
@@ -720,7 +722,7 @@ class ElementsAccessorBase : public ElementsAccessor {
}
if (2 * length <= capacity) {
// If more than half the elements won't be used, trim the array.
array->GetHeap()->RightTrimFixedArray<Heap::CONCURRENT_TO_SWEEPER>(
isolate->heap()->RightTrimFixedArray<Heap::CONCURRENT_TO_SWEEPER>(
*backing_store, capacity - length);
} else {
// Otherwise, fill the unused tail with holes.
@@ -954,11 +956,11 @@ class DictionaryElementsAccessor
: ElementsAccessorBase<DictionaryElementsAccessor,
ElementsKindTraits<DICTIONARY_ELEMENTS> >(name) {}
static void SetLengthImpl(Handle<JSArray> array, uint32_t length,
static void SetLengthImpl(Isolate* isolate, Handle<JSArray> array,
uint32_t length,
Handle<FixedArrayBase> backing_store) {
Handle<SeededNumberDictionary> dict =
Handle<SeededNumberDictionary>::cast(backing_store);
Isolate* isolate = array->GetIsolate();
int capacity = dict->Capacity();
uint32_t old_length = 0;
CHECK(array->length()->ToArrayLength(&old_length));
@@ -1181,13 +1183,18 @@ class FastElementsAccessor
}
int num_used = 0;
for (int i = 0; i < backing_store->length(); ++i) {
if (!backing_store->is_the_hole(i)) ++num_used;
// Bail out early if more than 1/4 is used.
if (4 * num_used > backing_store->length()) break;
}
if (4 * num_used <= backing_store->length()) {
JSObject::NormalizeElements(obj);
if (!backing_store->is_the_hole(i)) {
++num_used;
// Bail out if a number dictionary wouldn't be able to save at least
// 75% space.
if (4 * SeededNumberDictionary::ComputeCapacity(num_used) *
SeededNumberDictionary::kEntrySize >
backing_store->length()) {
return;
}
}
}
JSObject::NormalizeElements(obj);
}
}
@@ -1293,9 +1300,10 @@ class FastElementsAccessor
receiver, backing_store, args, unshift_size, AT_START);
}
static void MoveElements(Heap* heap, Handle<FixedArrayBase> backing_store,
int dst_index, int src_index, int len,
int hole_start, int hole_end) {
static void MoveElements(Isolate* isolate, Handle<JSArray> receiver,
Handle<FixedArrayBase> backing_store, int dst_index,
int src_index, int len, int hole_start,
int hole_end) {
UNREACHABLE();
}
@@ -1344,13 +1352,13 @@ class FastElementsAccessor
// Delete and move elements to make space for add_count new elements.
if (add_count < delete_count) {
FastElementsAccessorSubclass::SpliceShrinkStep(backing_store, heap, start,
delete_count, add_count,
length, new_length);
FastElementsAccessorSubclass::SpliceShrinkStep(
isolate, receiver, backing_store, start, delete_count, add_count,
length, new_length);
} else if (add_count > delete_count) {
backing_store = FastElementsAccessorSubclass::SpliceGrowStep(
receiver, backing_store, isolate, heap, start, delete_count,
add_count, length, new_length);
isolate, receiver, backing_store, start, delete_count, add_count,
length, new_length);
}
// Copy over the arguments.
@@ -1364,29 +1372,33 @@ class FastElementsAccessor
}
private:
static void SpliceShrinkStep(Handle<FixedArrayBase> backing_store, Heap* heap,
// SpliceShrinkStep might modify the backing_store.
static void SpliceShrinkStep(Isolate* isolate, Handle<JSArray> receiver,
Handle<FixedArrayBase> backing_store,
uint32_t start, uint32_t delete_count,
uint32_t add_count, uint32_t len,
uint32_t new_length) {
const int move_left_count = len - delete_count - start;
const int move_left_dst_index = start + add_count;
FastElementsAccessorSubclass::MoveElements(
heap, backing_store, move_left_dst_index, start + delete_count,
move_left_count, new_length, len);
isolate, receiver, backing_store, move_left_dst_index,
start + delete_count, move_left_count, new_length, len);
}
// SpliceGrowStep might modify the backing_store.
static Handle<FixedArrayBase> SpliceGrowStep(
Handle<JSArray> receiver, Handle<FixedArrayBase> backing_store,
Isolate* isolate, Heap* heap, uint32_t start, uint32_t delete_count,
uint32_t add_count, uint32_t length, uint32_t new_length) {
Isolate* isolate, Handle<JSArray> receiver,
Handle<FixedArrayBase> backing_store, uint32_t start,
uint32_t delete_count, uint32_t add_count, uint32_t length,
uint32_t new_length) {
// Check we do not overflow the new_length.
DCHECK((add_count - delete_count) <= (Smi::kMaxValue - length));
// Check if backing_store is big enough.
if (new_length <= static_cast<uint32_t>(backing_store->length())) {
FastElementsAccessorSubclass::MoveElements(
heap, backing_store, start + add_count, start + delete_count,
(length - delete_count - start), 0, 0);
isolate, receiver, backing_store, start + add_count,
start + delete_count, (length - delete_count - start), 0, 0);
// MoveElements updates the backing_store in-place.
return backing_store;
}
// New backing storage is needed.
@@ -1407,20 +1419,19 @@ class FastElementsAccessor
static Handle<Object> RemoveElement(Handle<JSArray> receiver,
Handle<FixedArrayBase> backing_store,
Where remove_position) {
Isolate* isolate = receiver->GetIsolate();
uint32_t length =
static_cast<uint32_t>(Smi::cast(receiver->length())->value());
Isolate* isolate = receiver->GetIsolate();
DCHECK(length > 0);
int new_length = length - 1;
int remove_index = remove_position == AT_START ? 0 : new_length;
Handle<Object> result =
FastElementsAccessorSubclass::GetImpl(backing_store, remove_index);
if (remove_position == AT_START) {
Heap* heap = isolate->heap();
FastElementsAccessorSubclass::MoveElements(heap, backing_store, 0, 1,
new_length, 0, 0);
FastElementsAccessorSubclass::MoveElements(
isolate, receiver, backing_store, 0, 1, new_length, 0, 0);
}
FastElementsAccessorSubclass::SetLengthImpl(receiver, new_length,
FastElementsAccessorSubclass::SetLengthImpl(isolate, receiver, new_length,
backing_store);
if (IsHoleyElementsKind(KindTraits::Kind) && result->IsTheHole()) {
@@ -1454,8 +1465,8 @@ class FastElementsAccessor
// If the backing store has enough capacity and we add elements to the
// start we have to shift the existing objects.
Isolate* isolate = receiver->GetIsolate();
FastElementsAccessorSubclass::MoveElements(isolate->heap(), backing_store,
add_size, 0, length, 0, 0);
FastElementsAccessorSubclass::MoveElements(
isolate, receiver, backing_store, add_size, 0, length, 0, 0);
}
int insertion_index = remove_position == AT_START ? 0 : length;
@@ -1508,11 +1519,22 @@ class FastSmiOrObjectElementsAccessor
return backing_store->get(index);
}
static void MoveElements(Heap* heap, Handle<FixedArrayBase> backing_store,
int dst_index, int src_index, int len,
int hole_start, int hole_end) {
static void MoveElements(Isolate* isolate, Handle<JSArray> receiver,
Handle<FixedArrayBase> backing_store, int dst_index,
int src_index, int len, int hole_start,
int hole_end) {
Heap* heap = isolate->heap();
Handle<FixedArray> dst_elms = Handle<FixedArray>::cast(backing_store);
if (len != 0) {
if (heap->CanMoveObjectStart(*dst_elms) && dst_index == 0) {
// Update all the copies of this backing_store handle.
*dst_elms.location() =
FixedArray::cast(heap->LeftTrimFixedArray(*dst_elms, src_index));
receiver->set_elements(*dst_elms);
// Adjust the hole offset as the array has been shrunk.
hole_end -= src_index;
DCHECK_LE(hole_start, backing_store->length());
DCHECK_LE(hole_end, backing_store->length());
} else if (len != 0) {
DisallowHeapAllocation no_gc;
heap->MoveElements(*dst_elms, dst_index, src_index, len);
}
@@ -1631,12 +1653,23 @@ class FastDoubleElementsAccessor
FixedDoubleArray::cast(backing_store)->set(entry, value->Number());
}
static void MoveElements(Heap* heap, Handle<FixedArrayBase> backing_store,
int dst_index, int src_index, int len,
int hole_start, int hole_end) {
static void MoveElements(Isolate* isolate, Handle<JSArray> receiver,
Handle<FixedArrayBase> backing_store, int dst_index,
int src_index, int len, int hole_start,
int hole_end) {
Heap* heap = isolate->heap();
Handle<FixedDoubleArray> dst_elms =
Handle<FixedDoubleArray>::cast(backing_store);
if (len != 0) {
if (heap->CanMoveObjectStart(*dst_elms) && dst_index == 0) {
// Update all the copies of this backing_store handle.
*dst_elms.location() = FixedDoubleArray::cast(
heap->LeftTrimFixedArray(*dst_elms, src_index));
receiver->set_elements(*dst_elms);
// Adjust the hole offset as the array has been shrunk.
hole_end -= src_index;
DCHECK_LE(hole_start, backing_store->length());
DCHECK_LE(hole_end, backing_store->length());
} else if (len != 0) {
MemMove(dst_elms->data_start() + dst_index,
dst_elms->data_start() + src_index, len * kDoubleSize);
}
@@ -1742,7 +1775,8 @@ class TypedElementsAccessor
return PropertyDetails(DONT_DELETE, DATA, 0, PropertyCellType::kNoCell);
}
static void SetLengthImpl(Handle<JSArray> array, uint32_t length,
static void SetLengthImpl(Isolate* isolate, Handle<JSArray> array,
uint32_t length,
Handle<FixedArrayBase> backing_store) {
// External arrays do not support changing their length.
UNREACHABLE();
@@ -1856,7 +1890,8 @@ class SloppyArgumentsElementsAccessor
}
}
static void SetLengthImpl(Handle<JSArray> array, uint32_t length,
static void SetLengthImpl(Isolate* isolate, Handle<JSArray> array,
uint32_t length,
Handle<FixedArrayBase> parameter_map) {
// Sloppy arguments objects are not arrays.
UNREACHABLE();

2
deps/v8/src/flag-definitions.h

@@ -676,7 +676,7 @@ DEFINE_IMPLICATION(trace_detached_contexts, track_detached_contexts)
#ifdef VERIFY_HEAP
DEFINE_BOOL(verify_heap, false, "verify heap pointers before and after GC")
#endif
DEFINE_BOOL(move_object_start, false, "enable moving of object starts")
DEFINE_BOOL(move_object_start, true, "enable moving of object starts")
// counters.cc
DEFINE_INT(histogram_interval, 600000,

38
deps/v8/src/func-name-inferrer.h

@@ -30,17 +30,29 @@ class FuncNameInferrer : public ZoneObject {
public:
FuncNameInferrer(AstValueFactory* ast_value_factory, Zone* zone);
// To enter function name inference state, put a FuncNameInferrer::State
// on the stack.
class State {
public:
explicit State(FuncNameInferrer* fni) : fni_(fni) {
if (fni_ != nullptr) fni_->Enter();
}
~State() {
if (fni_ != nullptr) fni_->Leave();
}
private:
FuncNameInferrer* fni_;
DISALLOW_COPY_AND_ASSIGN(State);
};
// Returns whether we have entered name collection state.
bool IsOpen() const { return !entries_stack_.is_empty(); }
// Pushes an enclosing the name of enclosing function onto names stack.
void PushEnclosingName(const AstRawString* name);
// Enters name collection state.
void Enter() {
entries_stack_.Add(names_stack_.length(), zone());
}
// Pushes an encountered name onto names stack when in collection state.
void PushLiteralName(const AstRawString* name);
@@ -67,14 +79,6 @@
}
}
// Leaves names collection state.
void Leave() {
DCHECK(IsOpen());
names_stack_.Rewind(entries_stack_.RemoveLast());
if (entries_stack_.is_empty())
funcs_to_infer_.Clear();
}
private:
enum NameType {
kEnclosingConstructorName,
@@ -87,6 +91,14 @@
NameType type;
};
void Enter() { entries_stack_.Add(names_stack_.length(), zone()); }
void Leave() {
DCHECK(IsOpen());
names_stack_.Rewind(entries_stack_.RemoveLast());
if (entries_stack_.is_empty()) funcs_to_infer_.Clear();
}
Zone* zone() const { return zone_; }
// Constructs a full name in dotted notation from gathered names.

11
deps/v8/src/heap/heap.cc

@@ -5993,9 +5993,14 @@ void Heap::FilterStoreBufferEntriesOnAboutToBeFreedPages() {
void Heap::FreeQueuedChunks() {
if (chunks_queued_for_free_ != NULL) {
V8::GetCurrentPlatform()->CallOnBackgroundThread(
new UnmapFreeMemoryTask(this, chunks_queued_for_free_),
v8::Platform::kShortRunningTask);
if (FLAG_concurrent_sweeping) {
V8::GetCurrentPlatform()->CallOnBackgroundThread(
new UnmapFreeMemoryTask(this, chunks_queued_for_free_),
v8::Platform::kShortRunningTask);
} else {
FreeQueuedChunks(chunks_queued_for_free_);
pending_unmapping_tasks_semaphore_.Signal();
}
chunks_queued_for_free_ = NULL;
} else {
// If we do not have anything to unmap, we just signal the semaphore

76
deps/v8/src/ic/ic.cc

@@ -1022,6 +1022,39 @@ Handle<Code> LoadIC::SimpleFieldLoad(FieldIndex index) {
}
bool IsCompatibleReceiver(LookupIterator* lookup, Handle<Map> receiver_map) {
DCHECK(lookup->state() == LookupIterator::ACCESSOR);
Isolate* isolate = lookup->isolate();
Handle<Object> accessors = lookup->GetAccessors();
if (accessors->IsExecutableAccessorInfo()) {
Handle<ExecutableAccessorInfo> info =
Handle<ExecutableAccessorInfo>::cast(accessors);
if (info->getter() != NULL &&
!ExecutableAccessorInfo::IsCompatibleReceiverMap(isolate, info,
receiver_map)) {
return false;
}
} else if (accessors->IsAccessorPair()) {
Handle<Object> getter(Handle<AccessorPair>::cast(accessors)->getter(),
isolate);
Handle<JSObject> holder = lookup->GetHolder<JSObject>();
Handle<Object> receiver = lookup->GetReceiver();
if (getter->IsJSFunction() && holder->HasFastProperties()) {
Handle<JSFunction> function = Handle<JSFunction>::cast(getter);
if (receiver->IsJSObject() || function->shared()->IsBuiltin() ||
!is_sloppy(function->shared()->language_mode())) {
CallOptimization call_optimization(function);
if (call_optimization.is_simple_api_call() &&
!call_optimization.IsCompatibleReceiverMap(receiver_map, holder)) {
return false;
}
}
}
}
return true;
}
void LoadIC::UpdateCaches(LookupIterator* lookup) {
if (state() == UNINITIALIZED) {
// This is the first time we execute this inline cache. Set the target to
@@ -1046,35 +1079,20 @@ void LoadIC::UpdateCaches(LookupIterator* lookup) {
}
} else {
if (lookup->state() == LookupIterator::ACCESSOR) {
Handle<Object> accessors = lookup->GetAccessors();
Handle<Map> map = receiver_map();
if (accessors->IsExecutableAccessorInfo()) {
Handle<ExecutableAccessorInfo> info =
Handle<ExecutableAccessorInfo>::cast(accessors);
if ((v8::ToCData<Address>(info->getter()) != 0) &&
!ExecutableAccessorInfo::IsCompatibleReceiverMap(isolate(), info,
map)) {
TRACE_GENERIC_IC(isolate(), "LoadIC", "incompatible receiver type");
code = slow_stub();
}
} else if (accessors->IsAccessorPair()) {
Handle<Object> getter(Handle<AccessorPair>::cast(accessors)->getter(),
isolate());
Handle<JSObject> holder = lookup->GetHolder<JSObject>();
Handle<Object> receiver = lookup->GetReceiver();
if (getter->IsJSFunction() && holder->HasFastProperties()) {
Handle<JSFunction> function = Handle<JSFunction>::cast(getter);
if (receiver->IsJSObject() || function->IsBuiltin() ||
!is_sloppy(function->shared()->language_mode())) {
CallOptimization call_optimization(function);
if (call_optimization.is_simple_api_call() &&
!call_optimization.IsCompatibleReceiver(receiver, holder)) {
TRACE_GENERIC_IC(isolate(), "LoadIC",
"incompatible receiver type");
code = slow_stub();
}
}
}
if (!IsCompatibleReceiver(lookup, receiver_map())) {
TRACE_GENERIC_IC(isolate(), "LoadIC", "incompatible receiver type");
code = slow_stub();
}
} else if (lookup->state() == LookupIterator::INTERCEPTOR) {
// Perform a lookup behind the interceptor. Copy the LookupIterator since
// the original iterator will be used to fetch the value.
LookupIterator it = *lookup;
it.Next();
LookupForRead(&it);
if (it.state() == LookupIterator::ACCESSOR &&
!IsCompatibleReceiver(&it, receiver_map())) {
TRACE_GENERIC_IC(isolate(), "LoadIC", "incompatible receiver type");
code = slow_stub();
}
}
if (code.is_null()) code = ComputeHandler(lookup);

6
deps/v8/src/objects-debug.cc

@@ -303,7 +303,11 @@ void JSObject::JSObjectVerify() {
if (r.IsNone()) {
CHECK(type_is_none);
} else if (!type_is_any && !(type_is_none && r.IsHeapObject())) {
CHECK(!field_type->NowStable() || field_type->NowContains(value));
// If allocation folding is off then GC could happen during inner
// object literal creation and we will end up having and undefined
// value that does not match the field type.
CHECK(!field_type->NowStable() || field_type->NowContains(value) ||
(!FLAG_use_allocation_folding && value->IsUndefined()));
}
}
}

16
deps/v8/src/objects.cc

@@ -2770,7 +2770,7 @@ void Map::UpdateFieldType(int descriptor, Handle<Name> name,
}
bool FieldTypeIsCleared(Representation rep, Handle<HeapType> type) {
bool FieldTypeIsCleared(Representation rep, HeapType* type) {
return type->Is(HeapType::None()) && rep.IsHeapObject();
}
@@ -2784,7 +2784,7 @@ Handle<HeapType> Map::GeneralizeFieldType(Representation rep1,
// Cleared field types need special treatment. They represent lost knowledge,
// so we must be conservative, so their generalization with any other type
// is "Any".
if (FieldTypeIsCleared(rep1, type1) || FieldTypeIsCleared(rep2, type2)) {
if (FieldTypeIsCleared(rep1, *type1) || FieldTypeIsCleared(rep2, *type2)) {
return HeapType::Any(isolate);
}
if (type1->NowIs(type2)) return type2;
@@ -2807,7 +2807,7 @@ void Map::GeneralizeFieldType(Handle<Map> map, int modify_index,
isolate);
if (old_representation.Equals(new_representation) &&
!FieldTypeIsCleared(new_representation, new_field_type) &&
!FieldTypeIsCleared(new_representation, *new_field_type) &&
// Checking old_field_type for being cleared is not necessary because
// the NowIs check below would fail anyway in that case.
new_field_type->NowIs(old_field_type)) {
@@ -3454,10 +3454,16 @@ MaybeHandle<Map> Map::TryUpdate(Handle<Map> old_map) {
switch (new_details.type()) {
case DATA: {
HeapType* new_type = new_descriptors->GetFieldType(i);
// Cleared field types need special treatment. They represent lost
// knowledge, so we must first generalize the new_type to "Any".
if (FieldTypeIsCleared(new_details.representation(), new_type)) {
return MaybeHandle<Map>();
}
PropertyType old_property_type = old_details.type();
if (old_property_type == DATA) {
HeapType* old_type = old_descriptors->GetFieldType(i);
if (!old_type->NowIs(new_type)) {
if (FieldTypeIsCleared(old_details.representation(), old_type) ||
!old_type->NowIs(new_type)) {
return MaybeHandle<Map>();
}
} else {
@@ -13128,6 +13134,8 @@ static bool ShouldConvertToFastElements(JSObject* object,
uint32_t dictionary_size = static_cast<uint32_t>(dictionary->Capacity()) *
SeededNumberDictionary::kEntrySize;
// Turn fast if the dictionary only saves 50% space.
return 2 * dictionary_size >= *new_capacity;
}

17
deps/v8/src/parser.cc

@@ -2227,10 +2227,8 @@ Statement* Parser::ParseFunctionDeclaration(
const AstRawString* name = ParseIdentifierOrStrictReservedWord(
&is_strict_reserved, CHECK_OK);
if (fni_ != NULL) {
fni_->Enter();
fni_->PushEnclosingName(name);
}
FuncNameInferrer::State fni_state(fni_);
if (fni_ != NULL) fni_->PushEnclosingName(name);
FunctionLiteral* fun = ParseFunctionLiteral(
name, scanner()->location(),
is_strict_reserved ? kFunctionNameIsStrictReserved
@@ -2239,7 +2237,6 @@
: FunctionKind::kNormalFunction,
pos, FunctionLiteral::DECLARATION, FunctionLiteral::NORMAL_ARITY,
language_mode(), CHECK_OK);
if (fni_ != NULL) fni_->Leave();
// Even if we're not at the top-level of the global or a function
// scope, we treat it as such and introduce the function with its
@@ -2516,7 +2513,7 @@ void Parser::ParseVariableDeclarations(VariableDeclarationContext var_context,
int bindings_start = peek_position();
bool is_for_iteration_variable;
do {
if (fni_ != NULL) fni_->Enter();
FuncNameInferrer::State fni_state(fni_);
// Parse name.
if (!first_declaration) Consume(Token::COMMA);
@@ -2591,7 +2588,6 @@ void Parser::ParseVariableDeclarations(VariableDeclarationContext var_context,
value = GetLiteralUndefined(position());
}
if (single_name && fni_ != NULL) fni_->Leave();
parsing_result->declarations.Add(DeclarationParsingResult::Declaration(
pattern, initializer_position, value));
first_declaration = false;
@@ -4813,7 +4809,7 @@ ClassLiteral* Parser::ParseClassLiteral(const AstRawString* name,
const bool has_extends = extends != nullptr;
while (peek() != Token::RBRACE) {
if (Check(Token::SEMICOLON)) continue;
if (fni_ != NULL) fni_->Enter();
FuncNameInferrer::State fni_state(fni_);
const bool in_class = true;
const bool is_static = false;
bool is_computed_name = false; // Classes do not care about computed
@@ -4831,10 +4827,7 @@ ClassLiteral* Parser::ParseClassLiteral(const AstRawString* name,
properties->Add(property, zone());
}
if (fni_ != NULL) {
fni_->Infer();
fni_->Leave();
}
if (fni_ != NULL) fni_->Infer();
}
Expect(Token::RBRACE, CHECK_OK);

17
deps/v8/src/preparser.h

@@ -2770,7 +2770,7 @@ typename ParserBase<Traits>::ExpressionT ParserBase<Traits>::ParseObjectLiteral(
Expect(Token::LBRACE, CHECK_OK);
while (peek() != Token::RBRACE) {
if (fni_ != nullptr) fni_->Enter();
FuncNameInferrer::State fni_state(fni_);
const bool in_class = false;
const bool is_static = false;
@@ -2801,10 +2801,7 @@ typename ParserBase<Traits>::ExpressionT ParserBase<Traits>::ParseObjectLiteral(
Expect(Token::COMMA, CHECK_OK);
}
if (fni_ != nullptr) {
fni_->Infer();
fni_->Leave();
}
if (fni_ != nullptr) fni_->Infer();
}
Expect(Token::RBRACE, CHECK_OK);
@@ -2906,7 +2903,7 @@ ParserBase<Traits>::ParseAssignmentExpression(bool accept_IN,
return this->ParseYieldExpression(classifier, ok);
}
if (fni_ != NULL) fni_->Enter();
FuncNameInferrer::State fni_state(fni_);
ParserBase<Traits>::Checkpoint checkpoint(this);
ExpressionClassifier arrow_formals_classifier(classifier->duplicate_finder());
bool parenthesized_formals = peek() == Token::LPAREN;
@@ -2941,6 +2938,9 @@ ParserBase<Traits>::ParseAssignmentExpression(bool accept_IN,
}
expression = this->ParseArrowFunctionLiteral(
parameters, arrow_formals_classifier, CHECK_OK);
if (fni_ != nullptr) fni_->Infer();
return expression;
}
@@ -2951,7 +2951,6 @@ ParserBase<Traits>::ParseAssignmentExpression(bool accept_IN,
ExpressionClassifier::FormalParametersProductions);
if (!Token::IsAssignmentOp(peek())) {
if (fni_ != NULL) fni_->Leave();
// Parsed conditional expression only (no assignment).
return expression;
}
@@ -3002,7 +3001,6 @@ ParserBase<Traits>::ParseAssignmentExpression(bool accept_IN,
} else {
fni_->RemoveLastFunction();
}
fni_->Leave();
}
return factory()->NewAssignment(op, expression, right, pos);
@@ -3469,7 +3467,7 @@ ParserBase<Traits>::ParseStrongInitializationExpression(
// 'this' '.' IdentifierName '=' AssignmentExpression
// 'this' '[' Expression ']' '=' AssignmentExpression
if (fni_ != NULL) fni_->Enter();
FuncNameInferrer::State fni_state(fni_);
Consume(Token::THIS);
int pos = position();
@@ -3528,7 +3526,6 @@ ParserBase<Traits>::ParseStrongInitializationExpression(
} else {
fni_->RemoveLastFunction();
}
fni_->Leave();
}
if (function_state_->return_location().IsValid()) {

48
deps/v8/test/cctest/test-api.cc

@@ -10258,6 +10258,54 @@ THREADED_TEST(CallableObject) {
}
THREADED_TEST(Regress567998) {
LocalContext env;
v8::HandleScope scope(env->GetIsolate());
Local<v8::FunctionTemplate> desc =
v8::FunctionTemplate::New(env->GetIsolate());
desc->InstanceTemplate()->MarkAsUndetectable(); // undetectable
desc->InstanceTemplate()->SetCallAsFunctionHandler(ReturnThis); // callable
Local<v8::Object> obj = desc->GetFunction(env.local())
.ToLocalChecked()
->NewInstance(env.local())
.ToLocalChecked();
CHECK(
env->Global()->Set(env.local(), v8_str("undetectable"), obj).FromJust());
ExpectString("undetectable.toString()", "[object Object]");
ExpectString("typeof undetectable", "undefined");
ExpectString("typeof(undetectable)", "undefined");
ExpectBoolean("typeof undetectable == 'undefined'", true);
ExpectBoolean("typeof undetectable == 'object'", false);
ExpectBoolean("if (undetectable) { true; } else { false; }", false);
ExpectBoolean("!undetectable", true);
ExpectObject("true&&undetectable", obj);
ExpectBoolean("false&&undetectable", false);
ExpectBoolean("true||undetectable", true);
ExpectObject("false||undetectable", obj);
ExpectObject("undetectable&&true", obj);
ExpectObject("undetectable&&false", obj);
ExpectBoolean("undetectable||true", true);
ExpectBoolean("undetectable||false", false);
ExpectBoolean("undetectable==null", true);
ExpectBoolean("null==undetectable", true);
ExpectBoolean("undetectable==undefined", true);
ExpectBoolean("undefined==undetectable", true);
ExpectBoolean("undetectable==undetectable", true);
ExpectBoolean("undetectable===null", false);
ExpectBoolean("null===undetectable", false);
ExpectBoolean("undetectable===undefined", false);
ExpectBoolean("undefined===undetectable", false);
ExpectBoolean("undetectable===undetectable", true);
}
static int Recurse(v8::Isolate* isolate, int depth, int iterations) {
v8::HandleScope scope(isolate);
if (depth == 0) return v8::HandleScope::NumberOfHandles(isolate);

3
deps/v8/test/mjsunit/mjsunit.status

@@ -170,6 +170,9 @@
# Too slow in debug mode for GC stress mode.
'regress/regress-crbug-217858': [PASS, ['mode == debug', SKIP]],
# Too slow in debug mode and under turbofan.
'regress/regress-4595': [PASS, NO_VARIANTS, ['mode == debug', SKIP]],
##############################################################################
# Only RegExp stuff tested, no need for extensive optimizing compiler tests.
'regexp-global': [PASS, NO_VARIANTS],

10008
deps/v8/test/mjsunit/regress/regress-4595.js

File diff suppressed because it is too large
Loading…
Cancel
Save