
Upgrade V8 to 3.5.3

v0.7.4-release
Ryan Dahl 14 years ago
parent
commit eeece4f5ea
  1. deps/v8/ChangeLog (45)
  2. deps/v8/include/v8.h (7)
  3. deps/v8/src/api.cc (52)
  4. deps/v8/src/apinatives.js (12)
  5. deps/v8/src/arm/full-codegen-arm.cc (2)
  6. deps/v8/src/arm/lithium-codegen-arm.cc (4)
  7. deps/v8/src/arm/stub-cache-arm.cc (15)
  8. deps/v8/src/code-stubs.cc (74)
  9. deps/v8/src/code-stubs.h (60)
  10. deps/v8/src/d8.cc (40)
  11. deps/v8/src/debug.cc (3)
  12. deps/v8/src/extensions/experimental/number-format.cc (18)
  13. deps/v8/src/factory.cc (8)
  14. deps/v8/src/factory.h (2)
  15. deps/v8/src/flag-definitions.h (3)
  16. deps/v8/src/full-codegen.cc (19)
  17. deps/v8/src/full-codegen.h (53)
  18. deps/v8/src/handles.cc (56)
  19. deps/v8/src/handles.h (14)
  20. deps/v8/src/heap-inl.h (14)
  21. deps/v8/src/heap.cc (32)
  22. deps/v8/src/heap.h (10)
  23. deps/v8/src/hydrogen-instructions.cc (58)
  24. deps/v8/src/hydrogen-instructions.h (52)
  25. deps/v8/src/hydrogen.cc (122)
  26. deps/v8/src/hydrogen.h (10)
  27. deps/v8/src/ia32/code-stubs-ia32.cc (180)
  28. deps/v8/src/ia32/codegen-ia32.cc (1)
  29. deps/v8/src/ia32/cpu-ia32.cc (5)
  30. deps/v8/src/ia32/full-codegen-ia32.cc (131)
  31. deps/v8/src/ia32/lithium-codegen-ia32.cc (157)
  32. deps/v8/src/ia32/lithium-ia32.cc (11)
  33. deps/v8/src/ia32/lithium-ia32.h (5)
  34. deps/v8/src/ia32/macro-assembler-ia32.cc (7)
  35. deps/v8/src/ia32/macro-assembler-ia32.h (3)
  36. deps/v8/src/ia32/stub-cache-ia32.cc (8)
  37. deps/v8/src/ic.cc (37)
  38. deps/v8/src/ic.h (12)
  39. deps/v8/src/json-parser.h (3)
  40. deps/v8/src/log-utils.cc (2)
  41. deps/v8/src/log.cc (1)
  42. deps/v8/src/macros.py (3)
  43. deps/v8/src/messages.js (1)
  44. deps/v8/src/mips/full-codegen-mips.cc (2)
  45. deps/v8/src/mips/stub-cache-mips.cc (15)
  46. deps/v8/src/objects-inl.h (70)
  47. deps/v8/src/objects-printer.cc (33)
  48. deps/v8/src/objects.cc (482)
  49. deps/v8/src/objects.h (116)
  50. deps/v8/src/parser.cc (58)
  51. deps/v8/src/platform-cygwin.cc (6)
  52. deps/v8/src/platform-linux.cc (38)
  53. deps/v8/src/platform-posix.cc (6)
  54. deps/v8/src/platform-win32.cc (6)
  55. deps/v8/src/platform.h (3)
  56. deps/v8/src/proxy.js (4)
  57. deps/v8/src/runtime.cc (88)
  58. deps/v8/src/runtime.h (1)
  59. deps/v8/src/scopes.h (16)
  60. deps/v8/src/spaces.cc (1)
  61. deps/v8/src/third_party/valgrind/valgrind.h (2790)
  62. deps/v8/src/type-info.cc (7)
  63. deps/v8/src/type-info.h (5)
  64. deps/v8/src/utils.h (25)
  65. deps/v8/src/v8natives.js (103)
  66. deps/v8/src/version.cc (4)
  67. deps/v8/src/x64/code-stubs-x64.cc (177)
  68. deps/v8/src/x64/codegen-x64.cc (1)
  69. deps/v8/src/x64/cpu-x64.cc (3)
  70. deps/v8/src/x64/lithium-codegen-x64.cc (150)
  71. deps/v8/src/x64/lithium-x64.cc (6)
  72. deps/v8/src/x64/stub-cache-x64.cc (7)
  73. deps/v8/test/benchmarks/testcfg.py (2)
  74. deps/v8/test/cctest/SConscript (1)
  75. deps/v8/test/cctest/cctest.gyp (1)
  76. deps/v8/test/cctest/test-api.cc (114)
  77. deps/v8/test/cctest/test-ast.cc (11)
  78. deps/v8/test/cctest/test-compiler.cc (2)
  79. deps/v8/test/cctest/test-dictionary.cc (85)
  80. deps/v8/test/cctest/test-list.cc (13)
  81. deps/v8/test/cctest/test-serialize.cc (8)
  82. deps/v8/test/es5conform/testcfg.py (2)
  83. deps/v8/test/message/testcfg.py (2)
  84. deps/v8/test/mjsunit/compiler/regress-lbranch-double.js (40)
  85. deps/v8/test/mjsunit/function-names.js (2)
  86. deps/v8/test/mjsunit/harmony/proxies.js (217)
  87. deps/v8/test/mjsunit/math-floor.js (27)
  88. deps/v8/test/mjsunit/math-round.js (17)
  89. deps/v8/test/mjsunit/regress/regress-1563.js (21)
  90. deps/v8/test/mjsunit/regress/regress-1582.js (47)
  91. deps/v8/test/mjsunit/regress/regress-91008.js (43)
  92. deps/v8/test/mjsunit/regress/regress-91010.js (18)
  93. deps/v8/test/mjsunit/regress/regress-91013.js (51)
  94. deps/v8/test/mjsunit/regress/regress-91120.js (48)
  95. deps/v8/test/mjsunit/scope-calls-eval.js (65)
  96. deps/v8/test/mjsunit/testcfg.py (2)
  97. deps/v8/test/mjsunit/unbox-double-arrays.js (63)
  98. deps/v8/test/mozilla/testcfg.py (2)
  99. deps/v8/test/sputnik/testcfg.py (2)
  100. deps/v8/test/test262/testcfg.py (2)

deps/v8/ChangeLog (45)

@ -1,8 +1,51 @@
2011-08-03: Version 3.5.3
MIPS: Port of fix to ClassOf check from ARM.
Patch from Paul Lind <plind44@gmail.com>.
Stopped using mprotect on Cygwin.
Avoided uninitialized member warning on gcc 4.3.4
Both patches by Bert Belder.
Bug fixes and performance improvements on all platforms.

2011-08-01: Version 3.5.2
Performance improvements on all platforms.

2011-07-28: Version 3.5.1
Fixed setting the readonly flag on the prototype property using the
API call FunctionTemplate::SetPrototypeAttributes (issue 1539).
Changed the tools/test.py script to use d8 instead of shell for
testing.
Fixed crash in ToBooleanStub when GC happens during invocation.
Enabled automatic unboxing of double arrays.
Performance improvements on all platforms.

2011-07-25: Version 3.5.0
Implemented Object.prototype.{hasOwnProperty, propertyIsEnumerable} for
proxies.
Removed logging to memory support.
Bugfixes and performance work.

2011-07-20: Version 3.4.14
Fix the debugger for strict-mode functions. (Chromium issue 89236)
Add GetPropertyAttribute method for Object in the API. (Patch by Peter Varga)
Add GetPropertyAttribute method for Object in the API. (Patch by
Peter Varga)
Fix -Wunused-but-set-variable for gcc-4.6 on x64. (Issue 1291)

deps/v8/include/v8.h (7)

@ -2231,11 +2231,10 @@ class V8EXPORT FunctionTemplate : public Template {
void SetHiddenPrototype(bool value);
/**
* Sets the property attributes of the 'prototype' property of functions
* created from this FunctionTemplate. Can be any combination of ReadOnly,
* DontEnum and DontDelete.
* Sets the ReadOnly flag in the attributes of the 'prototype' property
* of functions created from this FunctionTemplate to true.
*/
void SetPrototypeAttributes(int attributes);
void ReadOnlyPrototype();
/**
* Returns true if the given object is an instance of this function
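
The old attribute-mask API is replaced by a single boolean toggle: embedders that previously passed v8::ReadOnly to SetPrototypeAttributes() now call ReadOnlyPrototype(). A minimal embedder sketch against the 3.5-era API (illustrative only; assumes a v8::Context is already entered):

v8::HandleScope scope;
v8::Local<v8::FunctionTemplate> tmpl = v8::FunctionTemplate::New();
// Before this commit: tmpl->SetPrototypeAttributes(v8::ReadOnly);
tmpl->ReadOnlyPrototype();  // functions created from tmpl get a read-only 'prototype'
v8::Local<v8::Function> fn = tmpl->GetFunction();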

deps/v8/src/api.cc (52)

@ -877,7 +877,6 @@ static void InitializeFunctionTemplate(
i::Handle<i::FunctionTemplateInfo> info) {
info->set_tag(i::Smi::FromInt(Consts::FUNCTION_TEMPLATE));
info->set_flag(0);
info->set_prototype_attributes(i::Smi::FromInt(v8::None));
}
@ -1100,14 +1099,13 @@ void FunctionTemplate::SetHiddenPrototype(bool value) {
}
void FunctionTemplate::SetPrototypeAttributes(int attributes) {
void FunctionTemplate::ReadOnlyPrototype() {
i::Isolate* isolate = Utils::OpenHandle(this)->GetIsolate();
if (IsDeadCheck(isolate, "v8::FunctionTemplate::SetPrototypeAttributes()")) {
return;
}
ENTER_V8(isolate);
Utils::OpenHandle(this)->set_prototype_attributes(
i::Smi::FromInt(attributes));
Utils::OpenHandle(this)->set_read_only_prototype(true);
}
@ -3194,39 +3192,7 @@ int v8::Object::GetIdentityHash() {
ENTER_V8(isolate);
i::HandleScope scope(isolate);
i::Handle<i::JSObject> self = Utils::OpenHandle(this);
i::Handle<i::Object> hidden_props_obj(i::GetHiddenProperties(self, true));
if (!hidden_props_obj->IsJSObject()) {
// We failed to create hidden properties. That's a detached
// global proxy.
ASSERT(hidden_props_obj->IsUndefined());
return 0;
}
i::Handle<i::JSObject> hidden_props =
i::Handle<i::JSObject>::cast(hidden_props_obj);
i::Handle<i::String> hash_symbol = isolate->factory()->identity_hash_symbol();
if (hidden_props->HasLocalProperty(*hash_symbol)) {
i::Handle<i::Object> hash = i::GetProperty(hidden_props, hash_symbol);
CHECK(!hash.is_null());
CHECK(hash->IsSmi());
return i::Smi::cast(*hash)->value();
}
int hash_value;
int attempts = 0;
do {
// Generate a random 32-bit hash value but limit range to fit
// within a smi.
hash_value = i::V8::Random(self->GetIsolate()) & i::Smi::kMaxValue;
attempts++;
} while (hash_value == 0 && attempts < 30);
hash_value = hash_value != 0 ? hash_value : 1; // never return 0
CHECK(!i::SetLocalPropertyIgnoreAttributes(
hidden_props,
hash_symbol,
i::Handle<i::Object>(i::Smi::FromInt(hash_value)),
static_cast<PropertyAttributes>(None)).is_null());
return hash_value;
return i::GetIdentityHash(self);
}
@ -3237,7 +3203,9 @@ bool v8::Object::SetHiddenValue(v8::Handle<v8::String> key,
ENTER_V8(isolate);
i::HandleScope scope(isolate);
i::Handle<i::JSObject> self = Utils::OpenHandle(this);
i::Handle<i::Object> hidden_props(i::GetHiddenProperties(self, true));
i::Handle<i::Object> hidden_props(i::GetHiddenProperties(
self,
i::JSObject::ALLOW_CREATION));
i::Handle<i::Object> key_obj = Utils::OpenHandle(*key);
i::Handle<i::Object> value_obj = Utils::OpenHandle(*value);
EXCEPTION_PREAMBLE(isolate);
@ -3259,7 +3227,9 @@ v8::Local<v8::Value> v8::Object::GetHiddenValue(v8::Handle<v8::String> key) {
return Local<v8::Value>());
ENTER_V8(isolate);
i::Handle<i::JSObject> self = Utils::OpenHandle(this);
i::Handle<i::Object> hidden_props(i::GetHiddenProperties(self, false));
i::Handle<i::Object> hidden_props(i::GetHiddenProperties(
self,
i::JSObject::OMIT_CREATION));
if (hidden_props->IsUndefined()) {
return v8::Local<v8::Value>();
}
@ -3281,7 +3251,9 @@ bool v8::Object::DeleteHiddenValue(v8::Handle<v8::String> key) {
ENTER_V8(isolate);
i::HandleScope scope(isolate);
i::Handle<i::JSObject> self = Utils::OpenHandle(this);
i::Handle<i::Object> hidden_props(i::GetHiddenProperties(self, false));
i::Handle<i::Object> hidden_props(i::GetHiddenProperties(
self,
i::JSObject::OMIT_CREATION));
if (hidden_props->IsUndefined()) {
return true;
}

deps/v8/src/apinatives.js (12)

@ -73,14 +73,10 @@ function InstantiateFunction(data, name) {
if (name) %FunctionSetName(fun, name);
cache[serialNumber] = fun;
var prototype = %GetTemplateField(data, kApiPrototypeTemplateOffset);
var attributes = %GetTemplateField(data, kApiPrototypeAttributesOffset);
if (attributes != NONE) {
%IgnoreAttributesAndSetProperty(
fun, "prototype",
prototype ? Instantiate(prototype) : {},
attributes);
} else {
fun.prototype = prototype ? Instantiate(prototype) : {};
var flags = %GetTemplateField(data, kApiFlagOffset);
fun.prototype = prototype ? Instantiate(prototype) : {};
if (flags & (1 << kReadOnlyPrototypeBit)) {
%FunctionSetReadOnlyPrototype(fun);
}
%SetProperty(fun.prototype, "constructor", fun, DONT_ENUM);
var parent = %GetTemplateField(data, kApiParentTemplateOffset);

deps/v8/src/arm/full-codegen-arm.cc (2)

@ -2753,7 +2753,7 @@ void FullCodeGenerator::EmitClassOf(ZoneList<Expression*>* args) {
// Objects with a non-function constructor have class 'Object'.
__ bind(&non_function_constructor);
__ LoadRoot(r0, Heap::kfunction_class_symbolRootIndex);
__ LoadRoot(r0, Heap::kObject_symbolRootIndex);
__ jmp(&done);
// Non-JS objects have class null.

deps/v8/src/arm/lithium-codegen-arm.cc (4)

@ -1560,7 +1560,7 @@ void LCodeGen::DoBranch(LBranch* instr) {
// Test the double value. Zero and NaN are false.
__ VFPCompareAndLoadFlags(reg, 0.0, scratch);
__ tst(scratch, Operand(kVFPZConditionFlagBit | kVFPVConditionFlagBit));
EmitBranch(true_block, false_block, ne);
EmitBranch(true_block, false_block, eq);
} else {
ASSERT(r.IsTagged());
Register reg = ToRegister(instr->InputAt(0));
@ -4070,7 +4070,7 @@ void LCodeGen::DoClampTToUint8(LClampTToUint8* instr) {
// conversions.
__ cmp(input_reg, Operand(factory()->undefined_value()));
DeoptimizeIf(ne, instr->environment());
__ movt(input_reg, 0);
__ mov(result_reg, Operand(0));
__ jmp(&done);
// Heap number

deps/v8/src/arm/stub-cache-arm.cc (15)

@ -4399,11 +4399,18 @@ void KeyedStoreStubCompiler::GenerateStoreFastDoubleElement(
} else {
destination = FloatingPointHelper::kCoreRegisters;
}
__ SmiUntag(value_reg, value_reg);
Register untagged_value = receiver_reg;
__ SmiUntag(untagged_value, value_reg);
FloatingPointHelper::ConvertIntToDouble(
masm, value_reg, destination,
d0, mantissa_reg, exponent_reg, // These are: double_dst, dst1, dst2.
scratch4, s2); // These are: scratch2, single_scratch.
masm,
untagged_value,
destination,
d0,
mantissa_reg,
exponent_reg,
scratch4,
s2);
if (destination == FloatingPointHelper::kVFPRegisters) {
CpuFeatures::Scope scope(VFP3);
__ vstr(d0, scratch, 0);

deps/v8/src/code-stubs.cc (74)

@ -329,4 +329,78 @@ void CallFunctionStub::PrintName(StringStream* stream) {
stream->Add("CallFunctionStub_Args%d%s%s", argc_, in_loop_name, flags_name);
}
void ToBooleanStub::PrintName(StringStream* stream) {
stream->Add("ToBooleanStub_");
types_.Print(stream);
}
void ToBooleanStub::Types::Print(StringStream* stream) const {
if (IsEmpty()) stream->Add("None");
if (Contains(UNDEFINED)) stream->Add("Undefined");
if (Contains(BOOLEAN)) stream->Add("Bool");
if (Contains(SMI)) stream->Add("Smi");
if (Contains(NULL_TYPE)) stream->Add("Null");
if (Contains(SPEC_OBJECT)) stream->Add("SpecObject");
if (Contains(STRING)) stream->Add("String");
if (Contains(HEAP_NUMBER)) stream->Add("HeapNumber");
if (Contains(INTERNAL_OBJECT)) stream->Add("InternalObject");
}
void ToBooleanStub::Types::TraceTransition(Types to) const {
if (!FLAG_trace_ic) return;
char buffer[100];
NoAllocationStringAllocator allocator(buffer,
static_cast<unsigned>(sizeof(buffer)));
StringStream stream(&allocator);
stream.Add("[ToBooleanIC (");
Print(&stream);
stream.Add("->");
to.Print(&stream);
stream.Add(")]\n");
stream.OutputToStdOut();
}
bool ToBooleanStub::Types::Record(Handle<Object> object) {
if (object->IsUndefined()) {
Add(UNDEFINED);
return false;
} else if (object->IsBoolean()) {
Add(BOOLEAN);
return object->IsTrue();
} else if (object->IsNull()) {
Add(NULL_TYPE);
return false;
} else if (object->IsSmi()) {
Add(SMI);
return Smi::cast(*object)->value() != 0;
} else if (object->IsSpecObject()) {
Add(SPEC_OBJECT);
return !object->IsUndetectableObject();
} else if (object->IsString()) {
Add(STRING);
return !object->IsUndetectableObject() &&
String::cast(*object)->length() != 0;
} else if (object->IsHeapNumber()) {
Add(HEAP_NUMBER);
double value = HeapNumber::cast(*object)->value();
return !object->IsUndetectableObject() && value != 0 && !isnan(value);
} else {
Add(INTERNAL_OBJECT);
return !object->IsUndetectableObject();
}
}
bool ToBooleanStub::Types::NeedsMap() const {
return Contains(ToBooleanStub::SPEC_OBJECT)
|| Contains(ToBooleanStub::STRING)
|| Contains(ToBooleanStub::HEAP_NUMBER)
|| Contains(ToBooleanStub::INTERNAL_OBJECT);
}
} } // namespace v8::internal

deps/v8/src/code-stubs.h (60)

@ -900,14 +900,68 @@ class KeyedStoreElementStub : public CodeStub {
class ToBooleanStub: public CodeStub {
public:
explicit ToBooleanStub(Register tos) : tos_(tos) { }
enum Type {
UNDEFINED,
BOOLEAN,
NULL_TYPE,
SMI,
SPEC_OBJECT,
STRING,
HEAP_NUMBER,
INTERNAL_OBJECT,
NUMBER_OF_TYPES
};
// At most 8 different types can be distinguished, because the Code object
// only has room for a single byte to hold a set of these types. :-P
STATIC_ASSERT(NUMBER_OF_TYPES <= 8);
class Types {
public:
Types() {}
explicit Types(byte bits) : set_(bits) {}
bool IsEmpty() const { return set_.IsEmpty(); }
bool IsAll() const { return ToByte() == ((1 << NUMBER_OF_TYPES) - 1); }
bool Contains(Type type) const { return set_.Contains(type); }
void Add(Type type) { set_.Add(type); }
byte ToByte() const { return set_.ToIntegral(); }
void Print(StringStream* stream) const;
void TraceTransition(Types to) const;
bool Record(Handle<Object> object);
bool NeedsMap() const;
private:
EnumSet<Type, byte> set_;
};
static Types no_types() { return Types(); }
static Types all_types() { return Types((1 << NUMBER_OF_TYPES) - 1); }
explicit ToBooleanStub(Register tos, Types types = Types())
: tos_(tos), types_(types) { }
void Generate(MacroAssembler* masm);
virtual int GetCodeKind() { return Code::TO_BOOLEAN_IC; }
virtual void PrintName(StringStream* stream);
private:
Register tos_;
Major MajorKey() { return ToBoolean; }
int MinorKey() { return tos_.code(); }
int MinorKey() { return (tos_.code() << NUMBER_OF_TYPES) | types_.ToByte(); }
virtual void FinishCode(Code* code) {
code->set_to_boolean_state(types_.ToByte());
}
void CheckOddball(MacroAssembler* masm,
Type type,
Heap::RootListIndex value,
bool result,
Label* patch);
void GenerateTypeTransition(MacroAssembler* masm);
Register tos_;
Types types_;
};
} } // namespace v8::internal
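
The comment above is the key constraint: the set of observed types must fit in a single byte of the Code object, and the stub's minor key mixes that byte with the register code. A self-contained illustration of the same bit-packing (not V8 source, just a sketch):

#include <cassert>
#include <cstdint>

enum Type { UNDEFINED, BOOLEAN, NULL_TYPE, SMI, SPEC_OBJECT, STRING,
            HEAP_NUMBER, INTERNAL_OBJECT, NUMBER_OF_TYPES };

struct Types {                        // mirrors EnumSet<Type, byte>
  uint8_t bits = 0;
  void Add(Type t)            { bits |= static_cast<uint8_t>(1u << t); }
  bool Contains(Type t) const { return (bits & (1u << t)) != 0; }
  bool IsAll() const          { return bits == (1u << NUMBER_OF_TYPES) - 1; }
};

int MinorKey(int register_code, Types types) {
  // Register code in the high bits, observed-type byte in the low eight bits.
  return (register_code << NUMBER_OF_TYPES) | types.bits;
}

int main() {
  Types seen;
  seen.Add(SMI);
  seen.Add(HEAP_NUMBER);
  assert(seen.Contains(SMI) && !seen.Contains(STRING));
  assert(MinorKey(0 /* e.g. eax */, seen) == ((1 << SMI) | (1 << HEAP_NUMBER)));
  return 0;
}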

deps/v8/src/d8.cc (40)

@ -199,7 +199,7 @@ Handle<Value> Shell::Write(const Arguments& args) {
printf(" ");
}
v8::String::Utf8Value str(args[i]);
int n = fwrite(*str, sizeof(**str), str.length(), stdout);
int n = static_cast<int>(fwrite(*str, sizeof(**str), str.length(), stdout));
if (n != str.length()) {
printf("Error in fwrite\n");
exit(1);
@ -226,17 +226,24 @@ Handle<Value> Shell::ReadLine(const Arguments& args) {
static const int kBufferSize = 256;
char buffer[kBufferSize];
Handle<String> accumulator = String::New("");
bool linebreak;
int length;
do { // Repeat if the line ends with an escape '\'.
// fgets got an error. Just give up.
while (true) {
// Continue reading if the line ends with an escape '\\' or the line has
// not been fully read into the buffer yet (does not end with '\n').
// If fgets gets an error, just give up.
if (fgets(buffer, kBufferSize, stdin) == NULL) return Null();
length = strlen(buffer);
linebreak = (length > 1 && buffer[length-2] == '\\');
if (linebreak) buffer[length-2] = '\n';
accumulator = String::Concat(accumulator, String::New(buffer, length-1));
} while (linebreak);
return accumulator;
length = static_cast<int>(strlen(buffer));
if (length == 0) {
return accumulator;
} else if (buffer[length-1] != '\n') {
accumulator = String::Concat(accumulator, String::New(buffer, length));
} else if (length > 1 && buffer[length-2] == '\\') {
buffer[length-2] = '\n';
accumulator = String::Concat(accumulator, String::New(buffer, length-1));
} else {
return String::Concat(accumulator, String::New(buffer, length-1));
}
}
}
@ -299,9 +306,12 @@ Handle<Value> Shell::CreateExternalArray(const Arguments& args,
Persistent<Object> persistent_array = Persistent<Object>::New(array);
persistent_array.MakeWeak(data, ExternalArrayWeakCallback);
persistent_array.MarkIndependent();
array->SetIndexedPropertiesToExternalArrayData(data, type, length);
array->Set(String::New("length"), Int32::New(length), ReadOnly);
array->Set(String::New("BYTES_PER_ELEMENT"), Int32::New(element_size));
array->SetIndexedPropertiesToExternalArrayData(data, type,
static_cast<int>(length));
array->Set(String::New("length"),
Int32::New(static_cast<int32_t>(length)), ReadOnly);
array->Set(String::New("BYTES_PER_ELEMENT"),
Int32::New(static_cast<int32_t>(element_size)));
return array;
}
@ -790,7 +800,7 @@ static char* ReadChars(const char* name, int* size_out) {
char* chars = new char[size + 1];
chars[size] = '\0';
for (int i = 0; i < size;) {
int read = fread(&chars[i], 1, size - i, file);
int read = static_cast<int>(fread(&chars[i], 1, size - i, file));
i += read;
}
fclose(file);
@ -981,7 +991,7 @@ Handle<String> SourceGroup::ReadFile(const char* name) {
char* chars = new char[size + 1];
chars[size] = '\0';
for (int i = 0; i < size;) {
int read = fread(&chars[i], 1, size - i, file);
int read = static_cast<int>(fread(&chars[i], 1, size - i, file));
i += read;
}
fclose(file);

deps/v8/src/debug.cc (3)

@ -169,7 +169,8 @@ void BreakLocationIterator::Next() {
if ((code->is_inline_cache_stub() &&
!code->is_binary_op_stub() &&
!code->is_unary_op_stub() &&
!code->is_compare_ic_stub()) ||
!code->is_compare_ic_stub() &&
!code->is_to_boolean_ic_stub()) ||
RelocInfo::IsConstructCall(rmode())) {
break_point_++;
return;

deps/v8/src/extensions/experimental/number-format.cc (18)

@ -36,6 +36,8 @@
#include "unicode/numfmt.h"
#include "unicode/uchar.h"
#include "unicode/ucurr.h"
#include "unicode/unum.h"
#include "unicode/uversion.h"
namespace v8 {
namespace internal {
@ -231,6 +233,8 @@ static icu::DecimalFormat* CreateNumberFormat(v8::Handle<v8::String> locale,
}
// Generates ICU number format pattern from given skeleton.
// TODO(cira): Remove once ICU includes equivalent method
// (see http://bugs.icu-project.org/trac/ticket/8610).
static icu::DecimalFormat* CreateFormatterFromSkeleton(
const icu::Locale& icu_locale,
const icu::UnicodeString& skeleton,
@ -251,6 +255,7 @@ static icu::DecimalFormat* CreateFormatterFromSkeleton(
// Case of non-consecutive U+00A4 is taken care of in i18n.js.
int32_t end_index = skeleton.lastIndexOf(currency_symbol, index);
#if (U_ICU_VERSION_MAJOR_NUM == 4) && (U_ICU_VERSION_MINOR_NUM <= 6)
icu::NumberFormat::EStyles style;
switch (end_index - index) {
case 0:
@ -262,6 +267,19 @@ static icu::DecimalFormat* CreateFormatterFromSkeleton(
default:
style = icu::NumberFormat::kPluralCurrencyStyle;
}
#else // ICU version is 4.8 or above (we ignore versions below 4.0).
UNumberFormatStyle style;
switch (end_index - index) {
case 0:
style = UNUM_CURRENCY;
break;
case 1:
style = UNUM_CURRENCY_ISO;
break;
default:
style = UNUM_CURRENCY_PLURAL;
}
#endif
base_format = static_cast<icu::DecimalFormat*>(
icu::NumberFormat::createInstance(icu_locale, style, *status));

deps/v8/src/factory.cc (8)

@ -84,6 +84,14 @@ Handle<NumberDictionary> Factory::NewNumberDictionary(int at_least_space_for) {
}
Handle<ObjectHashTable> Factory::NewObjectHashTable(int at_least_space_for) {
ASSERT(0 <= at_least_space_for);
CALL_HEAP_FUNCTION(isolate(),
ObjectHashTable::Allocate(at_least_space_for),
ObjectHashTable);
}
Handle<DescriptorArray> Factory::NewDescriptorArray(int number_of_descriptors) {
ASSERT(0 <= number_of_descriptors);
CALL_HEAP_FUNCTION(isolate(),

deps/v8/src/factory.h (2)

@ -58,6 +58,8 @@ class Factory {
Handle<StringDictionary> NewStringDictionary(int at_least_space_for);
Handle<ObjectHashTable> NewObjectHashTable(int at_least_space_for);
Handle<DescriptorArray> NewDescriptorArray(int number_of_descriptors);
Handle<DeoptimizationInputData> NewDeoptimizationInputData(
int deopt_entry_count,

deps/v8/src/flag-definitions.h (3)

@ -100,7 +100,7 @@ private:
DEFINE_bool(harmony_proxies, false, "enable harmony proxies")
// Flags for experimental implementation features.
DEFINE_bool(unbox_double_arrays, false, "automatically unbox arrays of doubles")
DEFINE_bool(unbox_double_arrays, true, "automatically unbox arrays of doubles")
// Flags for Crankshaft.
#ifdef V8_TARGET_ARCH_MIPS
@ -400,6 +400,7 @@ DEFINE_bool(print_json_ast, false, "print source AST as JSON")
DEFINE_bool(print_builtin_json_ast, false,
"print source AST for builtins as JSON")
DEFINE_string(stop_at, "", "function name where to insert a breakpoint")
DEFINE_bool(verify_stack_height, false, "verify stack height tracing on ia32")
// compiler.cc
DEFINE_bool(print_builtin_scopes, false, "print scopes for builtins")

deps/v8/src/full-codegen.cc (19)

@ -437,6 +437,7 @@ void FullCodeGenerator::AccumulatorValueContext::Plug(Register reg) const {
void FullCodeGenerator::StackValueContext::Plug(Register reg) const {
__ push(reg);
codegen()->increment_stack_height();
}
@ -450,11 +451,13 @@ void FullCodeGenerator::TestContext::Plug(Register reg) const {
void FullCodeGenerator::EffectContext::PlugTOS() const {
__ Drop(1);
codegen()->decrement_stack_height();
}
void FullCodeGenerator::AccumulatorValueContext::PlugTOS() const {
__ pop(result_register());
codegen()->decrement_stack_height();
}
@ -465,6 +468,7 @@ void FullCodeGenerator::StackValueContext::PlugTOS() const {
void FullCodeGenerator::TestContext::PlugTOS() const {
// For simplicity we always test the accumulator register.
__ pop(result_register());
codegen()->decrement_stack_height();
codegen()->PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
codegen()->DoTest(this);
}
@ -960,6 +964,7 @@ void FullCodeGenerator::VisitEnterWithContextStatement(
VisitForStackValue(stmt->expression());
PushFunctionArgumentForContextAllocation();
__ CallRuntime(Runtime::kPushWithContext, 2);
decrement_stack_height();
StoreToFrameField(StandardFrameConstants::kContextOffset, context_register());
}
@ -1128,8 +1133,10 @@ void FullCodeGenerator::VisitTryCatchStatement(TryCatchStatement* stmt) {
{
TryCatch try_block(this, &catch_entry);
__ PushTryHandler(IN_JAVASCRIPT, TRY_CATCH_HANDLER);
increment_stack_height(StackHandlerConstants::kSize / kPointerSize);
Visit(stmt->try_block());
__ PopTryHandler();
decrement_stack_height(StackHandlerConstants::kSize / kPointerSize);
}
__ bind(&done);
}
@ -1161,6 +1168,10 @@ void FullCodeGenerator::VisitTryFinallyStatement(TryFinallyStatement* stmt) {
// cooked before GC.
Label finally_entry;
Label try_handler_setup;
const int original_stack_height = stack_height();
const int finally_block_stack_height = original_stack_height + 2;
const int try_block_stack_height = original_stack_height + 4;
STATIC_ASSERT(StackHandlerConstants::kSize / kPointerSize == 4);
// Setup the try-handler chain. Use a call to
// Jump to try-handler setup and try-block code. Use call to put try-handler
@ -1182,6 +1193,7 @@ void FullCodeGenerator::VisitTryFinallyStatement(TryFinallyStatement* stmt) {
// Finally block implementation.
Finally finally_block(this);
EnterFinallyBlock();
set_stack_height(finally_block_stack_height);
Visit(stmt->finally_block());
ExitFinallyBlock(); // Return to the calling code.
}
@ -1191,8 +1203,10 @@ void FullCodeGenerator::VisitTryFinallyStatement(TryFinallyStatement* stmt) {
// Setup try handler (stack pointer registers).
TryFinally try_block(this, &finally_entry);
__ PushTryHandler(IN_JAVASCRIPT, TRY_FINALLY_HANDLER);
set_stack_height(try_block_stack_height);
Visit(stmt->try_block());
__ PopTryHandler();
set_stack_height(original_stack_height);
}
// Execute the finally block on the way out. Clobber the unpredictable
// value in the accumulator with one that's safe for GC. The finally
@ -1222,6 +1236,7 @@ void FullCodeGenerator::VisitConditional(Conditional* expr) {
__ bind(&true_case);
SetExpressionPosition(expr->then_expression(),
expr->then_expression_position());
int start_stack_height = stack_height();
if (context()->IsTest()) {
const TestContext* for_test = TestContext::cast(context());
VisitForControl(expr->then_expression(),
@ -1235,6 +1250,7 @@ void FullCodeGenerator::VisitConditional(Conditional* expr) {
PrepareForBailoutForId(expr->ElseId(), NO_REGISTERS);
__ bind(&false_case);
set_stack_height(start_stack_height);
if (context()->IsTest()) ForwardBailoutToChild(expr);
SetExpressionPosition(expr->else_expression(),
expr->else_expression_position());
@ -1275,8 +1291,11 @@ void FullCodeGenerator::VisitSharedFunctionInfoLiteral(
void FullCodeGenerator::VisitThrow(Throw* expr) {
Comment cmnt(masm_, "[ Throw");
// Throw has no effect on the stack height or the current expression context.
// Usually the expression context is null, because throw is a statement.
VisitForStackValue(expr->exception());
__ CallRuntime(Runtime::kThrow, 1);
decrement_stack_height();
// Never returns here.
}

deps/v8/src/full-codegen.h (53)

@ -83,6 +83,7 @@ class FullCodeGenerator: public AstVisitor {
scope_(NULL),
nesting_stack_(NULL),
loop_depth_(0),
stack_height_(0),
context_(NULL),
bailout_entries_(0),
stack_checks_(2), // There's always at least one.
@ -519,6 +520,35 @@ class FullCodeGenerator: public AstVisitor {
loop_depth_--;
}
#if defined(V8_TARGET_ARCH_IA32)
int stack_height() { return stack_height_; }
void set_stack_height(int depth) { stack_height_ = depth; }
void increment_stack_height() { stack_height_++; }
void increment_stack_height(int delta) { stack_height_ += delta; }
void decrement_stack_height() {
if (FLAG_verify_stack_height) {
ASSERT(stack_height_ > 0);
}
stack_height_--;
}
void decrement_stack_height(int delta) {
stack_height_-= delta;
if (FLAG_verify_stack_height) {
ASSERT(stack_height_ >= 0);
}
}
// Call this function only if FLAG_verify_stack_height is true.
void verify_stack_height(); // Generates a runtime check of esp - ebp.
#else
int stack_height() { return 0; }
void set_stack_height(int depth) {}
void increment_stack_height() {}
void increment_stack_height(int delta) {}
void decrement_stack_height() {}
void decrement_stack_height(int delta) {}
void verify_stack_height() {}
#endif // V8_TARGET_ARCH_IA32
MacroAssembler* masm() { return masm_; }
class ExpressionContext;
@ -578,6 +608,10 @@ class FullCodeGenerator: public AstVisitor {
virtual ~ExpressionContext() {
codegen_->set_new_context(old_);
if (FLAG_verify_stack_height) {
ASSERT_EQ(expected_stack_height_, codegen()->stack_height());
codegen()->verify_stack_height();
}
}
Isolate* isolate() const { return codegen_->isolate(); }
@ -631,6 +665,7 @@ class FullCodeGenerator: public AstVisitor {
FullCodeGenerator* codegen() const { return codegen_; }
MacroAssembler* masm() const { return masm_; }
MacroAssembler* masm_;
int expected_stack_height_; // The expected stack height esp - ebp on exit.
private:
const ExpressionContext* old_;
@ -640,7 +675,9 @@ class FullCodeGenerator: public AstVisitor {
class AccumulatorValueContext : public ExpressionContext {
public:
explicit AccumulatorValueContext(FullCodeGenerator* codegen)
: ExpressionContext(codegen) { }
: ExpressionContext(codegen) {
expected_stack_height_ = codegen->stack_height();
}
virtual void Plug(bool flag) const;
virtual void Plug(Register reg) const;
@ -661,7 +698,9 @@ class FullCodeGenerator: public AstVisitor {
class StackValueContext : public ExpressionContext {
public:
explicit StackValueContext(FullCodeGenerator* codegen)
: ExpressionContext(codegen) { }
: ExpressionContext(codegen) {
expected_stack_height_ = codegen->stack_height() + 1;
}
virtual void Plug(bool flag) const;
virtual void Plug(Register reg) const;
@ -690,7 +729,9 @@ class FullCodeGenerator: public AstVisitor {
condition_(condition),
true_label_(true_label),
false_label_(false_label),
fall_through_(fall_through) { }
fall_through_(fall_through) {
expected_stack_height_ = codegen->stack_height();
}
static const TestContext* cast(const ExpressionContext* context) {
ASSERT(context->IsTest());
@ -727,7 +768,10 @@ class FullCodeGenerator: public AstVisitor {
class EffectContext : public ExpressionContext {
public:
explicit EffectContext(FullCodeGenerator* codegen)
: ExpressionContext(codegen) { }
: ExpressionContext(codegen) {
expected_stack_height_ = codegen->stack_height();
}
virtual void Plug(bool flag) const;
virtual void Plug(Register reg) const;
@ -751,6 +795,7 @@ class FullCodeGenerator: public AstVisitor {
Label return_label_;
NestedStatement* nesting_stack_;
int loop_depth_;
int stack_height_;
const ExpressionContext* context_;
ZoneList<BailoutEntry> bailout_entries_;
ZoneList<BailoutEntry> stack_checks_;

deps/v8/src/handles.cc (56)

@ -422,43 +422,18 @@ Handle<Object> PreventExtensions(Handle<JSObject> object) {
Handle<Object> GetHiddenProperties(Handle<JSObject> obj,
bool create_if_needed) {
Isolate* isolate = obj->GetIsolate();
Object* holder = obj->BypassGlobalProxy();
if (holder->IsUndefined()) return isolate->factory()->undefined_value();
obj = Handle<JSObject>(JSObject::cast(holder), isolate);
if (obj->HasFastProperties()) {
// If the object has fast properties, check whether the first slot
// in the descriptor array matches the hidden symbol. Since the
// hidden symbols hash code is zero (and no other string has hash
// code zero) it will always occupy the first entry if present.
DescriptorArray* descriptors = obj->map()->instance_descriptors();
if ((descriptors->number_of_descriptors() > 0) &&
(descriptors->GetKey(0) == isolate->heap()->hidden_symbol()) &&
descriptors->IsProperty(0)) {
ASSERT(descriptors->GetType(0) == FIELD);
return Handle<Object>(obj->FastPropertyAt(descriptors->GetFieldIndex(0)),
isolate);
}
}
JSObject::HiddenPropertiesFlag flag) {
CALL_HEAP_FUNCTION(obj->GetIsolate(),
obj->GetHiddenProperties(flag),
Object);
}
// Only attempt to find the hidden properties in the local object and not
// in the prototype chain. Note that HasLocalProperty() can cause a GC in
// the general case in the presence of interceptors.
if (!obj->HasHiddenPropertiesObject()) {
// Hidden properties object not found. Allocate a new hidden properties
// object if requested. Otherwise return the undefined value.
if (create_if_needed) {
Handle<Object> hidden_obj =
isolate->factory()->NewJSObject(isolate->object_function());
CALL_HEAP_FUNCTION(isolate,
obj->SetHiddenPropertiesObject(*hidden_obj), Object);
} else {
return isolate->factory()->undefined_value();
}
}
return Handle<Object>(obj->GetHiddenPropertiesObject(), isolate);
int GetIdentityHash(Handle<JSObject> obj) {
CALL_AND_RETRY(obj->GetIsolate(),
obj->GetIdentityHash(JSObject::ALLOW_CREATION),
return Smi::cast(__object__)->value(),
return 0);
}
@ -908,6 +883,15 @@ Handle<FixedArray> GetEnumPropertyKeys(Handle<JSObject> object,
}
Handle<ObjectHashTable> PutIntoObjectHashTable(Handle<ObjectHashTable> table,
Handle<JSObject> key,
Handle<Object> value) {
CALL_HEAP_FUNCTION(table->GetIsolate(),
table->Put(*key, *value),
ObjectHashTable);
}
bool EnsureCompiled(Handle<SharedFunctionInfo> shared,
ClearExceptionFlag flag) {
return shared->is_compiled() || CompileLazyShared(shared, flag);

deps/v8/src/handles.h (14)

@ -264,9 +264,13 @@ Handle<Object> GetPrototype(Handle<Object> obj);
Handle<Object> SetPrototype(Handle<JSObject> obj, Handle<Object> value);
// Return the object's hidden properties object. If the object has no hidden
// properties and create_if_needed is true, then a new hidden property object
// will be allocated. Otherwise the Heap::undefined_value is returned.
Handle<Object> GetHiddenProperties(Handle<JSObject> obj, bool create_if_needed);
// properties and HiddenPropertiesFlag::ALLOW_CREATION is passed, then a new
// hidden property object will be allocated. Otherwise Heap::undefined_value
// is returned.
Handle<Object> GetHiddenProperties(Handle<JSObject> obj,
JSObject::HiddenPropertiesFlag flag);
int GetIdentityHash(Handle<JSObject> obj);
Handle<Object> DeleteElement(Handle<JSObject> obj, uint32_t index);
Handle<Object> DeleteProperty(Handle<JSObject> obj, Handle<String> prop);
@ -343,6 +347,10 @@ Handle<Object> SetPrototype(Handle<JSFunction> function,
Handle<Object> PreventExtensions(Handle<JSObject> object);
Handle<ObjectHashTable> PutIntoObjectHashTable(Handle<ObjectHashTable> table,
Handle<JSObject> key,
Handle<Object> value);
// Does lazy compilation of the given function. Returns true on success and
// false if the compilation resulted in a stack overflow.
enum ClearExceptionFlag { KEEP_EXCEPTION, CLEAR_EXCEPTION };
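
The new flag replaces the old boolean create_if_needed parameter and makes the intent explicit at call sites. The calling pattern, mirroring the api.cc changes above (internal V8 handles code, shown only as a sketch, not runnable on its own):

// Writing a hidden value: create the hidden-properties holder on demand.
i::Handle<i::Object> props(i::GetHiddenProperties(self, i::JSObject::ALLOW_CREATION));

// Reading or deleting: never allocate; undefined means nothing is stored.
i::Handle<i::Object> props2(i::GetHiddenProperties(self, i::JSObject::OMIT_CREATION));
if (props2->IsUndefined()) return v8::Local<v8::Value>();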

deps/v8/src/heap-inl.h (14)

@ -142,6 +142,11 @@ MaybeObject* Heap::CopyFixedArray(FixedArray* src) {
}
MaybeObject* Heap::CopyFixedDoubleArray(FixedDoubleArray* src) {
return CopyFixedDoubleArrayWithMap(src, src->map());
}
MaybeObject* Heap::AllocateRaw(int size_in_bytes,
AllocationSpace space,
AllocationSpace retry_space) {
@ -368,11 +373,7 @@ void Heap::MoveBlock(Address dst, Address src, int byte_size) {
int size_in_words = byte_size / kPointerSize;
if ((dst < src) || (dst >= (src + size_in_words))) {
ASSERT((dst >= (src + size_in_words)) ||
((OffsetFrom(reinterpret_cast<Address>(src)) -
OffsetFrom(reinterpret_cast<Address>(dst))) >= kPointerSize));
if ((dst < src) || (dst >= (src + byte_size))) {
Object** src_slot = reinterpret_cast<Object**>(src);
Object** dst_slot = reinterpret_cast<Object**>(dst);
Object** end_slot = src_slot + size_in_words;
@ -390,8 +391,7 @@ void Heap::MoveBlockToOldSpaceAndUpdateRegionMarks(Address dst,
Address src,
int byte_size) {
ASSERT(IsAligned(byte_size, kPointerSize));
ASSERT((dst >= (src + byte_size)) ||
((OffsetFrom(src) - OffsetFrom(dst)) >= kPointerSize));
ASSERT((dst < src) || (dst >= (src + byte_size)));
CopyBlockToOldSpaceAndUpdateRegionMarks(dst, src, byte_size);
}

deps/v8/src/heap.cc (32)

@ -3388,17 +3388,22 @@ MaybeObject* Heap::CopyJSObject(JSObject* source) {
object_size);
}
FixedArray* elements = FixedArray::cast(source->elements());
FixedArrayBase* elements = FixedArrayBase::cast(source->elements());
FixedArray* properties = FixedArray::cast(source->properties());
// Update elements if necessary.
if (elements->length() > 0) {
Object* elem;
{ MaybeObject* maybe_elem =
(elements->map() == fixed_cow_array_map()) ?
elements : CopyFixedArray(elements);
{ MaybeObject* maybe_elem;
if (elements->map() == fixed_cow_array_map()) {
maybe_elem = FixedArray::cast(elements);
} else if (source->HasFastDoubleElements()) {
maybe_elem = CopyFixedDoubleArray(FixedDoubleArray::cast(elements));
} else {
maybe_elem = CopyFixedArray(FixedArray::cast(elements));
}
if (!maybe_elem->ToObject(&elem)) return maybe_elem;
}
JSObject::cast(clone)->set_elements(FixedArray::cast(elem));
JSObject::cast(clone)->set_elements(FixedArrayBase::cast(elem));
}
// Update properties if necessary.
if (properties->length() > 0) {
@ -3757,6 +3762,23 @@ MaybeObject* Heap::CopyFixedArrayWithMap(FixedArray* src, Map* map) {
}
MaybeObject* Heap::CopyFixedDoubleArrayWithMap(FixedDoubleArray* src,
Map* map) {
int len = src->length();
Object* obj;
{ MaybeObject* maybe_obj = AllocateRawFixedDoubleArray(len, NOT_TENURED);
if (!maybe_obj->ToObject(&obj)) return maybe_obj;
}
HeapObject* dst = HeapObject::cast(obj);
dst->set_map(map);
CopyBlock(
dst->address() + FixedDoubleArray::kLengthOffset,
src->address() + FixedDoubleArray::kLengthOffset,
FixedDoubleArray::SizeFor(len) - FixedDoubleArray::kLengthOffset);
return obj;
}
MaybeObject* Heap::AllocateFixedArray(int length) {
ASSERT(length >= 0);
if (length == 0) return empty_fixed_array();

deps/v8/src/heap.h (10)

@ -617,6 +617,16 @@ class Heap {
// Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
MUST_USE_RESULT MaybeObject* CopyFixedArrayWithMap(FixedArray* src, Map* map);
// Make a copy of src and return it. Returns
// Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
MUST_USE_RESULT inline MaybeObject* CopyFixedDoubleArray(
FixedDoubleArray* src);
// Make a copy of src, set the map, and return the copy. Returns
// Failure::RetryAfterGC(requested_bytes, space) if the allocation failed.
MUST_USE_RESULT MaybeObject* CopyFixedDoubleArrayWithMap(
FixedDoubleArray* src, Map* map);
// Allocates a fixed array initialized with the hole values.
// Returns Failure::RetryAfterGC(requested_bytes, space) if the allocation
// failed.

deps/v8/src/hydrogen-instructions.cc (58)

@ -862,19 +862,10 @@ void HInstanceOf::PrintDataTo(StringStream* stream) {
Range* HValue::InferRange() {
if (representation().IsTagged()) {
// Tagged values are always in int32 range when converted to integer,
// but they can contain -0.
Range* result = new Range();
result->set_can_be_minus_zero(true);
return result;
} else if (representation().IsNone()) {
return NULL;
} else {
// Untagged integer32 cannot be -0 and we don't compute ranges for
// untagged doubles.
return new Range();
}
// Untagged integer32 cannot be -0, all other representations can.
Range* result = new Range();
result->set_can_be_minus_zero(!representation().IsInteger32());
return result;
}
@ -1230,6 +1221,30 @@ Range* HSar::InferRange() {
}
Range* HShr::InferRange() {
if (right()->IsConstant()) {
HConstant* c = HConstant::cast(right());
if (c->HasInteger32Value()) {
int shift_count = c->Integer32Value() & 0x1f;
if (left()->range()->CanBeNegative()) {
// Only compute bounds if the result always fits into an int32.
return (shift_count >= 1)
? new Range(0, static_cast<uint32_t>(0xffffffff) >> shift_count)
: new Range();
} else {
// For positive inputs we can use the >> operator.
Range* result = (left()->range() != NULL)
? left()->range()->Copy()
: new Range();
result->Sar(c->Integer32Value());
return result;
}
}
}
return HValue::InferRange();
}
Range* HShl::InferRange() {
if (right()->IsConstant()) {
HConstant* c = HConstant::cast(right());
@ -1798,11 +1813,6 @@ void HSimulate::Verify() {
}
void HBoundsCheck::Verify() {
HInstruction::Verify();
}
void HCheckSmi::Verify() {
HInstruction::Verify();
ASSERT(HasNoUses());
@ -1815,18 +1825,6 @@ void HCheckNonSmi::Verify() {
}
void HCheckInstanceType::Verify() {
HInstruction::Verify();
ASSERT(HasNoUses());
}
void HCheckMap::Verify() {
HInstruction::Verify();
ASSERT(HasNoUses());
}
void HCheckFunction::Verify() {
HInstruction::Verify();
ASSERT(HasNoUses());

deps/v8/src/hydrogen-instructions.h (52)

@ -184,6 +184,7 @@ class LChunkBuilder;
V(InobjectFields) \
V(BackingStoreFields) \
V(ArrayElements) \
V(DoubleArrayElements) \
V(SpecializedArrayElements) \
V(GlobalVars) \
V(Maps) \
@ -933,8 +934,12 @@ class HUnaryControlInstruction: public HTemplateControlInstruction<2, 1> {
class HBranch: public HUnaryControlInstruction {
public:
HBranch(HValue* value, HBasicBlock* true_target, HBasicBlock* false_target)
: HUnaryControlInstruction(value, true_target, false_target) {
HBranch(HValue* value,
HBasicBlock* true_target,
HBasicBlock* false_target,
ToBooleanStub::Types expected_input_types = ToBooleanStub::no_types())
: HUnaryControlInstruction(value, true_target, false_target),
expected_input_types_(expected_input_types) {
ASSERT(true_target != NULL && false_target != NULL);
}
explicit HBranch(HValue* value)
@ -945,7 +950,14 @@ class HBranch: public HUnaryControlInstruction {
return Representation::None();
}
ToBooleanStub::Types expected_input_types() const {
return expected_input_types_;
}
DECLARE_CONCRETE_INSTRUCTION(Branch)
private:
ToBooleanStub::Types expected_input_types_;
};
@ -1663,12 +1675,14 @@ class HCallRuntime: public HCall<1> {
};
class HJSArrayLength: public HUnaryOperation {
class HJSArrayLength: public HTemplateInstruction<2> {
public:
explicit HJSArrayLength(HValue* value) : HUnaryOperation(value) {
HJSArrayLength(HValue* value, HValue* typecheck) {
// The length of an array is stored as a tagged value in the array
// object. It is guaranteed to be 32 bit integer, but it can be
// represented as either a smi or heap number.
SetOperandAt(0, value);
SetOperandAt(1, typecheck);
set_representation(Representation::Tagged());
SetFlag(kUseGVN);
SetFlag(kDependsOnArrayLengths);
@ -1679,6 +1693,8 @@ class HJSArrayLength: public HUnaryOperation {
return Representation::Tagged();
}
HValue* value() { return OperandAt(0); }
DECLARE_CONCRETE_INSTRUCTION(JSArrayLength)
protected:
@ -1894,10 +1910,14 @@ class HLoadExternalArrayPointer: public HUnaryOperation {
};
class HCheckMap: public HUnaryOperation {
class HCheckMap: public HTemplateInstruction<2> {
public:
HCheckMap(HValue* value, Handle<Map> map)
: HUnaryOperation(value), map_(map) {
HCheckMap(HValue* value, Handle<Map> map, HValue* typecheck = NULL)
: map_(map) {
SetOperandAt(0, value);
// If callers don't depend on a typecheck, they can pass in NULL. In that
// case we use a copy of the |value| argument as a dummy value.
SetOperandAt(1, typecheck != NULL ? typecheck : value);
set_representation(Representation::Tagged());
SetFlag(kUseGVN);
SetFlag(kDependsOnMaps);
@ -1909,10 +1929,7 @@ class HCheckMap: public HUnaryOperation {
virtual void PrintDataTo(StringStream* stream);
virtual HType CalculateInferredType();
#ifdef DEBUG
virtual void Verify();
#endif
HValue* value() { return OperandAt(0); }
Handle<Map> map() const { return map_; }
DECLARE_CONCRETE_INSTRUCTION(CheckMap)
@ -1980,10 +1997,6 @@ class HCheckInstanceType: public HUnaryOperation {
return Representation::Tagged();
}
#ifdef DEBUG
virtual void Verify();
#endif
virtual HValue* Canonicalize();
bool is_interval_check() const { return check_ <= LAST_INTERVAL_CHECK; }
@ -2458,10 +2471,6 @@ class HBoundsCheck: public HTemplateInstruction<2> {
return Representation::Integer32();
}
#ifdef DEBUG
virtual void Verify();
#endif
HValue* index() { return OperandAt(0); }
HValue* length() { return OperandAt(1); }
@ -3063,6 +3072,7 @@ class HShr: public HBitwiseBinaryOperation {
HShr(HValue* context, HValue* left, HValue* right)
: HBitwiseBinaryOperation(context, left, right) { }
virtual Range* InferRange();
virtual HType CalculateInferredType();
DECLARE_CONCRETE_INSTRUCTION(Shr)
@ -3527,7 +3537,7 @@ class HLoadKeyedFastDoubleElement: public HTemplateInstruction<2> {
SetOperandAt(0, elements);
SetOperandAt(1, key);
set_representation(Representation::Double());
SetFlag(kDependsOnArrayElements);
SetFlag(kDependsOnDoubleArrayElements);
SetFlag(kUseGVN);
}
@ -3745,7 +3755,7 @@ class HStoreKeyedFastDoubleElement: public HTemplateInstruction<3> {
SetOperandAt(0, elements);
SetOperandAt(1, key);
SetOperandAt(2, val);
SetFlag(kChangesArrayElements);
SetFlag(kChangesDoubleArrayElements);
}
virtual Representation RequiredInputRepresentation(int index) const {

deps/v8/src/hydrogen.cc (122)

@ -736,6 +736,8 @@ void HGraph::AssignDominators() {
HPhase phase("Assign dominators", this);
for (int i = 0; i < blocks_.length(); ++i) {
if (blocks_[i]->IsLoopHeader()) {
// Only the first predecessor of a loop header is from outside the loop.
// All others are back edges, and thus cannot dominate the loop header.
blocks_[i]->AssignCommonDominator(blocks_[i]->predecessors()->first());
} else {
for (int j = 0; j < blocks_[i]->predecessors()->length(); ++j) {
@ -743,13 +745,15 @@ void HGraph::AssignDominators() {
}
}
}
}
// Propagate flag marking blocks containing unconditional deoptimize.
// Mark all blocks that are dominated by an unconditional soft deoptimize to
// prevent code motion across those blocks.
void HGraph::PropagateDeoptimizingMark() {
HPhase phase("Propagate deoptimizing mark", this);
MarkAsDeoptimizingRecursively(entry_block());
}
// Mark all blocks that are dominated by an unconditional deoptimize.
void HGraph::MarkAsDeoptimizingRecursively(HBasicBlock* block) {
for (int i = 0; i < block->dominated_blocks()->length(); ++i) {
HBasicBlock* dominated = block->dominated_blocks()->at(i);
@ -836,6 +840,19 @@ void HGraph::EliminateUnreachablePhis() {
}
bool HGraph::CheckPhis() {
int block_count = blocks_.length();
for (int i = 0; i < block_count; ++i) {
for (int j = 0; j < blocks_[i]->phis()->length(); ++j) {
HPhi* phi = blocks_[i]->phis()->at(j);
// We don't support phi uses of arguments for now.
if (phi->CheckFlag(HValue::kIsArguments)) return false;
}
}
return true;
}
bool HGraph::CollectPhis() {
int block_count = blocks_.length();
phi_list_ = new ZoneList<HPhi*>(block_count);
@ -843,8 +860,6 @@ bool HGraph::CollectPhis() {
for (int j = 0; j < blocks_[i]->phis()->length(); ++j) {
HPhi* phi = blocks_[i]->phis()->at(j);
phi_list_->Add(phi);
// We don't support phi uses of arguments for now.
if (phi->CheckFlag(HValue::kIsArguments)) return false;
// Check for the hole value (from an uninitialized const).
for (int k = 0; k < phi->OperandCount(); k++) {
if (phi->OperandAt(k) == GetConstantHole()) return false;
@ -2158,7 +2173,9 @@ void TestContext::BuildBranch(HValue* value) {
}
HBasicBlock* empty_true = builder->graph()->CreateBasicBlock();
HBasicBlock* empty_false = builder->graph()->CreateBasicBlock();
HBranch* test = new(zone()) HBranch(value, empty_true, empty_false);
unsigned test_id = condition()->test_id();
ToBooleanStub::Types expected(builder->oracle()->ToBooleanTypes(test_id));
HBranch* test = new(zone()) HBranch(value, empty_true, empty_false, expected);
builder->current_block()->Finish(test);
empty_true->Goto(if_true());
@ -2293,10 +2310,15 @@ HGraph* HGraphBuilder::CreateGraph() {
graph()->OrderBlocks();
graph()->AssignDominators();
graph()->PropagateDeoptimizingMark();
graph()->EliminateRedundantPhis();
if (!graph()->CheckPhis()) {
Bailout("Unsupported phi use of arguments object");
return NULL;
}
if (FLAG_eliminate_dead_phis) graph()->EliminateUnreachablePhis();
if (!graph()->CollectPhis()) {
Bailout("Unsupported phi-use");
Bailout("Unsupported phi use of uninitialized constant");
return NULL;
}
@ -3274,8 +3296,8 @@ void HGraphBuilder::VisitArrayLiteral(ArrayLiteral* expr) {
// Load the elements array before the first store.
if (elements == NULL) {
elements = new(zone()) HLoadElements(literal);
AddInstruction(elements);
elements = new(zone()) HLoadElements(literal);
AddInstruction(elements);
}
HValue* key = AddInstruction(
@ -3908,12 +3930,16 @@ HInstruction* HGraphBuilder::BuildMonomorphicElementAccess(HValue* object,
: BuildLoadKeyedGeneric(object, key);
}
AddInstruction(new(zone()) HCheckNonSmi(object));
AddInstruction(new(zone()) HCheckMap(object, map));
HInstruction* elements = new(zone()) HLoadElements(object);
HInstruction* mapcheck = AddInstruction(new(zone()) HCheckMap(object, map));
HInstruction* elements = AddInstruction(new(zone()) HLoadElements(object));
bool fast_double_elements = map->has_fast_double_elements();
if (is_store && map->has_fast_elements()) {
AddInstruction(new(zone()) HCheckMap(
elements, isolate()->factory()->fixed_array_map()));
}
HInstruction* length = NULL;
HInstruction* checked_key = NULL;
if (map->has_external_array_elements()) {
AddInstruction(elements);
length = AddInstruction(new(zone()) HExternalArrayLength(elements));
checked_key = AddInstruction(new(zone()) HBoundsCheck(key, length));
HLoadExternalArrayPointer* external_elements =
@ -3922,25 +3948,13 @@ HInstruction* HGraphBuilder::BuildMonomorphicElementAccess(HValue* object,
return BuildExternalArrayElementAccess(external_elements, checked_key,
val, map->elements_kind(), is_store);
}
bool fast_double_elements = map->has_fast_double_elements();
ASSERT(map->has_fast_elements() || fast_double_elements);
if (map->instance_type() == JS_ARRAY_TYPE) {
length = AddInstruction(new(zone()) HJSArrayLength(object));
checked_key = AddInstruction(new(zone()) HBoundsCheck(key, length));
AddInstruction(elements);
if (is_store && !fast_double_elements) {
AddInstruction(new(zone()) HCheckMap(
elements, isolate()->factory()->fixed_array_map()));
}
length = AddInstruction(new(zone()) HJSArrayLength(object, mapcheck));
} else {
AddInstruction(elements);
if (is_store && !fast_double_elements) {
AddInstruction(new(zone()) HCheckMap(
elements, isolate()->factory()->fixed_array_map()));
}
length = AddInstruction(new(zone()) HFixedArrayLength(elements));
checked_key = AddInstruction(new(zone()) HBoundsCheck(key, length));
}
checked_key = AddInstruction(new(zone()) HBoundsCheck(key, length));
if (is_store) {
if (fast_double_elements) {
return new(zone()) HStoreKeyedFastDoubleElement(elements,
@ -3992,7 +4006,8 @@ HValue* HGraphBuilder::HandlePolymorphicElementAccess(HValue* object,
HInstruction* elements_kind_instr =
AddInstruction(new(zone()) HElementsKind(object));
HInstruction* elements = NULL;
HCompareConstantEqAndBranch* elements_kind_branch = NULL;
HInstruction* elements = AddInstruction(new(zone()) HLoadElements(object));
HLoadExternalArrayPointer* external_elements = NULL;
HInstruction* checked_key = NULL;
@ -4008,14 +4023,6 @@ HValue* HGraphBuilder::HandlePolymorphicElementAccess(HValue* object,
JSObject::LAST_ELEMENTS_KIND);
if (elements_kind == JSObject::FIRST_EXTERNAL_ARRAY_ELEMENTS_KIND
&& todo_external_array) {
elements = AddInstruction(new(zone()) HLoadElements(object));
// We need to forcibly prevent some ElementsKind-dependent instructions
// from being hoisted out of any loops they might occur in, because
// the current loop-invariant-code-motion algorithm isn't clever enough
// to deal with them properly.
// There's some performance to be gained by developing a smarter
// solution for this.
elements->ClearFlag(HValue::kUseGVN);
HInstruction* length =
AddInstruction(new(zone()) HExternalArrayLength(elements));
checked_key = AddInstruction(new(zone()) HBoundsCheck(key, length));
@ -4025,18 +4032,23 @@ HValue* HGraphBuilder::HandlePolymorphicElementAccess(HValue* object,
if (type_todo[elements_kind]) {
HBasicBlock* if_true = graph()->CreateBasicBlock();
HBasicBlock* if_false = graph()->CreateBasicBlock();
HCompareConstantEqAndBranch* compare =
new(zone()) HCompareConstantEqAndBranch(elements_kind_instr,
elements_kind,
Token::EQ_STRICT);
compare->SetSuccessorAt(0, if_true);
compare->SetSuccessorAt(1, if_false);
current_block()->Finish(compare);
elements_kind_branch = new(zone()) HCompareConstantEqAndBranch(
elements_kind_instr, elements_kind, Token::EQ_STRICT);
elements_kind_branch->SetSuccessorAt(0, if_true);
elements_kind_branch->SetSuccessorAt(1, if_false);
current_block()->Finish(elements_kind_branch);
set_current_block(if_true);
HInstruction* access;
if (elements_kind == JSObject::FAST_ELEMENTS ||
elements_kind == JSObject::FAST_DOUBLE_ELEMENTS) {
bool fast_double_elements =
elements_kind == JSObject::FAST_DOUBLE_ELEMENTS;
if (is_store && elements_kind == JSObject::FAST_ELEMENTS) {
AddInstruction(new(zone()) HCheckMap(
elements, isolate()->factory()->fixed_array_map(),
elements_kind_branch));
}
HBasicBlock* if_jsarray = graph()->CreateBasicBlock();
HBasicBlock* if_fastobject = graph()->CreateBasicBlock();
HHasInstanceTypeAndBranch* typecheck =
@ -4046,14 +4058,9 @@ HValue* HGraphBuilder::HandlePolymorphicElementAccess(HValue* object,
current_block()->Finish(typecheck);
set_current_block(if_jsarray);
HInstruction* length = new(zone()) HJSArrayLength(object);
HInstruction* length = new(zone()) HJSArrayLength(object, typecheck);
AddInstruction(length);
length->ClearFlag(HValue::kUseGVN);
checked_key = AddInstruction(new(zone()) HBoundsCheck(key, length));
elements = AddInstruction(new(zone()) HLoadElements(object));
elements->ClearFlag(HValue::kUseGVN);
bool fast_double_elements =
elements_kind == JSObject::FAST_DOUBLE_ELEMENTS;
if (is_store) {
if (fast_double_elements) {
access = AddInstruction(
@ -4061,8 +4068,6 @@ HValue* HGraphBuilder::HandlePolymorphicElementAccess(HValue* object,
checked_key,
val));
} else {
AddInstruction(new(zone()) HCheckMap(
elements, isolate()->factory()->fixed_array_map()));
access = AddInstruction(
new(zone()) HStoreKeyedFastElement(elements, checked_key, val));
}
@ -4083,12 +4088,6 @@ HValue* HGraphBuilder::HandlePolymorphicElementAccess(HValue* object,
if_jsarray->Goto(join);
set_current_block(if_fastobject);
elements = AddInstruction(new(zone()) HLoadElements(object));
elements->ClearFlag(HValue::kUseGVN);
if (is_store && !fast_double_elements) {
AddInstruction(new(zone()) HCheckMap(
elements, isolate()->factory()->fixed_array_map()));
}
length = AddInstruction(new(zone()) HFixedArrayLength(elements));
checked_key = AddInstruction(new(zone()) HBoundsCheck(key, length));
if (is_store) {
@ -4233,8 +4232,9 @@ void HGraphBuilder::VisitProperty(Property* expr) {
if (expr->IsArrayLength()) {
HValue* array = Pop();
AddInstruction(new(zone()) HCheckNonSmi(array));
AddInstruction(HCheckInstanceType::NewIsJSArray(array));
instr = new(zone()) HJSArrayLength(array);
HInstruction* mapcheck =
AddInstruction(HCheckInstanceType::NewIsJSArray(array));
instr = new(zone()) HJSArrayLength(array, mapcheck);
} else if (expr->IsStringLength()) {
HValue* string = Pop();
@ -5526,9 +5526,11 @@ void HGraphBuilder::VisitLogicalExpression(BinaryOperation* expr) {
// We need an extra block to maintain edge-split form.
HBasicBlock* empty_block = graph()->CreateBasicBlock();
HBasicBlock* eval_right = graph()->CreateBasicBlock();
unsigned test_id = expr->left()->test_id();
ToBooleanStub::Types expected(oracle()->ToBooleanTypes(test_id));
HBranch* test = is_logical_and
? new(zone()) HBranch(Top(), eval_right, empty_block)
: new(zone()) HBranch(Top(), empty_block, eval_right);
? new(zone()) HBranch(Top(), eval_right, empty_block, expected)
: new(zone()) HBranch(Top(), empty_block, eval_right, expected);
current_block()->Finish(test);
set_current_block(eval_right);

deps/v8/src/hydrogen.h (10)

@ -238,10 +238,14 @@ class HGraph: public ZoneObject {
void OrderBlocks();
void AssignDominators();
void ReplaceCheckedValues();
void MarkAsDeoptimizingRecursively(HBasicBlock* block);
void PropagateDeoptimizingMark();
// Returns false if there are phi-uses of the arguments-object
// which are not supported by the optimizing compiler.
bool CheckPhis();
// Returns false if there are phi-uses of hole values comming
// from uninitialized consts.
bool CollectPhis();
Handle<Code> Compile(CompilationInfo* info);
@ -293,6 +297,7 @@ class HGraph: public ZoneObject {
HConstant* GetConstant(SetOncePointer<HConstant>* pointer,
Object* value);
void MarkAsDeoptimizingRecursively(HBasicBlock* block);
void InsertTypeConversions(HInstruction* instr);
void PropagateMinusZeroChecks(HValue* value, BitVector* visited);
void RecursivelyMarkPhiDeoptimizeOnUndefined(HPhi* phi);
@ -719,6 +724,8 @@ class HGraphBuilder: public AstVisitor {
HBasicBlock* second,
int join_id);
TypeFeedbackOracle* oracle() const { return function_state()->oracle(); }
private:
// Type of a member function that generates inline code for a native function.
typedef void (HGraphBuilder::*InlineFunctionGenerator)(CallRuntime* call);
@ -747,7 +754,6 @@ class HGraphBuilder: public AstVisitor {
CompilationInfo* info() const {
return function_state()->compilation_info();
}
TypeFeedbackOracle* oracle() const { return function_state()->oracle(); }
AstContext* call_context() const {
return function_state()->call_context();

deps/v8/src/ia32/code-stubs-ia32.cc (180)

@ -236,69 +236,153 @@ void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
}
// The stub returns zero for false, and a non-zero value for true.
// The stub expects its argument on the stack and returns its result in tos_:
// zero for false, and a non-zero value for true.
void ToBooleanStub::Generate(MacroAssembler* masm) {
Label false_result, true_result, not_string;
Label patch;
Factory* factory = masm->isolate()->factory();
const Register argument = eax;
const Register map = edx;
__ mov(eax, Operand(esp, 1 * kPointerSize));
if (!types_.IsEmpty()) {
__ mov(argument, Operand(esp, 1 * kPointerSize));
}
// undefined -> false
__ cmp(eax, factory->undefined_value());
__ j(equal, &false_result);
CheckOddball(masm, UNDEFINED, Heap::kUndefinedValueRootIndex, false, &patch);
// Boolean -> its value
__ cmp(eax, factory->false_value());
__ j(equal, &false_result);
__ cmp(eax, factory->true_value());
__ j(equal, &true_result);
// Smis: 0 -> false, all other -> true
__ test(eax, Operand(eax));
__ j(zero, &false_result);
__ JumpIfSmi(eax, &true_result);
CheckOddball(masm, BOOLEAN, Heap::kFalseValueRootIndex, false, &patch);
CheckOddball(masm, BOOLEAN, Heap::kTrueValueRootIndex, true, &patch);
// 'null' -> false.
__ cmp(eax, factory->null_value());
__ j(equal, &false_result, Label::kNear);
CheckOddball(masm, NULL_TYPE, Heap::kNullValueRootIndex, false, &patch);
// Get the map of the heap object.
__ mov(map, FieldOperand(eax, HeapObject::kMapOffset));
if (types_.Contains(SMI)) {
// Smis: 0 -> false, all other -> true
Label not_smi;
__ JumpIfNotSmi(argument, &not_smi, Label::kNear);
// argument contains the correct return value already
if (!tos_.is(argument)) {
__ mov(tos_, argument);
}
__ ret(1 * kPointerSize);
__ bind(&not_smi);
} else if (types_.NeedsMap()) {
// If we need a map later and have a Smi -> patch.
__ JumpIfSmi(argument, &patch, Label::kNear);
}
// Undetectable -> false.
__ test_b(FieldOperand(map, Map::kBitFieldOffset),
1 << Map::kIsUndetectable);
__ j(not_zero, &false_result, Label::kNear);
if (types_.NeedsMap()) {
__ mov(map, FieldOperand(argument, HeapObject::kMapOffset));
// JavaScript object -> true.
__ CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE);
__ j(above_equal, &true_result, Label::kNear);
// Everything with a map could be undetectable, so check this now.
__ test_b(FieldOperand(map, Map::kBitFieldOffset),
1 << Map::kIsUndetectable);
// Undetectable -> false.
Label not_undetectable;
__ j(zero, &not_undetectable, Label::kNear);
__ Set(tos_, Immediate(0));
__ ret(1 * kPointerSize);
__ bind(&not_undetectable);
}
// String value -> false iff empty.
__ CmpInstanceType(map, FIRST_NONSTRING_TYPE);
__ j(above_equal, &not_string, Label::kNear);
__ cmp(FieldOperand(eax, String::kLengthOffset), Immediate(0));
__ j(zero, &false_result, Label::kNear);
__ jmp(&true_result, Label::kNear);
if (types_.Contains(SPEC_OBJECT)) {
// spec object -> true.
Label not_js_object;
__ CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE);
__ j(below, &not_js_object, Label::kNear);
__ Set(tos_, Immediate(1));
__ ret(1 * kPointerSize);
__ bind(&not_js_object);
} else if (types_.Contains(INTERNAL_OBJECT)) {
// We've seen a spec object for the first time -> patch.
__ CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE);
__ j(above_equal, &patch, Label::kNear);
}
__ bind(&not_string);
// HeapNumber -> false iff +0, -0, or NaN.
__ cmp(map, factory->heap_number_map());
__ j(not_equal, &true_result, Label::kNear);
__ fldz();
__ fld_d(FieldOperand(eax, HeapNumber::kValueOffset));
__ FCmp();
__ j(zero, &false_result, Label::kNear);
// Fall through to |true_result|.
// Return 1/0 for true/false in tos_.
__ bind(&true_result);
__ mov(tos_, 1);
__ ret(1 * kPointerSize);
__ bind(&false_result);
__ mov(tos_, 0);
__ ret(1 * kPointerSize);
if (types_.Contains(STRING)) {
// String value -> false iff empty.
Label not_string;
__ CmpInstanceType(map, FIRST_NONSTRING_TYPE);
__ j(above_equal, &not_string, Label::kNear);
__ mov(tos_, FieldOperand(argument, String::kLengthOffset));
__ ret(1 * kPointerSize); // the string length is OK as the return value
__ bind(&not_string);
} else if (types_.Contains(INTERNAL_OBJECT)) {
// We've seen a string for the first time -> patch
__ CmpInstanceType(map, FIRST_NONSTRING_TYPE);
__ j(below, &patch, Label::kNear);
}
if (types_.Contains(HEAP_NUMBER)) {
// heap number -> false iff +0, -0, or NaN.
Label not_heap_number, false_result;
__ cmp(map, factory->heap_number_map());
__ j(not_equal, &not_heap_number, Label::kNear);
__ fldz();
__ fld_d(FieldOperand(argument, HeapNumber::kValueOffset));
__ FCmp();
__ j(zero, &false_result, Label::kNear);
__ Set(tos_, Immediate(1));
__ ret(1 * kPointerSize);
__ bind(&false_result);
__ Set(tos_, Immediate(0));
__ ret(1 * kPointerSize);
__ bind(&not_heap_number);
} else if (types_.Contains(INTERNAL_OBJECT)) {
// We've seen a heap number for the first time -> patch
__ cmp(map, factory->heap_number_map());
__ j(equal, &patch, Label::kNear);
}
if (types_.Contains(INTERNAL_OBJECT)) {
// internal objects -> true
__ Set(tos_, Immediate(1));
__ ret(1 * kPointerSize);
}
if (!types_.IsAll()) {
__ bind(&patch);
GenerateTypeTransition(masm);
}
}
void ToBooleanStub::CheckOddball(MacroAssembler* masm,
Type type,
Heap::RootListIndex value,
bool result,
Label* patch) {
const Register argument = eax;
if (types_.Contains(type)) {
// If we see an expected oddball, return its ToBoolean value in tos_.
Label different_value;
__ CompareRoot(argument, value);
__ j(not_equal, &different_value, Label::kNear);
__ Set(tos_, Immediate(result ? 1 : 0));
__ ret(1 * kPointerSize);
__ bind(&different_value);
} else if (types_.Contains(INTERNAL_OBJECT)) {
// If we see an unexpected oddball and handle internal objects, we must
// patch because the code for internal objects doesn't handle it explicitly.
__ CompareRoot(argument, value);
__ j(equal, patch);
}
}
void ToBooleanStub::GenerateTypeTransition(MacroAssembler* masm) {
__ pop(ecx); // Get return address, operand is now on top of stack.
__ push(Immediate(Smi::FromInt(tos_.code())));
__ push(Immediate(Smi::FromInt(types_.ToByte())));
__ push(ecx); // Push return address.
// Patch the caller to an appropriate specialized stub and return the
// operation result to the caller of the stub.
__ TailCallExternalReference(
ExternalReference(IC_Utility(IC::kToBoolean_Patch), masm->isolate()),
3,
1);
}
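The rewritten ToBooleanStub above is type-recording: it keeps a byte-sized bit set of the value kinds it has actually seen and only emits checks for those, patching itself through GenerateTypeTransition whenever a new kind turns up. As a rough, stand-alone illustration of that bookkeeping only — not V8's implementation — here is a minimal C++ sketch; the enum members and the NeedsMap() rule are assumptions modelled on the checks visible in the diff.

#include <cstdint>
#include <iostream>

// Hypothetical stand-in for the type set the stub records.
enum Type { UNDEFINED, BOOLEAN, NULL_TYPE, SMI, SPEC_OBJECT, STRING,
            HEAP_NUMBER, INTERNAL_OBJECT };

class Types {
 public:
  Types() : bits_(0) {}
  bool IsEmpty() const { return bits_ == 0; }
  bool Contains(Type t) const { return (bits_ & (1 << t)) != 0; }
  void Add(Type t) { bits_ |= static_cast<uint8_t>(1 << t); }
  // A map load is only needed once non-smi, non-oddball values show up.
  bool NeedsMap() const {
    return Contains(SPEC_OBJECT) || Contains(STRING) ||
           Contains(HEAP_NUMBER) || Contains(INTERNAL_OBJECT);
  }
  uint8_t ToByte() const { return bits_; }
 private:
  uint8_t bits_;
};

int main() {
  Types seen;
  seen.Add(SMI);
  std::cout << seen.NeedsMap() << "\n";  // 0: a smi-only stub skips the map load
  seen.Add(STRING);
  std::cout << seen.NeedsMap() << "\n";  // 1: the string check requires the map
  std::cout << static_cast<int>(seen.ToByte()) << "\n";  // bit set handed to the patch IC
  return 0;
}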

1
deps/v8/src/ia32/codegen-ia32.cc

@ -255,6 +255,7 @@ OS::MemCopyFunction CreateMemCopyFunction() {
ASSERT(desc.reloc_size == 0);
CPU::FlushICache(buffer, actual_size);
OS::ProtectCode(buffer, actual_size);
return FUNCTION_CAST<OS::MemCopyFunction>(buffer);
}

5
deps/v8/src/ia32/cpu-ia32.cc

@ -1,4 +1,4 @@
// Copyright 2006-2008 the V8 project authors. All rights reserved.
// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@ -67,7 +67,8 @@ void CPU::FlushICache(void* start, size_t size) {
// solution is to run valgrind with --smc-check=all, but this comes at a big
// performance cost. We can notify valgrind to invalidate its cache.
#ifdef VALGRIND_DISCARD_TRANSLATIONS
VALGRIND_DISCARD_TRANSLATIONS(start, size);
unsigned res = VALGRIND_DISCARD_TRANSLATIONS(start, size);
USE(res);
#endif
}

131
deps/v8/src/ia32/full-codegen-ia32.cc

@ -166,6 +166,11 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
}
}
set_stack_height(2 + scope()->num_stack_slots());
if (FLAG_verify_stack_height) {
verify_stack_height();
}
bool function_in_register = true;
// Possibly allocate a local context.
@ -358,6 +363,15 @@ void FullCodeGenerator::EmitReturnSequence() {
}
void FullCodeGenerator::verify_stack_height() {
ASSERT(FLAG_verify_stack_height);
__ sub(Operand(ebp), Immediate(kPointerSize * stack_height()));
__ cmp(ebp, Operand(esp));
__ Assert(equal, "Full codegen stack height not as expected.");
__ add(Operand(ebp), Immediate(kPointerSize * stack_height()));
}
void FullCodeGenerator::EffectContext::Plug(Slot* slot) const {
}
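Most of the changes in this file pair every generated push or pop with increment_stack_height() or decrement_stack_height(), so that verify_stack_height() can assert that ebp minus kPointerSize * stack_height() still equals esp. A toy model of that bookkeeping, illustrative only and not V8 code:

#include <cassert>
#include <vector>

int main() {
  std::vector<int> stack;   // stands in for the machine stack
  int stack_height = 0;     // what the full codegen tracks

  auto push = [&](int v) { stack.push_back(v); ++stack_height; };
  auto drop = [&](int n) { stack.resize(stack.size() - n); stack_height -= n; };

  push(1); push(2); push(3);
  drop(2);
  // The invariant verify_stack_height() asserts, expressed on the model:
  // tracked height and actual pushed slots must agree.
  assert(static_cast<int>(stack.size()) == stack_height);
  return 0;
}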
@ -372,6 +386,7 @@ void FullCodeGenerator::StackValueContext::Plug(Slot* slot) const {
MemOperand slot_operand = codegen()->EmitSlotSearch(slot, result_register());
// Memory operands can be pushed directly.
__ push(slot_operand);
codegen()->increment_stack_height();
}
@ -425,6 +440,7 @@ void FullCodeGenerator::StackValueContext::Plug(Handle<Object> lit) const {
} else {
__ push(Immediate(lit));
}
codegen()->increment_stack_height();
}
@ -462,6 +478,7 @@ void FullCodeGenerator::EffectContext::DropAndPlug(int count,
Register reg) const {
ASSERT(count > 0);
__ Drop(count);
codegen()->decrement_stack_height(count);
}
@ -471,6 +488,7 @@ void FullCodeGenerator::AccumulatorValueContext::DropAndPlug(
ASSERT(count > 0);
__ Drop(count);
__ Move(result_register(), reg);
codegen()->decrement_stack_height(count);
}
@ -479,6 +497,7 @@ void FullCodeGenerator::StackValueContext::DropAndPlug(int count,
ASSERT(count > 0);
if (count > 1) __ Drop(count - 1);
__ mov(Operand(esp, 0), reg);
codegen()->decrement_stack_height(count - 1);
}
@ -490,6 +509,7 @@ void FullCodeGenerator::TestContext::DropAndPlug(int count,
__ Move(result_register(), reg);
codegen()->PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
codegen()->DoTest(this);
codegen()->decrement_stack_height(count);
}
@ -523,6 +543,7 @@ void FullCodeGenerator::StackValueContext::Plug(
__ bind(materialize_false);
__ push(Immediate(isolate()->factory()->false_value()));
__ bind(&done);
codegen()->increment_stack_height();
}
@ -550,6 +571,7 @@ void FullCodeGenerator::StackValueContext::Plug(bool flag) const {
? isolate()->factory()->true_value()
: isolate()->factory()->false_value();
__ push(Immediate(value));
codegen()->increment_stack_height();
}
@ -572,7 +594,7 @@ void FullCodeGenerator::DoTest(Expression* condition,
Label* fall_through) {
ToBooleanStub stub(result_register());
__ push(result_register());
__ CallStub(&stub);
__ CallStub(&stub, condition->test_id());
__ test(result_register(), Operand(result_register()));
// The stub returns nonzero for true.
Split(not_zero, if_true, if_false, fall_through);
@ -722,14 +744,18 @@ void FullCodeGenerator::EmitDeclaration(Variable* variable,
// Note: For variables we must not push an initial value (such as
// 'undefined') because we may have a (legal) redeclaration and we
// must not destroy the current value.
increment_stack_height(3);
if (mode == Variable::CONST) {
__ push(Immediate(isolate()->factory()->the_hole_value()));
increment_stack_height();
} else if (function != NULL) {
VisitForStackValue(function);
} else {
__ push(Immediate(Smi::FromInt(0))); // No initial value!
increment_stack_height();
}
__ CallRuntime(Runtime::kDeclareContextSlot, 4);
decrement_stack_height(4);
break;
}
}
@ -748,8 +774,10 @@ void FullCodeGenerator::EmitDeclaration(Variable* variable,
}
__ push(eax);
increment_stack_height();
VisitForAccumulatorValue(function);
__ pop(edx);
decrement_stack_height();
ASSERT(prop->key()->AsLiteral() != NULL &&
prop->key()->AsLiteral()->handle()->IsSmi());
@ -785,6 +813,7 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
Breakable nested_statement(this, stmt);
SetStatementPosition(stmt);
int switch_clause_stack_height = stack_height();
// Keep the switch value on the stack until a case matches.
VisitForStackValue(stmt->tag());
PrepareForBailoutForId(stmt->EntryId(), NO_REGISTERS);
@ -849,6 +878,7 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
__ jmp(default_clause->body_target());
}
set_stack_height(switch_clause_stack_height);
// Compile all the case bodies.
for (int i = 0; i < clauses->length(); i++) {
Comment cmnt(masm_, "[ Case body");
@ -890,6 +920,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
__ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
__ bind(&done_convert);
__ push(eax);
increment_stack_height();
// Check cache validity in generated code. This is a fast case for
// the JSObject::IsSimpleEnum cache validity checks. If we cannot
@ -973,6 +1004,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
__ push(eax); // Fixed array length (as smi).
__ push(Immediate(Smi::FromInt(0))); // Initial index.
increment_stack_height(4);
// Generate code for doing the condition check.
__ bind(&loop);
__ mov(eax, Operand(esp, 0 * kPointerSize)); // Get the current index.
@ -1028,6 +1060,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
__ bind(loop_statement.break_target());
__ add(Operand(esp), Immediate(5 * kPointerSize));
decrement_stack_height(5);
// Exit and decrement the loop depth.
__ bind(&exit);
decrement_loop_depth();
@ -1363,6 +1396,7 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
if (!result_saved) {
__ push(eax); // Save result on the stack
result_saved = true;
increment_stack_height();
}
switch (property->kind()) {
case ObjectLiteral::Property::MATERIALIZED_LITERAL:
@ -1387,6 +1421,7 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
// Fall through.
case ObjectLiteral::Property::PROTOTYPE:
__ push(Operand(esp, 0)); // Duplicate receiver.
increment_stack_height();
VisitForStackValue(key);
VisitForStackValue(value);
if (property->emit_store()) {
@ -1395,16 +1430,20 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
} else {
__ Drop(3);
}
decrement_stack_height(3);
break;
case ObjectLiteral::Property::SETTER:
case ObjectLiteral::Property::GETTER:
__ push(Operand(esp, 0)); // Duplicate receiver.
increment_stack_height();
VisitForStackValue(key);
__ push(Immediate(property->kind() == ObjectLiteral::Property::SETTER ?
Smi::FromInt(1) :
Smi::FromInt(0)));
increment_stack_height();
VisitForStackValue(value);
__ CallRuntime(Runtime::kDefineAccessor, 4);
decrement_stack_height(4);
break;
default: UNREACHABLE();
}
@ -1467,6 +1506,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
if (!result_saved) {
__ push(eax);
result_saved = true;
increment_stack_height();
}
VisitForAccumulatorValue(subexpr);
@ -1495,7 +1535,9 @@ void FullCodeGenerator::VisitAssignment(Assignment* expr) {
// Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
// on the left-hand side.
if (!expr->target()->IsValidLeftHandSide()) {
VisitForEffect(expr->target());
ASSERT(expr->target()->AsThrow() != NULL);
VisitInCurrentContext(expr->target()); // Throw does not plug the context
context()->Plug(eax);
return;
}
@ -1520,6 +1562,7 @@ void FullCodeGenerator::VisitAssignment(Assignment* expr) {
// We need the receiver both on the stack and in the accumulator.
VisitForAccumulatorValue(property->obj());
__ push(result_register());
increment_stack_height();
} else {
VisitForStackValue(property->obj());
}
@ -1530,6 +1573,7 @@ void FullCodeGenerator::VisitAssignment(Assignment* expr) {
VisitForAccumulatorValue(property->key());
__ mov(edx, Operand(esp, 0));
__ push(eax);
increment_stack_height();
} else {
VisitForStackValue(property->obj());
VisitForStackValue(property->key());
@ -1541,7 +1585,8 @@ void FullCodeGenerator::VisitAssignment(Assignment* expr) {
// For compound assignments we need another deoptimization point after the
// variable/property load.
if (expr->is_compound()) {
{ AccumulatorValueContext context(this);
AccumulatorValueContext result_context(this);
{ AccumulatorValueContext left_operand_context(this);
switch (assign_type) {
case VARIABLE:
EmitVariableLoad(expr->target()->AsVariableProxy());
@ -1560,13 +1605,13 @@ void FullCodeGenerator::VisitAssignment(Assignment* expr) {
Token::Value op = expr->binary_op();
__ push(eax); // Left operand goes on the stack.
increment_stack_height();
VisitForAccumulatorValue(expr->value());
OverwriteMode mode = expr->value()->ResultOverwriteAllowed()
? OVERWRITE_RIGHT
: NO_OVERWRITE;
SetSourcePosition(expr->position() + 1);
AccumulatorValueContext context(this);
if (ShouldInlineSmiCase(op)) {
EmitInlineSmiBinaryOp(expr->binary_operation(),
op,
@ -1630,6 +1675,7 @@ void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
// stack. Right operand is in eax.
Label smi_case, done, stub_call;
__ pop(edx);
decrement_stack_height();
__ mov(ecx, eax);
__ or_(eax, Operand(edx));
JumpPatchSite patch_site(masm_);
@ -1721,6 +1767,7 @@ void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
Token::Value op,
OverwriteMode mode) {
__ pop(edx);
decrement_stack_height();
BinaryOpStub stub(op, mode);
JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
__ call(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
@ -1733,7 +1780,9 @@ void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) {
// Invalid left-hand sides are rewritten to have a 'throw
// ReferenceError' on the left-hand side.
if (!expr->IsValidLeftHandSide()) {
VisitForEffect(expr);
ASSERT(expr->AsThrow() != NULL);
VisitInCurrentContext(expr); // Throw does not plug the context
context()->Plug(eax);
return;
}
@ -1757,9 +1806,11 @@ void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) {
}
case NAMED_PROPERTY: {
__ push(eax); // Preserve value.
increment_stack_height();
VisitForAccumulatorValue(prop->obj());
__ mov(edx, eax);
__ pop(eax); // Restore value.
decrement_stack_height();
__ mov(ecx, prop->key()->AsLiteral()->handle());
Handle<Code> ic = is_strict_mode()
? isolate()->builtins()->StoreIC_Initialize_Strict()
@ -1769,6 +1820,7 @@ void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) {
}
case KEYED_PROPERTY: {
__ push(eax); // Preserve value.
increment_stack_height();
if (prop->is_synthetic()) {
ASSERT(prop->obj()->AsVariableProxy() != NULL);
ASSERT(prop->key()->AsLiteral() != NULL);
@ -1782,8 +1834,10 @@ void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) {
VisitForAccumulatorValue(prop->key());
__ mov(ecx, eax);
__ pop(edx);
decrement_stack_height();
}
__ pop(eax); // Restore value.
decrement_stack_height();
Handle<Code> ic = is_strict_mode()
? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
: isolate()->builtins()->KeyedStoreIC_Initialize();
@ -1900,6 +1954,7 @@ void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
__ mov(edx, Operand(esp, 0));
} else {
__ pop(edx);
decrement_stack_height();
}
Handle<Code> ic = is_strict_mode()
? isolate()->builtins()->StoreIC_Initialize_Strict()
@ -1913,6 +1968,7 @@ void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
__ CallRuntime(Runtime::kToFastProperties, 1);
__ pop(eax);
__ Drop(1);
decrement_stack_height();
}
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
context()->Plug(eax);
@ -1934,10 +1990,12 @@ void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
}
__ pop(ecx);
decrement_stack_height();
if (expr->ends_initialization_block()) {
__ mov(edx, Operand(esp, 0)); // Leave receiver on the stack for later.
} else {
__ pop(edx);
decrement_stack_height();
}
// Record source code position before IC call.
SetSourcePosition(expr->position());
@ -1953,6 +2011,7 @@ void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
__ push(edx);
__ CallRuntime(Runtime::kToFastProperties, 1);
__ pop(eax);
decrement_stack_height();
}
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
@ -1972,6 +2031,7 @@ void FullCodeGenerator::VisitProperty(Property* expr) {
VisitForStackValue(expr->obj());
VisitForAccumulatorValue(expr->key());
__ pop(edx);
decrement_stack_height();
EmitKeyedPropertyLoad(expr);
context()->Plug(eax);
}
@ -1999,6 +2059,7 @@ void FullCodeGenerator::EmitCallWithIC(Call* expr,
RecordJSReturnSite(expr);
// Restore context register.
__ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
decrement_stack_height(arg_count + 1);
context()->Plug(eax);
}
@ -2013,6 +2074,7 @@ void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
__ pop(ecx);
__ push(eax);
__ push(ecx);
increment_stack_height();
// Load the arguments.
ZoneList<Expression*>* args = expr->arguments();
@ -2032,6 +2094,7 @@ void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
RecordJSReturnSite(expr);
// Restore context register.
__ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
decrement_stack_height(arg_count + 1);
context()->DropAndPlug(1, eax); // Drop the key still on the stack.
}
@ -2053,6 +2116,8 @@ void FullCodeGenerator::EmitCallWithStub(Call* expr, CallFunctionFlags flags) {
RecordJSReturnSite(expr);
// Restore context register.
__ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
decrement_stack_height(arg_count + 1);
context()->DropAndPlug(1, eax);
}
@ -2100,7 +2165,7 @@ void FullCodeGenerator::VisitCall(Call* expr) {
VisitForStackValue(fun);
// Reserved receiver slot.
__ push(Immediate(isolate()->factory()->undefined_value()));
increment_stack_height();
// Push the arguments.
for (int i = 0; i < arg_count; i++) {
VisitForStackValue(args->at(i));
@ -2144,10 +2209,12 @@ void FullCodeGenerator::VisitCall(Call* expr) {
RecordJSReturnSite(expr);
// Restore context register.
__ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
decrement_stack_height(arg_count + 1); // Function is left on the stack.
context()->DropAndPlug(1, eax);
} else if (var != NULL && !var->is_this() && var->is_global()) {
// Push global object as receiver for the call IC.
__ push(GlobalObjectOperand());
increment_stack_height();
EmitCallWithIC(expr, var->name(), RelocInfo::CODE_TARGET_CONTEXT);
} else if (var != NULL && var->AsSlot() != NULL &&
var->AsSlot()->type() == Slot::LOOKUP) {
@ -2170,7 +2237,9 @@ void FullCodeGenerator::VisitCall(Call* expr) {
__ push(Immediate(var->name()));
__ CallRuntime(Runtime::kLoadContextSlot, 2);
__ push(eax); // Function.
increment_stack_height();
__ push(edx); // Receiver.
increment_stack_height();
// If fast case code has been generated, emit code to push the
// function and receiver and have the slow path jump around this
@ -2179,7 +2248,7 @@ void FullCodeGenerator::VisitCall(Call* expr) {
Label call;
__ jmp(&call);
__ bind(&done);
// Push function.
// Push function. Stack height already incremented in slow case above.
__ push(eax);
// The receiver is implicitly the global receiver. Indicate this
// by passing the hole to the call function stub.
@ -2225,9 +2294,11 @@ void FullCodeGenerator::VisitCall(Call* expr) {
__ call(ic, RelocInfo::CODE_TARGET, GetPropertyId(prop));
// Push result (function).
__ push(eax);
increment_stack_height();
// Push Global receiver.
__ mov(ecx, GlobalObjectOperand());
__ push(FieldOperand(ecx, GlobalObject::kGlobalReceiverOffset));
increment_stack_height();
EmitCallWithStub(expr, NO_CALL_FUNCTION_FLAGS);
} else {
{ PreservePositionScope scope(masm()->positions_recorder());
@ -2243,6 +2314,7 @@ void FullCodeGenerator::VisitCall(Call* expr) {
// Load global receiver object.
__ mov(ebx, GlobalObjectOperand());
__ push(FieldOperand(ebx, GlobalObject::kGlobalReceiverOffset));
increment_stack_height();
// Emit function call.
EmitCallWithStub(expr, NO_CALL_FUNCTION_FLAGS);
}
@ -2283,6 +2355,8 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) {
Handle<Code> construct_builtin =
isolate()->builtins()->JSConstructCall();
__ call(construct_builtin, RelocInfo::CONSTRUCT_CALL);
decrement_stack_height(arg_count + 1);
context()->Plug(eax);
}
@ -2595,6 +2669,7 @@ void FullCodeGenerator::EmitObjectEquals(ZoneList<Expression*>* args) {
&if_true, &if_false, &fall_through);
__ pop(ebx);
decrement_stack_height();
__ cmp(eax, Operand(ebx));
PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
Split(equal, if_true, if_false, fall_through);
@ -2709,6 +2784,7 @@ void FullCodeGenerator::EmitLog(ZoneList<Expression*>* args) {
VisitForStackValue(args->at(1));
VisitForStackValue(args->at(2));
__ CallRuntime(Runtime::kLog, 2);
decrement_stack_height(2);
}
// Finally, we're expected to leave a value on the top of the stack.
__ mov(eax, isolate()->factory()->undefined_value());
@ -2774,6 +2850,7 @@ void FullCodeGenerator::EmitSubString(ZoneList<Expression*>* args) {
VisitForStackValue(args->at(1));
VisitForStackValue(args->at(2));
__ CallStub(&stub);
decrement_stack_height(3);
context()->Plug(eax);
}
@ -2787,6 +2864,7 @@ void FullCodeGenerator::EmitRegExpExec(ZoneList<Expression*>* args) {
VisitForStackValue(args->at(2));
VisitForStackValue(args->at(3));
__ CallStub(&stub);
decrement_stack_height(4);
context()->Plug(eax);
}
@ -2821,6 +2899,7 @@ void FullCodeGenerator::EmitMathPow(ZoneList<Expression*>* args) {
} else {
__ CallRuntime(Runtime::kMath_pow, 2);
}
decrement_stack_height(2);
context()->Plug(eax);
}
@ -2831,6 +2910,7 @@ void FullCodeGenerator::EmitSetValueOf(ZoneList<Expression*>* args) {
VisitForStackValue(args->at(0)); // Load the object.
VisitForAccumulatorValue(args->at(1)); // Load the value.
__ pop(ebx); // eax = value. ebx = object.
decrement_stack_height();
Label done;
// If the object is a smi, return the value.
@ -2860,6 +2940,7 @@ void FullCodeGenerator::EmitNumberToString(ZoneList<Expression*>* args) {
NumberToStringStub stub;
__ CallStub(&stub);
decrement_stack_height();
context()->Plug(eax);
}
@ -2894,6 +2975,7 @@ void FullCodeGenerator::EmitStringCharCodeAt(ZoneList<Expression*>* args) {
Register result = edx;
__ pop(object);
decrement_stack_height();
Label need_conversion;
Label index_out_of_range;
@ -2942,6 +3024,7 @@ void FullCodeGenerator::EmitStringCharAt(ZoneList<Expression*>* args) {
Register result = eax;
__ pop(object);
decrement_stack_height();
Label need_conversion;
Label index_out_of_range;
@ -2986,6 +3069,7 @@ void FullCodeGenerator::EmitStringAdd(ZoneList<Expression*>* args) {
StringAddStub stub(NO_STRING_ADD_FLAGS);
__ CallStub(&stub);
decrement_stack_height(2);
context()->Plug(eax);
}
@ -2998,6 +3082,7 @@ void FullCodeGenerator::EmitStringCompare(ZoneList<Expression*>* args) {
StringCompareStub stub;
__ CallStub(&stub);
decrement_stack_height(2);
context()->Plug(eax);
}
@ -3009,6 +3094,7 @@ void FullCodeGenerator::EmitMathSin(ZoneList<Expression*>* args) {
ASSERT(args->length() == 1);
VisitForStackValue(args->at(0));
__ CallStub(&stub);
decrement_stack_height();
context()->Plug(eax);
}
@ -3020,6 +3106,7 @@ void FullCodeGenerator::EmitMathCos(ZoneList<Expression*>* args) {
ASSERT(args->length() == 1);
VisitForStackValue(args->at(0));
__ CallStub(&stub);
decrement_stack_height();
context()->Plug(eax);
}
@ -3031,6 +3118,7 @@ void FullCodeGenerator::EmitMathLog(ZoneList<Expression*>* args) {
ASSERT(args->length() == 1);
VisitForStackValue(args->at(0));
__ CallStub(&stub);
decrement_stack_height();
context()->Plug(eax);
}
@ -3040,6 +3128,7 @@ void FullCodeGenerator::EmitMathSqrt(ZoneList<Expression*>* args) {
ASSERT(args->length() == 1);
VisitForStackValue(args->at(0));
__ CallRuntime(Runtime::kMath_sqrt, 1);
decrement_stack_height();
context()->Plug(eax);
}
@ -3059,6 +3148,7 @@ void FullCodeGenerator::EmitCallFunction(ZoneList<Expression*>* args) {
__ InvokeFunction(edi, count, CALL_FUNCTION,
NullCallWrapper(), CALL_AS_METHOD);
__ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
decrement_stack_height(arg_count + 1);
context()->Plug(eax);
}
@ -3071,6 +3161,7 @@ void FullCodeGenerator::EmitRegExpConstructResult(ZoneList<Expression*>* args) {
VisitForStackValue(args->at(1));
VisitForStackValue(args->at(2));
__ CallStub(&stub);
decrement_stack_height(3);
context()->Plug(eax);
}
@ -3144,6 +3235,7 @@ void FullCodeGenerator::EmitSwapElements(ZoneList<Expression*>* args) {
__ CallRuntime(Runtime::kSwapElements, 3);
__ bind(&done);
decrement_stack_height(3);
context()->Plug(eax);
}
@ -3229,6 +3321,7 @@ void FullCodeGenerator::EmitIsRegExpEquivalent(ZoneList<Expression*>* args) {
__ mov(eax, Immediate(isolate()->factory()->true_value()));
__ bind(&done);
decrement_stack_height();
context()->Plug(eax);
}
@ -3532,6 +3625,7 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(ZoneList<Expression*>* args) {
__ add(Operand(esp), Immediate(3 * kPointerSize));
__ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
decrement_stack_height();
context()->Plug(eax);
}
@ -3584,6 +3678,7 @@ void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
// Prepare for calling JS runtime function.
__ mov(eax, GlobalObjectOperand());
__ push(FieldOperand(eax, GlobalObject::kBuiltinsOffset));
increment_stack_height();
}
// Push the arguments ("left-to-right").
@ -3606,6 +3701,11 @@ void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
// Call the C runtime function.
__ CallRuntime(expr->function(), arg_count);
}
decrement_stack_height(arg_count);
if (expr->is_jsruntime()) {
decrement_stack_height();
}
context()->Plug(eax);
}
@ -3627,6 +3727,7 @@ void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
VisitForStackValue(prop->key());
__ push(Immediate(Smi::FromInt(strict_mode_flag())));
__ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
decrement_stack_height(2);
context()->Plug(eax);
}
} else if (var != NULL) {
@ -3696,6 +3797,7 @@ void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
VisitForTypeofValue(expr->expression());
}
__ CallRuntime(Runtime::kTypeof, 1);
decrement_stack_height();
context()->Plug(eax);
break;
}
@ -3750,7 +3852,10 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
// Invalid left-hand sides are rewritten to have a 'throw ReferenceError'
// as the left-hand side.
if (!expr->expression()->IsValidLeftHandSide()) {
VisitForEffect(expr->expression());
ASSERT(expr->expression()->AsThrow() != NULL);
VisitInCurrentContext(expr->expression());
// Visiting Throw does not plug the context.
context()->Plug(eax);
return;
}
@ -3775,17 +3880,20 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
// Reserve space for result of postfix operation.
if (expr->is_postfix() && !context()->IsEffect()) {
__ push(Immediate(Smi::FromInt(0)));
increment_stack_height();
}
if (assign_type == NAMED_PROPERTY) {
// Put the object both on the stack and in the accumulator.
VisitForAccumulatorValue(prop->obj());
__ push(eax);
increment_stack_height();
EmitNamedPropertyLoad(prop);
} else {
VisitForStackValue(prop->obj());
VisitForAccumulatorValue(prop->key());
__ mov(edx, Operand(esp, 0));
__ push(eax);
increment_stack_height();
EmitKeyedPropertyLoad(prop);
}
}
@ -3816,6 +3924,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
switch (assign_type) {
case VARIABLE:
__ push(eax);
increment_stack_height();
break;
case NAMED_PROPERTY:
__ mov(Operand(esp, kPointerSize), eax);
@ -3889,6 +3998,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
case NAMED_PROPERTY: {
__ mov(ecx, prop->key()->AsLiteral()->handle());
__ pop(edx);
decrement_stack_height();
Handle<Code> ic = is_strict_mode()
? isolate()->builtins()->StoreIC_Initialize_Strict()
: isolate()->builtins()->StoreIC_Initialize();
@ -3906,6 +4016,8 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
case KEYED_PROPERTY: {
__ pop(ecx);
__ pop(edx);
decrement_stack_height();
decrement_stack_height();
Handle<Code> ic = is_strict_mode()
? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
: isolate()->builtins()->KeyedStoreIC_Initialize();
@ -4063,6 +4175,7 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
case Token::IN:
VisitForStackValue(expr->right());
__ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
decrement_stack_height(2);
PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
__ cmp(eax, isolate()->factory()->true_value());
Split(equal, if_true, if_false, fall_through);
@ -4072,6 +4185,7 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
VisitForStackValue(expr->right());
InstanceofStub stub(InstanceofStub::kNoFlags);
__ CallStub(&stub);
decrement_stack_height(2);
PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
__ test(eax, Operand(eax));
// The stub returns 0 for true.
@ -4116,6 +4230,7 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
default:
UNREACHABLE();
}
decrement_stack_height();
bool inline_smi_code = ShouldInlineSmiCase(op);
JumpPatchSite patch_site(masm_);

157
deps/v8/src/ia32/lithium-codegen-ia32.cc

@ -1393,44 +1393,135 @@ void LCodeGen::DoBranch(LBranch* instr) {
} else {
ASSERT(r.IsTagged());
Register reg = ToRegister(instr->InputAt(0));
if (instr->hydrogen()->value()->type().IsBoolean()) {
HType type = instr->hydrogen()->value()->type();
if (type.IsBoolean()) {
__ cmp(reg, factory()->true_value());
EmitBranch(true_block, false_block, equal);
} else if (type.IsSmi()) {
__ test(reg, Operand(reg));
EmitBranch(true_block, false_block, not_equal);
} else {
Label* true_label = chunk_->GetAssemblyLabel(true_block);
Label* false_label = chunk_->GetAssemblyLabel(false_block);
__ cmp(reg, factory()->undefined_value());
__ j(equal, false_label);
__ cmp(reg, factory()->true_value());
__ j(equal, true_label);
__ cmp(reg, factory()->false_value());
__ j(equal, false_label);
__ test(reg, Operand(reg));
__ j(equal, false_label);
__ JumpIfSmi(reg, true_label);
// Test for double values. Zero is false.
Label call_stub;
__ cmp(FieldOperand(reg, HeapObject::kMapOffset),
factory()->heap_number_map());
__ j(not_equal, &call_stub, Label::kNear);
__ fldz();
__ fld_d(FieldOperand(reg, HeapNumber::kValueOffset));
__ FCmp();
__ j(zero, false_label);
__ jmp(true_label);
// The conversion stub doesn't cause garbage collections so it's
// safe to not record a safepoint after the call.
__ bind(&call_stub);
ToBooleanStub stub(eax);
__ pushad();
__ push(reg);
__ CallStub(&stub);
__ test(eax, Operand(eax));
__ popad();
EmitBranch(true_block, false_block, not_zero);
ToBooleanStub::Types expected = instr->hydrogen()->expected_input_types();
// Avoid deopts in the case where we've never executed this path before.
if (expected.IsEmpty()) expected = ToBooleanStub::all_types();
if (expected.Contains(ToBooleanStub::UNDEFINED)) {
// undefined -> false.
__ cmp(reg, factory()->undefined_value());
__ j(equal, false_label);
} else if (expected.Contains(ToBooleanStub::INTERNAL_OBJECT)) {
// We've seen undefined for the first time -> deopt.
__ cmp(reg, factory()->undefined_value());
DeoptimizeIf(equal, instr->environment());
}
if (expected.Contains(ToBooleanStub::BOOLEAN)) {
// true -> true.
__ cmp(reg, factory()->true_value());
__ j(equal, true_label);
} else if (expected.Contains(ToBooleanStub::INTERNAL_OBJECT)) {
// We've seen a boolean for the first time -> deopt.
__ cmp(reg, factory()->true_value());
DeoptimizeIf(equal, instr->environment());
}
if (expected.Contains(ToBooleanStub::BOOLEAN)) {
// false -> false.
__ cmp(reg, factory()->false_value());
__ j(equal, false_label);
} else if (expected.Contains(ToBooleanStub::INTERNAL_OBJECT)) {
// We've seen a boolean for the first time -> deopt.
__ cmp(reg, factory()->false_value());
DeoptimizeIf(equal, instr->environment());
}
if (expected.Contains(ToBooleanStub::NULL_TYPE)) {
// 'null' -> false.
__ cmp(reg, factory()->null_value());
__ j(equal, false_label);
} else if (expected.Contains(ToBooleanStub::INTERNAL_OBJECT)) {
// We've seen null for the first time -> deopt.
__ cmp(reg, factory()->null_value());
DeoptimizeIf(equal, instr->environment());
}
if (expected.Contains(ToBooleanStub::SMI)) {
// Smis: 0 -> false, all other -> true.
__ test(reg, Operand(reg));
__ j(equal, false_label);
__ JumpIfSmi(reg, true_label);
} else if (expected.NeedsMap()) {
// If we need a map later and have a Smi -> deopt.
__ test(reg, Immediate(kSmiTagMask));
DeoptimizeIf(zero, instr->environment());
}
Register map = no_reg;
if (expected.NeedsMap()) {
map = ToRegister(instr->TempAt(0));
ASSERT(!map.is(reg));
__ mov(map, FieldOperand(reg, HeapObject::kMapOffset));
// Everything with a map could be undetectable, so check this now.
__ test_b(FieldOperand(map, Map::kBitFieldOffset),
1 << Map::kIsUndetectable);
// Undetectable -> false.
__ j(not_zero, false_label);
}
if (expected.Contains(ToBooleanStub::SPEC_OBJECT)) {
// spec object -> true.
__ CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE);
__ j(above_equal, true_label);
} else if (expected.Contains(ToBooleanStub::INTERNAL_OBJECT)) {
// We've seen a spec object for the first time -> deopt.
__ CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE);
DeoptimizeIf(above_equal, instr->environment());
}
if (expected.Contains(ToBooleanStub::STRING)) {
// String value -> false iff empty.
Label not_string;
__ CmpInstanceType(map, FIRST_NONSTRING_TYPE);
__ j(above_equal, &not_string, Label::kNear);
__ cmp(FieldOperand(reg, String::kLengthOffset), Immediate(0));
__ j(not_zero, true_label);
__ jmp(false_label);
__ bind(&not_string);
} else if (expected.Contains(ToBooleanStub::INTERNAL_OBJECT)) {
// We've seen a string for the first time -> deopt
__ CmpInstanceType(map, FIRST_NONSTRING_TYPE);
DeoptimizeIf(below, instr->environment());
}
if (expected.Contains(ToBooleanStub::HEAP_NUMBER)) {
// heap number -> false iff +0, -0, or NaN.
Label not_heap_number;
__ cmp(FieldOperand(reg, HeapObject::kMapOffset),
factory()->heap_number_map());
__ j(not_equal, &not_heap_number, Label::kNear);
__ fldz();
__ fld_d(FieldOperand(reg, HeapNumber::kValueOffset));
__ FCmp();
__ j(zero, false_label);
__ jmp(true_label);
__ bind(&not_heap_number);
} else if (expected.Contains(ToBooleanStub::INTERNAL_OBJECT)) {
// We've seen a heap number for the first time -> deopt.
__ cmp(FieldOperand(reg, HeapObject::kMapOffset),
factory()->heap_number_map());
DeoptimizeIf(equal, instr->environment());
}
if (expected.Contains(ToBooleanStub::INTERNAL_OBJECT)) {
// internal objects -> true
__ jmp(true_label);
} else {
// We've seen something for the first time -> deopt.
DeoptimizeIf(no_condition, instr->environment());
}
}
}
}
@ -2232,7 +2323,6 @@ void LCodeGen::DoLoadKeyedFastElement(LLoadKeyedFastElement* instr) {
void LCodeGen::DoLoadKeyedFastDoubleElement(
LLoadKeyedFastDoubleElement* instr) {
Register elements = ToRegister(instr->elements());
XMMRegister result = ToDoubleRegister(instr->result());
if (instr->hydrogen()->RequiresHoleCheck()) {
@ -3097,7 +3187,6 @@ void LCodeGen::DoStoreKeyedFastElement(LStoreKeyedFastElement* instr) {
void LCodeGen::DoStoreKeyedFastDoubleElement(
LStoreKeyedFastDoubleElement* instr) {
XMMRegister value = ToDoubleRegister(instr->value());
Register elements = ToRegister(instr->elements());
Register key = instr->key()->IsRegister() ? ToRegister(instr->key()) : no_reg;
Label have_value;

11
deps/v8/src/ia32/lithium-ia32.cc

@ -1041,7 +1041,16 @@ LInstruction* LChunkBuilder::DoBranch(HBranch* instr) {
: instr->SecondSuccessor();
return new LGoto(successor->block_id());
}
return new LBranch(UseRegisterAtStart(v));
ToBooleanStub::Types expected = instr->expected_input_types();
// We need a temporary register when we have to access the map *or* we have
// no type info yet, in which case we handle all cases (including the ones
// involving maps).
bool needs_temp = expected.NeedsMap() || expected.IsEmpty();
LOperand* temp = needs_temp ? TempRegister() : NULL;
LInstruction* branch = new LBranch(UseRegister(v), temp);
// When we handle all cases, we never deopt, so we don't need to assign the
// environment then.
return expected.IsAll() ? branch : AssignEnvironment(branch);
}

5
deps/v8/src/ia32/lithium-ia32.h

@ -876,10 +876,11 @@ class LConstantT: public LTemplateInstruction<1, 0, 0> {
};
class LBranch: public LControlInstruction<1, 0> {
class LBranch: public LControlInstruction<1, 1> {
public:
explicit LBranch(LOperand* value) {
explicit LBranch(LOperand* value, LOperand* temp) {
inputs_[0] = value;
temps_[0] = temp;
}
DECLARE_CONCRETE_INSTRUCTION(Branch, "branch")

7
deps/v8/src/ia32/macro-assembler-ia32.cc

@ -263,6 +263,13 @@ void MacroAssembler::SafePush(const Immediate& x) {
}
void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
// see ROOT_ACCESSOR macro in factory.h
Handle<Object> value(&isolate()->heap()->roots_address()[index]);
cmp(with, value);
}
void MacroAssembler::CmpObjectType(Register heap_object,
InstanceType type,
Register map) {

3
deps/v8/src/ia32/macro-assembler-ia32.h

@ -209,6 +209,9 @@ class MacroAssembler: public Assembler {
void SafeSet(Register dst, const Immediate& x);
void SafePush(const Immediate& x);
// Compare a register against a known root, e.g. undefined, null, true, ...
void CompareRoot(Register with, Heap::RootListIndex index);
// Compare object type for heap object.
// Incoming register is heap_object and outgoing register is map.
void CmpObjectType(Register heap_object, InstanceType type, Register map);

8
deps/v8/src/ia32/stub-cache-ia32.cc

@ -3981,10 +3981,12 @@ void KeyedStoreStubCompiler::GenerateStoreFastDoubleElement(
__ bind(&smi_value);
// Value is a smi. convert to a double and store.
__ SmiUntag(eax);
__ push(eax);
// Preserve original value.
__ mov(edx, eax);
__ SmiUntag(edx);
__ push(edx);
__ fild_s(Operand(esp, 0));
__ pop(eax);
__ pop(edx);
__ fstp_d(FieldOperand(edi, ecx, times_4, FixedDoubleArray::kHeaderSize));
__ ret(0);

37
deps/v8/src/ic.cc

@ -88,7 +88,8 @@ void IC::TraceIC(const char* type,
// function and the original code.
JSFunction* function = JSFunction::cast(frame->function());
function->PrintName();
int code_offset = address() - js_code->instruction_start();
int code_offset =
static_cast<int>(address() - js_code->instruction_start());
PrintF("+%d", code_offset);
} else {
PrintF("<unknown>");
@ -309,6 +310,7 @@ void IC::Clear(Address address) {
case Code::UNARY_OP_IC:
case Code::BINARY_OP_IC:
case Code::COMPARE_IC:
case Code::TO_BOOLEAN_IC:
// Clearing these is tricky and does not
// make any performance difference.
return;
@ -842,14 +844,6 @@ MaybeObject* KeyedCallIC::LoadFunction(State state,
}
#ifdef DEBUG
#define TRACE_IC_NAMED(msg, name) \
if (FLAG_trace_ic) PrintF(msg, *(name)->ToCString())
#else
#define TRACE_IC_NAMED(msg, name)
#endif
MaybeObject* LoadIC::Load(State state,
Handle<Object> object,
Handle<String> name) {
@ -2506,6 +2500,31 @@ RUNTIME_FUNCTION(Code*, CompareIC_Miss) {
}
RUNTIME_FUNCTION(MaybeObject*, ToBoolean_Patch) {
ASSERT(args.length() == 3);
HandleScope scope(isolate);
Handle<Object> object = args.at<Object>(0);
Register tos = Register::from_code(args.smi_at(1));
ToBooleanStub::Types old_types(args.smi_at(2));
ToBooleanStub::Types new_types(old_types);
bool to_boolean_value = new_types.Record(object);
old_types.TraceTransition(new_types);
ToBooleanStub stub(tos, new_types);
Handle<Code> code = stub.GetCode();
ToBooleanIC ic(isolate);
ic.patch(*code);
return Smi::FromInt(to_boolean_value ? 1 : 0);
}
void ToBooleanIC::patch(Code* code) {
set_target(code);
}
static const Address IC_utilities[] = {
#define ADDR(name) FUNCTION_ADDR(name),
IC_UTIL_LIST(ADDR)
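The ToBoolean_Patch runtime function above records the type of the value that just missed, builds a stub specialized for the enlarged type set, installs it at the call site, and still returns the correct boolean result for this call. A toy model of that patch cycle — purely illustrative, with a function pointer standing in for the patched code target:

#include <cstdint>
#include <iostream>

using Handler = bool (*)(double value);

static uint8_t observed_types = 0;      // bit set, as in ToBooleanStub::Types
static Handler call_site;               // stands in for the patched call target

static bool SpecializedHandler(double value) {
  return value != 0.0;                  // fast path for the types seen so far
}

static bool MissHandler(double value) {
  observed_types |= 1;                  // Record(): remember what we saw
  call_site = SpecializedHandler;       // patch(): install the new stub
  return value != 0.0;                  // still answer correctly on the miss
}

int main() {
  call_site = MissHandler;
  std::cout << call_site(3.14) << "\n"; // first call takes the miss path
  std::cout << call_site(0.0) << "\n";  // later calls hit the specialized stub
  return 0;
}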

12
deps/v8/src/ic.h

@ -59,7 +59,8 @@ namespace internal {
ICU(StoreInterceptorProperty) \
ICU(UnaryOp_Patch) \
ICU(BinaryOp_Patch) \
ICU(CompareIC_Miss)
ICU(CompareIC_Miss) \
ICU(ToBoolean_Patch)
//
// IC is the base class for LoadIC, StoreIC, CallIC, KeyedLoadIC,
// and KeyedStoreIC.
@ -720,6 +721,15 @@ class CompareIC: public IC {
Token::Value op_;
};
class ToBooleanIC: public IC {
public:
explicit ToBooleanIC(Isolate* isolate) : IC(NO_EXTRA_FRAME, isolate) { }
void patch(Code* code);
};
// Helper for BinaryOpIC and CompareIC.
void PatchInlinedSmiCode(Address address);

3
deps/v8/src/json-parser.h

@ -166,7 +166,8 @@ class JsonParser BASE_EMBEDDED {
template <bool seq_ascii>
Handle<Object> JsonParser<seq_ascii>::ParseJson(Handle<String> source) {
isolate_ = source->map()->isolate();
source_ = Handle<String>(source->TryFlattenGetString());
FlattenString(source);
source_ = source;
source_length_ = source_->length();
// Optimized fast case where we only have ASCII characters.

2
deps/v8/src/log-utils.cc

@ -86,8 +86,6 @@ void Log::Initialize() {
if (open_log_file) {
if (strcmp(FLAG_logfile, "-") == 0) {
OpenStdout();
} else if (strcmp(FLAG_logfile, "*") == 0) {
// Does nothing for now. Will be removed.
} else if (strcmp(FLAG_logfile, kLogToTemporaryFile) == 0) {
OpenTemporaryFile();
} else {

1
deps/v8/src/log.cc

@ -1400,6 +1400,7 @@ void Logger::LogCodeObject(Object* object) {
case Code::UNARY_OP_IC: // fall through
case Code::BINARY_OP_IC: // fall through
case Code::COMPARE_IC: // fall through
case Code::TO_BOOLEAN_IC: // fall through
case Code::STUB:
description =
CodeStub::MajorName(CodeStub::GetMajorKey(code_object), true);

3
deps/v8/src/macros.py

@ -44,7 +44,7 @@ const kApiSerialNumberOffset = 2;
const kApiConstructorOffset = 2;
const kApiPrototypeTemplateOffset = 5;
const kApiParentTemplateOffset = 6;
const kApiPrototypeAttributesOffset = 15;
const kApiFlagOffset = 14;
const NO_HINT = 0;
const NUMBER_HINT = 1;
@ -65,6 +65,7 @@ const msPerMonth = 2592000000;
# For apinatives.js
const kUninitialized = -1;
const kReadOnlyPrototypeBit = 3; # For FunctionTemplateInfo, matches objects.h
# Note: kDayZeroInJulianDay = ToJulianDay(1970, 0, 1).
const kInvalidDate = 'Invalid Date';

1
deps/v8/src/messages.js

@ -195,6 +195,7 @@ function FormatMessage(message) {
non_extensible_proto: ["%0", " is not extensible"],
handler_non_object: ["Proxy.", "%0", " called with non-object as handler"],
handler_trap_missing: ["Proxy handler ", "%0", " has no '", "%1", "' trap"],
handler_trap_must_be_callable: ["Proxy handler ", "%0", " has non-callable '", "%1", "' trap"],
handler_returned_false: ["Proxy handler ", "%0", " returned false for '", "%1", "' trap"],
handler_returned_undefined: ["Proxy handler ", "%0", " returned undefined for '", "%1", "' trap"],
proxy_prop_not_configurable: ["Trap ", "%1", " of proxy handler ", "%0", " returned non-configurable descriptor for property ", "%2"],

2
deps/v8/src/mips/full-codegen-mips.cc

@ -2761,7 +2761,7 @@ void FullCodeGenerator::EmitClassOf(ZoneList<Expression*>* args) {
// Objects with a non-function constructor have class 'Object'.
__ bind(&non_function_constructor);
__ LoadRoot(v0, Heap::kfunction_class_symbolRootIndex);
__ LoadRoot(v0, Heap::kObject_symbolRootIndex);
__ jmp(&done);
// Non-JS objects have class null.

15
deps/v8/src/mips/stub-cache-mips.cc

@ -4459,11 +4459,18 @@ void KeyedStoreStubCompiler::GenerateStoreFastDoubleElement(
} else {
destination = FloatingPointHelper::kCoreRegisters;
}
__ SmiUntag(value_reg, value_reg);
Register untagged_value = receiver_reg;
__ SmiUntag(untagged_value, value_reg);
FloatingPointHelper::ConvertIntToDouble(
masm, value_reg, destination,
f0, mantissa_reg, exponent_reg, // These are: double_dst, dst1, dst2.
scratch4, f2); // These are: scratch2, single_scratch.
masm,
untagged_value,
destination,
f0,
mantissa_reg,
exponent_reg,
scratch4,
f2);
if (destination == FloatingPointHelper::kFPURegisters) {
CpuFeatures::Scope scope(FPU);
__ sdc1(f0, MemOperand(scratch, 0));

70
deps/v8/src/objects-inl.h

@ -158,6 +158,12 @@ bool Object::IsString() {
}
bool Object::IsSpecObject() {
return Object::IsHeapObject()
&& HeapObject::cast(this)->map()->instance_type() >= FIRST_SPEC_OBJECT_TYPE;
}
bool Object::IsSymbol() {
if (!this->IsHeapObject()) return false;
uint32_t type = HeapObject::cast(this)->map()->instance_type();
@ -1333,6 +1339,8 @@ void JSObject::set_elements(HeapObject* value, WriteBarrierMode mode) {
ASSERT(map()->has_fast_elements() ==
(value->map() == GetHeap()->fixed_array_map() ||
value->map() == GetHeap()->fixed_cow_array_map()));
ASSERT(map()->has_fast_double_elements() ==
value->IsFixedDoubleArray());
ASSERT(value->HasValidElements());
WRITE_FIELD(this, kElementsOffset, value);
CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kElementsOffset, mode);
@ -1960,6 +1968,17 @@ void DescriptorArray::Swap(int first, int second) {
}
template<typename Shape, typename Key>
int HashTable<Shape, Key>::ComputeCapacity(int at_least_space_for) {
const int kMinCapacity = 32;
int capacity = RoundUpToPowerOf2(at_least_space_for * 2);
if (capacity < kMinCapacity) {
capacity = kMinCapacity; // Guarantee min capacity.
}
return capacity;
}
template<typename Shape, typename Key>
int HashTable<Shape, Key>::FindEntry(Key key) {
return FindEntry(GetIsolate(), key);
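The new ComputeCapacity() helper above sizes a hash table at twice the requested element count, rounded up to a power of two, with a floor of 32. A stand-alone sketch of that rule; RoundUpToPowerOf2 is reimplemented here only for illustration:

#include <cstdint>
#include <iostream>

static uint32_t RoundUpToPowerOf2(uint32_t x) {
  uint32_t p = 1;
  while (p < x) p <<= 1;
  return p;
}

static int ComputeCapacity(int at_least_space_for) {
  const int kMinCapacity = 32;
  int capacity = static_cast<int>(RoundUpToPowerOf2(at_least_space_for * 2));
  if (capacity < kMinCapacity) capacity = kMinCapacity;  // guarantee min capacity
  return capacity;
}

int main() {
  std::cout << ComputeCapacity(3) << "\n";    // 32  (floor applies)
  std::cout << ComputeCapacity(100) << "\n";  // 256 (200 rounded up)
  return 0;
}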
@ -2757,7 +2776,8 @@ int Code::major_key() {
ASSERT(kind() == STUB ||
kind() == UNARY_OP_IC ||
kind() == BINARY_OP_IC ||
kind() == COMPARE_IC);
kind() == COMPARE_IC ||
kind() == TO_BOOLEAN_IC);
return READ_BYTE_FIELD(this, kStubMajorKeyOffset);
}
@ -2766,7 +2786,8 @@ void Code::set_major_key(int major) {
ASSERT(kind() == STUB ||
kind() == UNARY_OP_IC ||
kind() == BINARY_OP_IC ||
kind() == COMPARE_IC);
kind() == COMPARE_IC ||
kind() == TO_BOOLEAN_IC);
ASSERT(0 <= major && major < 256);
WRITE_BYTE_FIELD(this, kStubMajorKeyOffset, major);
}
@ -2908,6 +2929,17 @@ void Code::set_compare_state(byte value) {
}
byte Code::to_boolean_state() {
ASSERT(is_to_boolean_ic_stub());
return READ_BYTE_FIELD(this, kToBooleanTypeOffset);
}
void Code::set_to_boolean_state(byte value) {
ASSERT(is_to_boolean_ic_stub());
WRITE_BYTE_FIELD(this, kToBooleanTypeOffset, value);
}
bool Code::is_inline_cache_stub() {
Kind kind = this->kind();
return kind >= FIRST_IC_KIND && kind <= LAST_IC_KIND;
@ -3249,8 +3281,6 @@ ACCESSORS(FunctionTemplateInfo, instance_call_handler, Object,
ACCESSORS(FunctionTemplateInfo, access_check_info, Object,
kAccessCheckInfoOffset)
ACCESSORS(FunctionTemplateInfo, flag, Smi, kFlagOffset)
ACCESSORS(FunctionTemplateInfo, prototype_attributes, Smi,
kPrototypeAttributesOffset)
ACCESSORS(ObjectTemplateInfo, constructor, Object, kConstructorOffset)
ACCESSORS(ObjectTemplateInfo, internal_field_count, Object,
@ -3305,6 +3335,8 @@ BOOL_ACCESSORS(FunctionTemplateInfo, flag, hidden_prototype,
BOOL_ACCESSORS(FunctionTemplateInfo, flag, undetectable, kUndetectableBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, needs_access_check,
kNeedsAccessCheckBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, read_only_prototype,
kReadOnlyPrototypeBit)
BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_expression,
kIsExpressionBit)
BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_toplevel,
@ -4222,6 +4254,11 @@ MaybeObject* JSObject::SetHiddenPropertiesObject(Object* hidden_obj) {
}
bool JSObject::HasHiddenProperties() {
return !GetHiddenProperties(OMIT_CREATION)->ToObjectChecked()->IsUndefined();
}
bool JSObject::HasElement(uint32_t index) {
return HasElementWithReceiver(this, index);
}
@ -4337,6 +4374,31 @@ MaybeObject* StringDictionaryShape::AsObject(String* key) {
}
bool ObjectHashTableShape::IsMatch(JSObject* key, Object* other) {
return key == JSObject::cast(other);
}
uint32_t ObjectHashTableShape::Hash(JSObject* key) {
MaybeObject* maybe_hash = key->GetIdentityHash(JSObject::OMIT_CREATION);
ASSERT(!maybe_hash->IsFailure());
return Smi::cast(maybe_hash->ToObjectUnchecked())->value();
}
uint32_t ObjectHashTableShape::HashForObject(JSObject* key, Object* other) {
MaybeObject* maybe_hash = JSObject::cast(other)->GetIdentityHash(
JSObject::OMIT_CREATION);
ASSERT(!maybe_hash->IsFailure());
return Smi::cast(maybe_hash->ToObjectUnchecked())->value();
}
MaybeObject* ObjectHashTableShape::AsObject(JSObject* key) {
return key;
}
void Map::ClearCodeCache(Heap* heap) {
// No write barrier is needed since empty_fixed_array is not in new space.
// Please note this function is used during marking:

33
deps/v8/src/objects-printer.cc

@ -1,4 +1,4 @@
// Copyright 2010 the V8 project authors. All rights reserved.
// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@ -282,6 +282,19 @@ void JSObject::PrintElements(FILE* out) {
}
break;
}
case FAST_DOUBLE_ELEMENTS: {
// Print in array notation for non-sparse arrays.
FixedDoubleArray* p = FixedDoubleArray::cast(elements());
for (int i = 0; i < p->length(); i++) {
if (p->is_the_hole(i)) {
PrintF(out, " %d: <the hole>", i);
} else {
PrintF(out, " %d: %g", i, p->get(i));
}
PrintF(out, "\n");
}
break;
}
case EXTERNAL_PIXEL_ELEMENTS: {
ExternalPixelArray* p = ExternalPixelArray::cast(elements());
for (int i = 0; i < p->length(); i++) {
@ -360,9 +373,6 @@ void JSObject::PrintElements(FILE* out) {
}
break;
}
default:
UNREACHABLE();
break;
}
}
@ -550,6 +560,21 @@ void String::StringPrint(FILE* out) {
}
// This method is only meant to be called from gdb for debugging purposes.
// Since the string can also be in two-byte encoding, non-ascii characters
// will be ignored in the output.
char* String::ToAsciiArray() {
// Static so that subsequent calls free previously allocated space.
// This also means that previous results will be overwritten.
static char* buffer = NULL;
if (buffer != NULL) free(buffer);
buffer = new char[length()+1];
WriteToFlat(this, buffer, 0, length());
buffer[length()] = 0;
return buffer;
}
void JSProxy::JSProxyPrint(FILE* out) {
HeapObject::PrintHeader(out, "JSProxy");
PrintF(out, " - map = 0x%p\n", reinterpret_cast<void*>(map()));

482
deps/v8/src/objects.cc

@ -2331,7 +2331,7 @@ void JSProxy::Fix() {
Handle<JSProxy> self(this);
isolate->factory()->BecomeJSObject(self);
ASSERT(IsJSObject());
ASSERT(self->IsJSObject());
// TODO(rossberg): recognize function proxies.
}
@ -2471,6 +2471,9 @@ MaybeObject* JSObject::SetPropertyForResult(LookupResult* result,
// callback setter removed. The two lines looking up the LookupResult
// result are also added. If one of the functions is changed, the other
// should be.
// Note that this method cannot be used to set the prototype of a function
// because ConvertDescriptorToField(), which is called in "case CALLBACKS:",
// doesn't handle function prototypes correctly.
MaybeObject* JSObject::SetLocalPropertyIgnoreAttributes(
String* name,
Object* value,
@ -2896,9 +2899,12 @@ MaybeObject* JSObject::NormalizeElements() {
int length = IsJSArray()
? Smi::cast(JSArray::cast(this)->length())->value()
: array->length();
int old_capacity = 0;
int used_elements = 0;
GetElementsCapacityAndUsage(&old_capacity, &used_elements);
NumberDictionary* dictionary = NULL;
{ Object* object;
MaybeObject* maybe = NumberDictionary::Allocate(length);
MaybeObject* maybe = NumberDictionary::Allocate(used_elements);
if (!maybe->ToObject(&object)) return maybe;
dictionary = NumberDictionary::cast(object);
}
@ -2961,6 +2967,91 @@ MaybeObject* JSObject::NormalizeElements() {
}
MaybeObject* JSObject::GetHiddenProperties(HiddenPropertiesFlag flag) {
Isolate* isolate = GetIsolate();
Heap* heap = isolate->heap();
Object* holder = BypassGlobalProxy();
if (holder->IsUndefined()) return heap->undefined_value();
JSObject* obj = JSObject::cast(holder);
if (obj->HasFastProperties()) {
// If the object has fast properties, check whether the first slot
// in the descriptor array matches the hidden symbol. Since the
// hidden symbol's hash code is zero (and no other string has hash
// code zero), it will always occupy the first entry if present.
DescriptorArray* descriptors = obj->map()->instance_descriptors();
if ((descriptors->number_of_descriptors() > 0) &&
(descriptors->GetKey(0) == heap->hidden_symbol()) &&
descriptors->IsProperty(0)) {
ASSERT(descriptors->GetType(0) == FIELD);
return obj->FastPropertyAt(descriptors->GetFieldIndex(0));
}
}
// Only attempt to find the hidden properties in the local object and not
// in the prototype chain.
if (!obj->HasHiddenPropertiesObject()) {
// Hidden properties object not found. Allocate a new hidden properties
// object if requested. Otherwise return the undefined value.
if (flag == ALLOW_CREATION) {
Object* hidden_obj;
{ MaybeObject* maybe_obj = heap->AllocateJSObject(
isolate->context()->global_context()->object_function());
if (!maybe_obj->ToObject(&hidden_obj)) return maybe_obj;
}
return obj->SetHiddenPropertiesObject(hidden_obj);
} else {
return heap->undefined_value();
}
}
return obj->GetHiddenPropertiesObject();
}
MaybeObject* JSObject::GetIdentityHash(HiddenPropertiesFlag flag) {
Isolate* isolate = GetIsolate();
Object* hidden_props_obj;
{ MaybeObject* maybe_obj = GetHiddenProperties(flag);
if (!maybe_obj->ToObject(&hidden_props_obj)) return maybe_obj;
}
if (!hidden_props_obj->IsJSObject()) {
// We failed to create hidden properties. That's a detached
// global proxy.
ASSERT(hidden_props_obj->IsUndefined());
return Smi::FromInt(0);
}
JSObject* hidden_props = JSObject::cast(hidden_props_obj);
String* hash_symbol = isolate->heap()->identity_hash_symbol();
{
// Note that HasLocalProperty() can cause a GC in the general case in the
// presence of interceptors.
AssertNoAllocation no_alloc;
if (hidden_props->HasLocalProperty(hash_symbol)) {
MaybeObject* hash = hidden_props->GetProperty(hash_symbol);
return Smi::cast(hash->ToObjectChecked());
}
}
int hash_value;
int attempts = 0;
do {
// Generate a random 32-bit hash value but limit range to fit
// within a smi.
hash_value = V8::Random(isolate) & Smi::kMaxValue;
attempts++;
} while (hash_value == 0 && attempts < 30);
hash_value = hash_value != 0 ? hash_value : 1; // never return 0
Smi* hash = Smi::FromInt(hash_value);
{ MaybeObject* result = hidden_props->SetLocalPropertyIgnoreAttributes(
hash_symbol,
hash,
static_cast<PropertyAttributes>(None));
if (result->IsFailure()) return result;
}
return hash;
}
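The loop above retries only while the generator keeps producing zero; the final clamp to 1 guarantees a non-zero hash even after 30 failed attempts. A minimal standalone sketch of the same clamping logic, assuming a caller-supplied random32 source and a 31-bit smi payload (both assumptions made purely for illustration):

#include <cstdint>

uint32_t GenerateIdentityHashSketch(uint32_t (*random32)()) {
  const uint32_t kAssumedMaxSmi = (1u << 30) - 1;  // assumed smi payload width
  uint32_t hash = 0;
  int attempts = 0;
  do {
    hash = random32() & kAssumedMaxSmi;  // keep the value inside smi range
    attempts++;
  } while (hash == 0 && attempts < 30);
  return hash != 0 ? hash : 1;  // zero is reserved for "no hash assigned"
}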
MaybeObject* JSObject::DeletePropertyPostInterceptor(String* name,
DeleteMode mode) {
// Check local property, ignore interceptor.
@ -3656,6 +3747,7 @@ MaybeObject* JSObject::DefineGetterSetter(String* name,
if (is_element) {
switch (GetElementsKind()) {
case FAST_ELEMENTS:
case FAST_DOUBLE_ELEMENTS:
break;
case EXTERNAL_PIXEL_ELEMENTS:
case EXTERNAL_BYTE_ELEMENTS:
@ -3666,7 +3758,6 @@ MaybeObject* JSObject::DefineGetterSetter(String* name,
case EXTERNAL_UNSIGNED_INT_ELEMENTS:
case EXTERNAL_FLOAT_ELEMENTS:
case EXTERNAL_DOUBLE_ELEMENTS:
case FAST_DOUBLE_ELEMENTS:
// Ignore getters and setters on pixel and external array
// elements.
return heap->undefined_value();
@ -3905,6 +3996,7 @@ MaybeObject* JSObject::DefineAccessor(AccessorInfo* info) {
// Accessors overwrite previous callbacks (cf. with getters/setters).
switch (GetElementsKind()) {
case FAST_ELEMENTS:
case FAST_DOUBLE_ELEMENTS:
break;
case EXTERNAL_PIXEL_ELEMENTS:
case EXTERNAL_BYTE_ELEMENTS:
@ -3915,7 +4007,6 @@ MaybeObject* JSObject::DefineAccessor(AccessorInfo* info) {
case EXTERNAL_UNSIGNED_INT_ELEMENTS:
case EXTERNAL_FLOAT_ELEMENTS:
case EXTERNAL_DOUBLE_ELEMENTS:
case FAST_DOUBLE_ELEMENTS:
// Ignore getters and setters on pixel and external array
// elements.
return isolate->heap()->undefined_value();
@ -4688,6 +4779,9 @@ MaybeObject* FixedArray::AddKeysFromJSArray(JSArray* array) {
switch (array->GetElementsKind()) {
case JSObject::FAST_ELEMENTS:
return UnionOfKeys(FixedArray::cast(array->elements()));
case JSObject::FAST_DOUBLE_ELEMENTS:
return UnionOfDoubleKeys(FixedDoubleArray::cast(array->elements()));
break;
case JSObject::DICTIONARY_ELEMENTS: {
NumberDictionary* dict = array->element_dictionary();
int size = dict->NumberOfElements();
@ -4722,7 +4816,6 @@ MaybeObject* FixedArray::AddKeysFromJSArray(JSArray* array) {
case JSObject::EXTERNAL_FLOAT_ELEMENTS:
case JSObject::EXTERNAL_DOUBLE_ELEMENTS:
case JSObject::EXTERNAL_PIXEL_ELEMENTS:
case JSObject::FAST_DOUBLE_ELEMENTS:
break;
}
UNREACHABLE();
@ -4784,6 +4877,69 @@ MaybeObject* FixedArray::UnionOfKeys(FixedArray* other) {
}
MaybeObject* FixedArray::UnionOfDoubleKeys(FixedDoubleArray* other) {
int len0 = length();
#ifdef DEBUG
if (FLAG_enable_slow_asserts) {
for (int i = 0; i < len0; i++) {
ASSERT(get(i)->IsString() || get(i)->IsNumber());
}
}
#endif
int len1 = other->length();
// Optimize if 'other' is empty.
// We cannot optimize if 'this' is empty, as other may have holes
// or non-keys.
if (len1 == 0) return this;
// Compute how many elements are not in this.
int extra = 0;
Heap* heap = GetHeap();
Object* obj;
for (int y = 0; y < len1; y++) {
if (!other->is_the_hole(y)) {
MaybeObject* maybe_obj = heap->NumberFromDouble(other->get(y));
if (!maybe_obj->ToObject(&obj)) return maybe_obj;
if (!HasKey(this, obj)) extra++;
}
}
if (extra == 0) return this;
// Allocate the result
{ MaybeObject* maybe_obj = GetHeap()->AllocateFixedArray(len0 + extra);
if (!maybe_obj->ToObject(&obj)) return maybe_obj;
}
// Fill in the content
FixedArray* result = FixedArray::cast(obj);
{
// Limit the scope of the AssertNoAllocation
AssertNoAllocation no_gc;
WriteBarrierMode mode = result->GetWriteBarrierMode(no_gc);
for (int i = 0; i < len0; i++) {
Object* e = get(i);
ASSERT(e->IsString() || e->IsNumber());
result->set(i, e, mode);
}
}
// Fill in the extra keys.
int index = 0;
for (int y = 0; y < len1; y++) {
if (!other->is_the_hole(y)) {
MaybeObject* maybe_obj = heap->NumberFromDouble(other->get(y));
if (!maybe_obj->ToObject(&obj)) return maybe_obj;
if (!HasKey(this, obj)) {
result->set(len0 + index, obj);
index++;
}
}
}
ASSERT(extra == index);
return result;
}
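The hunk above computes the union in two passes: first it counts how many of other's keys are missing (bailing out early when the answer is zero), then it copies the existing keys and appends the missing ones. A rough std::vector<double> equivalent, with holes and number canonicalization left out (an illustration only, not the V8 code):

#include <algorithm>
#include <cstddef>
#include <vector>

std::vector<double> UnionOfDoubleKeysSketch(const std::vector<double>& base,
                                            const std::vector<double>& other) {
  auto has_key = [&base](double key) {
    return std::find(base.begin(), base.end(), key) != base.end();
  };
  // Pass 1: count keys of 'other' that are not already present.
  size_t extra = 0;
  for (double key : other) {
    if (!has_key(key)) extra++;
  }
  if (extra == 0) return base;
  // Pass 2: copy the existing keys, then append the missing ones in order.
  std::vector<double> result = base;
  result.reserve(base.size() + extra);
  for (double key : other) {
    if (!has_key(key)) result.push_back(key);
  }
  return result;
}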
MaybeObject* FixedArray::CopySize(int new_length) {
Heap* heap = GetHeap();
if (new_length == 0) return heap->empty_fixed_array();
@ -7207,6 +7363,7 @@ const char* Code::Kind2String(Kind kind) {
case UNARY_OP_IC: return "UNARY_OP_IC";
case BINARY_OP_IC: return "BINARY_OP_IC";
case COMPARE_IC: return "COMPARE_IC";
case TO_BOOLEAN_IC: return "TO_BOOLEAN_IC";
}
UNREACHABLE();
return NULL;
@ -7539,9 +7696,10 @@ MaybeObject* JSObject::SetSlowElements(Object* len) {
switch (GetElementsKind()) {
case FAST_ELEMENTS: {
case FAST_DOUBLE_ELEMENTS:
// Make sure we never try to shrink dense arrays into sparse arrays.
ASSERT(static_cast<uint32_t>(FixedArray::cast(elements())->length()) <=
new_length);
ASSERT(static_cast<uint32_t>(
FixedArrayBase::cast(elements())->length()) <= new_length);
MaybeObject* result = NormalizeElements();
if (result->IsFailure()) return result;
@ -7570,7 +7728,6 @@ MaybeObject* JSObject::SetSlowElements(Object* len) {
case EXTERNAL_FLOAT_ELEMENTS:
case EXTERNAL_DOUBLE_ELEMENTS:
case EXTERNAL_PIXEL_ELEMENTS:
case FAST_DOUBLE_ELEMENTS:
UNREACHABLE();
break;
}
@ -7685,8 +7842,7 @@ MaybeObject* JSObject::SetElementsLength(Object* len) {
}
int min = NewElementsCapacity(old_capacity);
int new_capacity = value > min ? value : min;
if (new_capacity <= kMaxFastElementsLength ||
!ShouldConvertToSlowElements(new_capacity)) {
if (!ShouldConvertToSlowElements(new_capacity)) {
MaybeObject* result;
if (GetElementsKind() == FAST_ELEMENTS) {
result = SetFastElementsCapacityAndLength(new_capacity, value);
@ -7912,6 +8068,17 @@ bool JSObject::HasElementPostInterceptor(JSReceiver* receiver, uint32_t index) {
}
break;
}
case FAST_DOUBLE_ELEMENTS: {
uint32_t length = IsJSArray() ?
static_cast<uint32_t>
(Smi::cast(JSArray::cast(this)->length())->value()) :
static_cast<uint32_t>(FixedDoubleArray::cast(elements())->length());
if ((index < length) &&
!FixedDoubleArray::cast(elements())->is_the_hole(index)) {
return true;
}
break;
}
case EXTERNAL_PIXEL_ELEMENTS: {
ExternalPixelArray* pixels = ExternalPixelArray::cast(elements());
if (index < static_cast<uint32_t>(pixels->length())) {
@ -7926,8 +8093,7 @@ bool JSObject::HasElementPostInterceptor(JSReceiver* receiver, uint32_t index) {
case EXTERNAL_INT_ELEMENTS:
case EXTERNAL_UNSIGNED_INT_ELEMENTS:
case EXTERNAL_FLOAT_ELEMENTS:
case EXTERNAL_DOUBLE_ELEMENTS:
case FAST_DOUBLE_ELEMENTS: {
case EXTERNAL_DOUBLE_ELEMENTS: {
ExternalArray* array = ExternalArray::cast(elements());
if (index < static_cast<uint32_t>(array->length())) {
return true;
@ -8038,6 +8204,17 @@ JSObject::LocalElementType JSObject::HasLocalElement(uint32_t index) {
}
break;
}
case FAST_DOUBLE_ELEMENTS: {
uint32_t length = IsJSArray() ?
static_cast<uint32_t>
(Smi::cast(JSArray::cast(this)->length())->value()) :
static_cast<uint32_t>(FixedDoubleArray::cast(elements())->length());
if ((index < length) &&
!FixedDoubleArray::cast(elements())->is_the_hole(index)) {
return FAST_ELEMENT;
}
break;
}
case EXTERNAL_PIXEL_ELEMENTS: {
ExternalPixelArray* pixels = ExternalPixelArray::cast(elements());
if (index < static_cast<uint32_t>(pixels->length())) return FAST_ELEMENT;
@ -8055,9 +8232,6 @@ JSObject::LocalElementType JSObject::HasLocalElement(uint32_t index) {
if (index < static_cast<uint32_t>(array->length())) return FAST_ELEMENT;
break;
}
case FAST_DOUBLE_ELEMENTS:
UNREACHABLE();
break;
case DICTIONARY_ELEMENTS: {
if (element_dictionary()->FindEntry(index) !=
NumberDictionary::kNotFound) {
@ -8424,8 +8598,7 @@ MaybeObject* JSObject::SetFastElement(uint32_t index,
if ((index - length) < kMaxGap) {
// Try allocating extra space.
int new_capacity = NewElementsCapacity(index + 1);
if (new_capacity <= kMaxFastElementsLength ||
!ShouldConvertToSlowElements(new_capacity)) {
if (!ShouldConvertToSlowElements(new_capacity)) {
ASSERT(static_cast<uint32_t>(new_capacity) > index);
Object* new_elements;
MaybeObject* maybe =
@ -8533,7 +8706,7 @@ MaybeObject* JSObject::SetDictionaryElement(uint32_t index,
} else {
new_length = dictionary->max_number_key() + 1;
}
MaybeObject* result = ShouldConvertToFastDoubleElements()
MaybeObject* result = CanConvertToFastDoubleElements()
? SetFastDoubleElementsCapacityAndLength(new_length, new_length)
: SetFastElementsCapacityAndLength(new_length, new_length);
if (result->IsFailure()) return result;
@ -8607,8 +8780,7 @@ MUST_USE_RESULT MaybeObject* JSObject::SetFastDoubleElement(
if ((index - elms_length) < kMaxGap) {
// Try allocating extra space.
int new_capacity = NewElementsCapacity(index+1);
if (new_capacity <= kMaxFastElementsLength ||
!ShouldConvertToSlowElements(new_capacity)) {
if (!ShouldConvertToSlowElements(new_capacity)) {
ASSERT(static_cast<uint32_t>(new_capacity) > index);
Object* obj;
{ MaybeObject* maybe_obj =
@ -9076,7 +9248,15 @@ MaybeObject* JSObject::GetExternalElement(uint32_t index) {
bool JSObject::HasDenseElements() {
int capacity = 0;
int number_of_elements = 0;
int used = 0;
GetElementsCapacityAndUsage(&capacity, &used);
return (capacity == 0) || (used > (capacity / 2));
}
void JSObject::GetElementsCapacityAndUsage(int* capacity, int* used) {
*capacity = 0;
*used = 0;
FixedArrayBase* backing_store_base = FixedArrayBase::cast(elements());
FixedArray* backing_store = NULL;
@ -9087,34 +9267,33 @@ bool JSObject::HasDenseElements() {
backing_store = FixedArray::cast(backing_store_base);
if (backing_store->IsDictionary()) {
NumberDictionary* dictionary = NumberDictionary::cast(backing_store);
capacity = dictionary->Capacity();
number_of_elements = dictionary->NumberOfElements();
*capacity = dictionary->Capacity();
*used = dictionary->NumberOfElements();
break;
}
// Fall through.
case FAST_ELEMENTS:
backing_store = FixedArray::cast(backing_store_base);
capacity = backing_store->length();
for (int i = 0; i < capacity; ++i) {
if (!backing_store->get(i)->IsTheHole()) ++number_of_elements;
*capacity = backing_store->length();
for (int i = 0; i < *capacity; ++i) {
if (!backing_store->get(i)->IsTheHole()) ++(*used);
}
break;
case DICTIONARY_ELEMENTS: {
NumberDictionary* dictionary =
NumberDictionary::cast(FixedArray::cast(elements()));
capacity = dictionary->Capacity();
number_of_elements = dictionary->NumberOfElements();
*capacity = dictionary->Capacity();
*used = dictionary->NumberOfElements();
break;
}
case FAST_DOUBLE_ELEMENTS: {
FixedDoubleArray* elms = FixedDoubleArray::cast(elements());
capacity = elms->length();
for (int i = 0; i < capacity; i++) {
if (!elms->is_the_hole(i)) number_of_elements++;
*capacity = elms->length();
for (int i = 0; i < *capacity; i++) {
if (!elms->is_the_hole(i)) ++(*used);
}
break;
}
case EXTERNAL_PIXEL_ELEMENTS:
case EXTERNAL_BYTE_ELEMENTS:
case EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
case EXTERNAL_SHORT_ELEMENTS:
@ -9122,30 +9301,34 @@ bool JSObject::HasDenseElements() {
case EXTERNAL_INT_ELEMENTS:
case EXTERNAL_UNSIGNED_INT_ELEMENTS:
case EXTERNAL_FLOAT_ELEMENTS:
case EXTERNAL_DOUBLE_ELEMENTS: {
return true;
}
case EXTERNAL_DOUBLE_ELEMENTS:
case EXTERNAL_PIXEL_ELEMENTS:
// External arrays are considered 100% used.
ExternalArray* external_array = ExternalArray::cast(elements());
*capacity = external_array->length();
*used = external_array->length();
break;
}
return (capacity == 0) || (number_of_elements > (capacity / 2));
}
bool JSObject::ShouldConvertToSlowElements(int new_capacity) {
// Keep the array in fast case if the current backing storage is
// almost filled and if the new capacity is no more than twice the
// old capacity.
int elements_length = 0;
if (elements()->map() == GetHeap()->non_strict_arguments_elements_map()) {
FixedArray* backing_store = FixedArray::cast(elements());
elements_length = FixedArray::cast(backing_store->get(1))->length();
} else if (HasFastElements()) {
elements_length = FixedArray::cast(elements())->length();
} else if (HasFastDoubleElements()) {
elements_length = FixedDoubleArray::cast(elements())->length();
} else {
UNREACHABLE();
STATIC_ASSERT(kMaxUncheckedOldFastElementsLength <=
kMaxUncheckedFastElementsLength);
if (new_capacity <= kMaxUncheckedOldFastElementsLength ||
(new_capacity <= kMaxUncheckedFastElementsLength &&
GetHeap()->InNewSpace(this))) {
return false;
}
return !HasDenseElements() || ((new_capacity / 2) > elements_length);
// If the fast-case backing storage takes up roughly three times as
// much space (in machine words) as a dictionary backing storage
// would, the object should have slow elements.
int old_capacity = 0;
int used_elements = 0;
GetElementsCapacityAndUsage(&old_capacity, &used_elements);
int dictionary_size = NumberDictionary::ComputeCapacity(used_elements) *
NumberDictionary::kEntrySize;
return 3 * dictionary_size <= new_capacity;
}
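To put numbers on the new heuristic above (entry size and ComputeCapacity behaviour are assumptions for the example, not values taken from the headers): with 10 used elements, an assumed dictionary capacity of 20 entries at 3 words each needs about 60 words, so any proposed fast backing store of 180 words or more would tip the object into slow elements.

bool WouldConvertToSlowSketch(int new_capacity, int used_elements) {
  const int kAssumedEntrySize = 3;                      // key, value, details
  int assumed_dictionary_capacity = 2 * used_elements;  // stand-in for ComputeCapacity()
  int dictionary_size = assumed_dictionary_capacity * kAssumedEntrySize;
  // Slow elements win once the fast store costs at least 3x the dictionary.
  return 3 * dictionary_size <= new_capacity;
}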
@ -9168,20 +9351,21 @@ bool JSObject::ShouldConvertToFastElements() {
// dictionary, we cannot go back to fast case.
if (dictionary->requires_slow_elements()) return false;
// If the dictionary backing storage takes up roughly half as much
// space as a fast-case backing storage would the array should have
// fast elements.
uint32_t length = 0;
// space (in machine words) as a fast-case backing storage would,
// the object should have fast elements.
uint32_t array_size = 0;
if (IsJSArray()) {
CHECK(JSArray::cast(this)->length()->ToArrayIndex(&length));
CHECK(JSArray::cast(this)->length()->ToArrayIndex(&array_size));
} else {
length = dictionary->max_number_key();
array_size = dictionary->max_number_key();
}
return static_cast<uint32_t>(dictionary->Capacity()) >=
(length / (2 * NumberDictionary::kEntrySize));
uint32_t dictionary_size = static_cast<uint32_t>(dictionary->Capacity()) *
NumberDictionary::kEntrySize;
return 2 * dictionary_size >= array_size;
}
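The fast-case direction above mirrors that check from the other side: the dictionary is abandoned once it occupies at least half as many words as a dense array covering array_size would. A companion sketch under the same assumed entry size:

bool WouldConvertToFastSketch(unsigned array_size, unsigned dictionary_capacity) {
  const unsigned kAssumedEntrySize = 3;  // words per dictionary entry (assumption)
  unsigned dictionary_size = dictionary_capacity * kAssumedEntrySize;
  return 2 * dictionary_size >= array_size;
}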
bool JSObject::ShouldConvertToFastDoubleElements() {
bool JSObject::CanConvertToFastDoubleElements() {
if (FLAG_unbox_double_arrays) {
ASSERT(HasDictionaryElements());
NumberDictionary* dictionary = NumberDictionary::cast(elements());
@ -9371,6 +9555,15 @@ bool JSObject::HasRealElementProperty(uint32_t index) {
return (index < length) &&
!FixedArray::cast(elements())->get(index)->IsTheHole();
}
case FAST_DOUBLE_ELEMENTS: {
uint32_t length = IsJSArray() ?
static_cast<uint32_t>(
Smi::cast(JSArray::cast(this)->length())->value()) :
static_cast<uint32_t>(FixedDoubleArray::cast(elements())->length());
return (index < length) &&
!FixedDoubleArray::cast(elements())->is_the_hole(index);
break;
}
case EXTERNAL_PIXEL_ELEMENTS: {
ExternalPixelArray* pixels = ExternalPixelArray::cast(elements());
return index < static_cast<uint32_t>(pixels->length());
@ -9386,9 +9579,6 @@ bool JSObject::HasRealElementProperty(uint32_t index) {
ExternalArray* array = ExternalArray::cast(elements());
return index < static_cast<uint32_t>(array->length());
}
case FAST_DOUBLE_ELEMENTS:
UNREACHABLE();
break;
case DICTIONARY_ELEMENTS: {
return element_dictionary()->FindEntry(index)
!= NumberDictionary::kNotFound;
@ -10106,11 +10296,8 @@ void HashTable<Shape, Key>::IterateElements(ObjectVisitor* v) {
template<typename Shape, typename Key>
MaybeObject* HashTable<Shape, Key>::Allocate(int at_least_space_for,
PretenureFlag pretenure) {
const int kMinCapacity = 32;
int capacity = RoundUpToPowerOf2(at_least_space_for * 2);
if (capacity < kMinCapacity) {
capacity = kMinCapacity; // Guarantee min capacity.
} else if (capacity > HashTable::kMaxCapacity) {
int capacity = ComputeCapacity(at_least_space_for);
if (capacity > HashTable::kMaxCapacity) {
return Failure::OutOfMemoryException();
}
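The inlined rounding that used to live here moved into ComputeCapacity(). Based on the removed lines, a plausible shape of that helper is a power-of-two round-up of twice the request with a small minimum; the real helper may pick a different minimum, so treat this as a sketch:

int ComputeCapacitySketch(int at_least_space_for) {
  const int kAssumedMinCapacity = 32;  // the removed code used 32 as the floor
  int capacity = kAssumedMinCapacity;
  while (capacity < at_least_space_for * 2) {
    capacity *= 2;  // stay a power of two, at least twice the requested size
  }
  return capacity;
}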
@ -10278,6 +10465,8 @@ template class HashTable<CompilationCacheShape, HashTableKey*>;
template class HashTable<MapCacheShape, HashTableKey*>;
template class HashTable<ObjectHashTableShape, JSObject*>;
template class Dictionary<StringDictionaryShape, String*>;
template class Dictionary<NumberDictionaryShape, uint32_t>;
@ -10490,19 +10679,19 @@ MaybeObject* JSObject::PrepareElementsForSort(uint32_t limit) {
set_map(new_map);
set_elements(fast_elements);
} else {
} else if (!HasFastDoubleElements()) {
Object* obj;
{ MaybeObject* maybe_obj = EnsureWritableFastElements();
if (!maybe_obj->ToObject(&obj)) return maybe_obj;
}
}
ASSERT(HasFastElements());
ASSERT(HasFastElements() || HasFastDoubleElements());
// Collect holes at the end, undefined before that and the rest at the
// start, and return the number of non-hole, non-undefined values.
FixedArray* elements = FixedArray::cast(this->elements());
uint32_t elements_length = static_cast<uint32_t>(elements->length());
FixedArrayBase* elements_base = FixedArrayBase::cast(this->elements());
uint32_t elements_length = static_cast<uint32_t>(elements_base->length());
if (limit > elements_length) {
limit = elements_length ;
}
@ -10521,47 +10710,78 @@ MaybeObject* JSObject::PrepareElementsForSort(uint32_t limit) {
result_double = HeapNumber::cast(new_double);
}
AssertNoAllocation no_alloc;
// Split elements into defined, undefined and the_hole, in that order.
// Only count locations for undefined and the hole, and fill them afterwards.
WriteBarrierMode write_barrier = elements->GetWriteBarrierMode(no_alloc);
unsigned int undefs = limit;
unsigned int holes = limit;
// Assume most arrays contain no holes and undefined values, so minimize the
// number of stores of non-undefined, non-the-hole values.
for (unsigned int i = 0; i < undefs; i++) {
Object* current = elements->get(i);
if (current->IsTheHole()) {
holes--;
undefs--;
} else if (current->IsUndefined()) {
undefs--;
} else {
continue;
uint32_t result = 0;
if (elements_base->map() == heap->fixed_double_array_map()) {
FixedDoubleArray* elements = FixedDoubleArray::cast(elements_base);
// Split elements into defined and the_hole, in that order.
unsigned int holes = limit;
// Assume most arrays contain no holes and undefined values, so minimize the
// number of stores of non-undefined, non-the-hole values.
for (unsigned int i = 0; i < holes; i++) {
if (elements->is_the_hole(i)) {
holes--;
} else {
continue;
}
// Position i needs to be filled.
while (holes > i) {
if (elements->is_the_hole(holes)) {
holes--;
} else {
elements->set(i, elements->get(holes));
break;
}
}
}
// Position i needs to be filled.
while (undefs > i) {
current = elements->get(undefs);
result = holes;
while (holes < limit) {
elements->set_the_hole(holes);
holes++;
}
} else {
FixedArray* elements = FixedArray::cast(elements_base);
AssertNoAllocation no_alloc;
// Split elements into defined, undefined and the_hole, in that order. Only
// count locations for undefined and the hole, and fill them afterwards.
WriteBarrierMode write_barrier = elements->GetWriteBarrierMode(no_alloc);
unsigned int undefs = limit;
unsigned int holes = limit;
// Assume most arrays contain no holes and undefined values, so minimize the
// number of stores of non-undefined, non-the-hole values.
for (unsigned int i = 0; i < undefs; i++) {
Object* current = elements->get(i);
if (current->IsTheHole()) {
holes--;
undefs--;
} else if (current->IsUndefined()) {
undefs--;
} else {
elements->set(i, current, write_barrier);
break;
continue;
}
// Position i needs to be filled.
while (undefs > i) {
current = elements->get(undefs);
if (current->IsTheHole()) {
holes--;
undefs--;
} else if (current->IsUndefined()) {
undefs--;
} else {
elements->set(i, current, write_barrier);
break;
}
}
}
}
uint32_t result = undefs;
while (undefs < holes) {
elements->set_undefined(undefs);
undefs++;
}
while (holes < limit) {
elements->set_the_hole(holes);
holes++;
result = undefs;
while (undefs < holes) {
elements->set_undefined(undefs);
undefs++;
}
while (holes < limit) {
elements->set_the_hole(holes);
holes++;
}
}
if (result <= static_cast<uint32_t>(Smi::kMaxValue)) {
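The double-array branch above compacts in place: every hole found at the front is refilled with a defined value pulled from the tail, and the count of defined values becomes the sortable prefix length. A self-contained sketch over std::vector<double>, using NaN as a stand-in for the hole marker (an assumption for this illustration only):

#include <cmath>
#include <cstddef>
#include <vector>

size_t CompactForSortSketch(std::vector<double>& elements, size_t limit) {
  auto is_hole = [](double d) { return std::isnan(d); };  // assumed hole marker
  size_t holes = limit;
  for (size_t i = 0; i < holes; i++) {
    if (!is_hole(elements[i])) continue;
    holes--;                             // slot i must be refilled from the tail
    while (holes > i) {
      if (is_hole(elements[holes])) {
        holes--;                         // skip holes at the tail
      } else {
        elements[i] = elements[holes];   // pull a defined value forward
        break;
      }
    }
  }
  size_t defined = holes;
  for (size_t j = defined; j < limit; j++) {
    elements[j] = std::nan("");          // the tail becomes all holes again
  }
  return defined;                        // number of sortable values up front
}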
@ -11589,6 +11809,64 @@ MaybeObject* StringDictionary::TransformPropertiesToFastFor(
}
Object* ObjectHashTable::Lookup(JSObject* key) {
// If the object does not have an identity hash, it was never used as a key.
MaybeObject* maybe_hash = key->GetIdentityHash(JSObject::OMIT_CREATION);
if (maybe_hash->IsFailure()) return GetHeap()->undefined_value();
int entry = FindEntry(key);
if (entry == kNotFound) return GetHeap()->undefined_value();
return get(EntryToIndex(entry) + 1);
}
MaybeObject* ObjectHashTable::Put(JSObject* key, Object* value) {
// Make sure the key object has an identity hash code.
int hash;
{ MaybeObject* maybe_hash = key->GetIdentityHash(JSObject::ALLOW_CREATION);
if (maybe_hash->IsFailure()) return maybe_hash;
hash = Smi::cast(maybe_hash->ToObjectUnchecked())->value();
}
int entry = FindEntry(key);
// Check whether to perform removal operation.
if (value->IsUndefined()) {
if (entry == kNotFound) return this;
RemoveEntry(entry);
return Shrink(key);
}
// Key is already in table, just overwrite value.
if (entry != kNotFound) {
set(EntryToIndex(entry) + 1, value);
return this;
}
// Check whether the hash table should be extended.
Object* obj;
{ MaybeObject* maybe_obj = EnsureCapacity(1, key);
if (!maybe_obj->ToObject(&obj)) return maybe_obj;
}
ObjectHashTable* table = ObjectHashTable::cast(obj);
table->AddEntry(table->FindInsertionEntry(hash), key, value);
return table;
}
void ObjectHashTable::AddEntry(int entry, JSObject* key, Object* value) {
set(EntryToIndex(entry), key);
set(EntryToIndex(entry) + 1, value);
ElementAdded();
}
void ObjectHashTable::RemoveEntry(int entry) {
Object* null_value = GetHeap()->null_value();
set(EntryToIndex(entry), null_value);
set(EntryToIndex(entry) + 1, null_value);
ElementRemoved();
}
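Put and Lookup above give ObjectHashTable simple map semantics keyed on object identity: a missing key reads as undefined, and storing undefined removes the entry. A behavioural sketch using a plain std::unordered_map keyed by pointer, with nullptr standing in for the undefined value (both choices are assumptions for the illustration):

#include <unordered_map>

struct Obj {};  // stand-in for JSObject

class IdentityMapSketch {
 public:
  Obj* Lookup(Obj* key) const {
    auto it = table_.find(key);
    return it == table_.end() ? nullptr : it->second;     // "undefined" if absent
  }
  void Put(Obj* key, Obj* value) {
    if (value == nullptr) { table_.erase(key); return; }  // undefined removes entry
    table_[key] = value;                                  // insert or overwrite
  }
 private:
  std::unordered_map<Obj*, Obj*> table_;  // pointer identity plays the hash role
};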
#ifdef ENABLE_DEBUGGER_SUPPORT
// Check if there is a break point at this code position.
bool DebugInfo::HasBreakPoint(int code_position) {

116
deps/v8/src/objects.h

@ -790,6 +790,8 @@ class Object : public MaybeObject {
STRUCT_LIST(DECLARE_STRUCT_PREDICATE)
#undef DECLARE_STRUCT_PREDICATE
INLINE(bool IsSpecObject());
// Oddball testing.
INLINE(bool IsUndefined());
INLINE(bool IsNull());
@ -1636,6 +1638,23 @@ class JSObject: public JSReceiver {
MUST_USE_RESULT inline MaybeObject* SetHiddenPropertiesObject(
Object* hidden_obj);
// Indicates whether the hidden properties object should be created.
enum HiddenPropertiesFlag { ALLOW_CREATION, OMIT_CREATION };
// Retrieves the hidden properties object.
//
// The undefined value might be returned in case no hidden properties object
// is present and creation was omitted.
inline bool HasHiddenProperties();
MUST_USE_RESULT MaybeObject* GetHiddenProperties(HiddenPropertiesFlag flag);
// Retrieves a permanent object identity hash code.
//
// The identity hash is stored as a hidden property. The undefined value might
// be returned in case no hidden properties object is present and creation was
// omitted.
MUST_USE_RESULT MaybeObject* GetIdentityHash(HiddenPropertiesFlag flag);
MUST_USE_RESULT MaybeObject* DeleteProperty(String* name, DeleteMode mode);
MUST_USE_RESULT MaybeObject* DeleteElement(uint32_t index, DeleteMode mode);
@ -1652,7 +1671,7 @@ class JSObject: public JSReceiver {
bool ShouldConvertToFastElements();
// Returns true if the elements of JSObject contains only values that can be
// represented in a FixedDoubleArray.
bool ShouldConvertToFastDoubleElements();
bool CanConvertToFastDoubleElements();
// Tells whether the index'th element is present.
inline bool HasElement(uint32_t index);
@ -1946,8 +1965,21 @@ class JSObject: public JSReceiver {
// Also maximal value of JSArray's length property.
static const uint32_t kMaxElementCount = 0xffffffffu;
// Constants for heuristics controlling conversion of fast elements
// to slow elements.
// Maximal gap that can be introduced by adding an element beyond
// the current elements length.
static const uint32_t kMaxGap = 1024;
static const int kMaxFastElementsLength = 5000;
// Maximal length of fast elements array that won't be checked for
// being dense enough on expansion.
static const int kMaxUncheckedFastElementsLength = 5000;
// Same as above but for old arrays. This limit is more strict. We
// don't want to be wasteful with long lived objects.
static const int kMaxUncheckedOldFastElementsLength = 500;
static const int kInitialMaxFastElementArray = 100000;
static const int kMaxFastProperties = 12;
static const int kMaxInstanceSize = 255 * kPointerSize;
@ -2013,6 +2045,9 @@ class JSObject: public JSReceiver {
// Returns true if most of the elements backing storage is used.
bool HasDenseElements();
// Gets the current elements capacity and the number of used elements.
void GetElementsCapacityAndUsage(int* capacity, int* used);
bool CanSetCallback(String* name);
MUST_USE_RESULT MaybeObject* SetElementCallback(
uint32_t index,
@ -2048,6 +2083,7 @@ class FixedArrayBase: public HeapObject {
static const int kHeaderSize = kLengthOffset + kPointerSize;
};
class FixedDoubleArray;
// FixedArray describes fixed-sized arrays with element type Object*.
class FixedArray: public FixedArrayBase {
@ -2090,6 +2126,10 @@ class FixedArray: public FixedArrayBase {
// Compute the union of this and other.
MUST_USE_RESULT MaybeObject* UnionOfKeys(FixedArray* other);
// Compute the union of this and other.
MUST_USE_RESULT MaybeObject* UnionOfDoubleKeys(
FixedDoubleArray* other);
// Copy a sub array from the receiver to dest.
void CopyTo(int pos, FixedArray* dest, int dest_pos, int len);
@ -2484,6 +2524,10 @@ class HashTable: public FixedArray {
int at_least_space_for,
PretenureFlag pretenure = NOT_TENURED);
// Computes the required capacity for a table holding the given
// number of elements. May be more than HashTable::kMaxCapacity.
static int ComputeCapacity(int at_least_space_for);
// Returns the key at entry.
Object* KeyAt(int entry) { return get(EntryToIndex(entry)); }
@ -2906,6 +2950,40 @@ class NumberDictionary: public Dictionary<NumberDictionaryShape, uint32_t> {
};
class ObjectHashTableShape {
public:
static inline bool IsMatch(JSObject* key, Object* other);
static inline uint32_t Hash(JSObject* key);
static inline uint32_t HashForObject(JSObject* key, Object* object);
MUST_USE_RESULT static inline MaybeObject* AsObject(JSObject* key);
static const int kPrefixSize = 0;
static const int kEntrySize = 2;
};
// ObjectHashTable maps keys that are JavaScript objects to object values by
// using the identity hash of the key for hashing purposes.
class ObjectHashTable: public HashTable<ObjectHashTableShape, JSObject*> {
public:
static inline ObjectHashTable* cast(Object* obj) {
ASSERT(obj->IsHashTable());
return reinterpret_cast<ObjectHashTable*>(obj);
}
// Looks up the value associated with the given key. The undefined value is
// returned in case the key is not present.
Object* Lookup(JSObject* key);
// Adds (or overwrites) the value associated with the given key. Mapping a
// key to the undefined value causes removal of the whole entry.
MUST_USE_RESULT MaybeObject* Put(JSObject* key, Object* value);
private:
void AddEntry(int entry, JSObject* key, Object* value);
void RemoveEntry(int entry);
};
// JSFunctionResultCache caches results of some JSFunction invocation.
// It is a fixed array with fixed structure:
// [0]: factory function
@ -3480,13 +3558,14 @@ class Code: public HeapObject {
UNARY_OP_IC,
BINARY_OP_IC,
COMPARE_IC,
TO_BOOLEAN_IC,
// No more than 16 kinds. The value currently encoded in four bits in
// Flags.
// Pseudo-kinds.
REGEXP = BUILTIN,
FIRST_IC_KIND = LOAD_IC,
LAST_IC_KIND = COMPARE_IC
LAST_IC_KIND = TO_BOOLEAN_IC
};
enum {
@ -3552,13 +3631,10 @@ class Code: public HeapObject {
inline bool is_keyed_store_stub() { return kind() == KEYED_STORE_IC; }
inline bool is_call_stub() { return kind() == CALL_IC; }
inline bool is_keyed_call_stub() { return kind() == KEYED_CALL_IC; }
inline bool is_unary_op_stub() {
return kind() == UNARY_OP_IC;
}
inline bool is_binary_op_stub() {
return kind() == BINARY_OP_IC;
}
inline bool is_unary_op_stub() { return kind() == UNARY_OP_IC; }
inline bool is_binary_op_stub() { return kind() == BINARY_OP_IC; }
inline bool is_compare_ic_stub() { return kind() == COMPARE_IC; }
inline bool is_to_boolean_ic_stub() { return kind() == TO_BOOLEAN_IC; }
// [major_key]: For kind STUB or BINARY_OP_IC, the major key.
inline int major_key();
@ -3600,21 +3676,24 @@ class Code: public HeapObject {
inline CheckType check_type();
inline void set_check_type(CheckType value);
// [type-recording unary op type]: For all UNARY_OP_IC.
// [type-recording unary op type]: For kind UNARY_OP_IC.
inline byte unary_op_type();
inline void set_unary_op_type(byte value);
// [type-recording binary op type]: For all TYPE_RECORDING_BINARY_OP_IC.
// [type-recording binary op type]: For kind BINARY_OP_IC.
inline byte binary_op_type();
inline void set_binary_op_type(byte value);
inline byte binary_op_result_type();
inline void set_binary_op_result_type(byte value);
// [compare state]: For kind compare IC stubs, tells what state the
// stub is in.
// [compare state]: For kind COMPARE_IC, tells what state the stub is in.
inline byte compare_state();
inline void set_compare_state(byte value);
// [to_boolean_foo]: For kind TO_BOOLEAN_IC tells what state the stub is in.
inline byte to_boolean_state();
inline void set_to_boolean_state(byte value);
// Get the safepoint entry for the given pc.
SafepointEntry GetSafepointEntry(Address pc);
@ -3756,9 +3835,10 @@ class Code: public HeapObject {
static const int kStackSlotsOffset = kKindSpecificFlagsOffset;
static const int kCheckTypeOffset = kKindSpecificFlagsOffset;
static const int kCompareStateOffset = kStubMajorKeyOffset + 1;
static const int kUnaryOpTypeOffset = kStubMajorKeyOffset + 1;
static const int kBinaryOpTypeOffset = kStubMajorKeyOffset + 1;
static const int kCompareStateOffset = kStubMajorKeyOffset + 1;
static const int kToBooleanTypeOffset = kStubMajorKeyOffset + 1;
static const int kHasDeoptimizationSupportOffset = kOptimizableOffset + 1;
static const int kBinaryOpReturnTypeOffset = kBinaryOpTypeOffset + 1;
@ -5847,6 +5927,8 @@ class String: public HeapObject {
StringPrint(stdout);
}
void StringPrint(FILE* out);
char* ToAsciiArray();
#endif
#ifdef DEBUG
void StringVerify();
@ -6837,7 +6919,6 @@ class FunctionTemplateInfo: public TemplateInfo {
DECL_ACCESSORS(instance_call_handler, Object)
DECL_ACCESSORS(access_check_info, Object)
DECL_ACCESSORS(flag, Smi)
DECL_ACCESSORS(prototype_attributes, Smi)
// Following properties use flag bits.
DECL_BOOLEAN_ACCESSORS(hidden_prototype)
@ -6845,6 +6926,7 @@ class FunctionTemplateInfo: public TemplateInfo {
// If the bit is set, object instances created by this function
// requires access check.
DECL_BOOLEAN_ACCESSORS(needs_access_check)
DECL_BOOLEAN_ACCESSORS(read_only_prototype)
static inline FunctionTemplateInfo* cast(Object* obj);
@ -6877,14 +6959,14 @@ class FunctionTemplateInfo: public TemplateInfo {
static const int kAccessCheckInfoOffset =
kInstanceCallHandlerOffset + kPointerSize;
static const int kFlagOffset = kAccessCheckInfoOffset + kPointerSize;
static const int kPrototypeAttributesOffset = kFlagOffset + kPointerSize;
static const int kSize = kPrototypeAttributesOffset + kPointerSize;
static const int kSize = kFlagOffset + kPointerSize;
private:
// Bit position in the flag, from least significant bit position.
static const int kHiddenPrototypeBit = 0;
static const int kUndetectableBit = 1;
static const int kNeedsAccessCheckBit = 2;
static const int kReadOnlyPrototypeBit = 3;
DISALLOW_IMPLICIT_CONSTRUCTORS(FunctionTemplateInfo);
};

58
deps/v8/src/parser.cc

@ -2755,7 +2755,7 @@ Expression* Parser::ParseLeftHandSideExpression(bool* ok) {
Handle<String> name = callee->name();
Variable* var = top_scope_->Lookup(name);
if (var == NULL) {
top_scope_->RecordEvalCall();
top_scope_->DeclarationScope()->RecordEvalCall();
}
}
result = NewCall(result, args, pos);
@ -3641,7 +3641,10 @@ FunctionLiteral* Parser::ParseFunctionLiteral(Handle<String> var_name,
}
int num_parameters = 0;
Scope* scope = NewScope(top_scope_, Scope::FUNCTION_SCOPE, inside_with());
// Function declarations are hoisted.
Scope* scope = (type == DECLARATION)
? NewScope(top_scope_->DeclarationScope(), Scope::FUNCTION_SCOPE, false)
: NewScope(top_scope_, Scope::FUNCTION_SCOPE, inside_with());
ZoneList<Statement*>* body = new(zone()) ZoneList<Statement*>(8);
int materialized_literal_count;
int expected_property_count;
@ -3715,36 +3718,43 @@ FunctionLiteral* Parser::ParseFunctionLiteral(Handle<String> var_name,
RelocInfo::kNoPosition)));
}
// Determine if the function will be lazily compiled. The mode can
// only be PARSE_LAZILY if the --lazy flag is true.
// Determine if the function will be lazily compiled. The mode can only
// be PARSE_LAZILY if the --lazy flag is true. We will not lazily
// compile if we do not have preparser data for the function.
bool is_lazily_compiled = (mode() == PARSE_LAZILY &&
top_scope_->outer_scope()->is_global_scope() &&
top_scope_->HasTrivialOuterContext() &&
!parenthesized_function_);
!parenthesized_function_ &&
pre_data() != NULL);
parenthesized_function_ = false; // The bit was set for this function only.
int function_block_pos = scanner().location().beg_pos;
if (is_lazily_compiled && pre_data() != NULL) {
if (is_lazily_compiled) {
int function_block_pos = scanner().location().beg_pos;
FunctionEntry entry = pre_data()->GetFunctionEntry(function_block_pos);
if (!entry.is_valid()) {
ReportInvalidPreparseData(name, CHECK_OK);
}
end_pos = entry.end_pos();
if (end_pos <= function_block_pos) {
// End position greater than end of stream is safe, and hard to check.
ReportInvalidPreparseData(name, CHECK_OK);
// There is no preparser data for the function, we will not lazily
// compile after all.
is_lazily_compiled = false;
} else {
end_pos = entry.end_pos();
if (end_pos <= function_block_pos) {
// End position greater than end of stream is safe, and hard to check.
ReportInvalidPreparseData(name, CHECK_OK);
}
isolate()->counters()->total_preparse_skipped()->Increment(
end_pos - function_block_pos);
// Seek to position just before terminal '}'.
scanner().SeekForward(end_pos - 1);
materialized_literal_count = entry.literal_count();
expected_property_count = entry.property_count();
if (entry.strict_mode()) top_scope_->EnableStrictMode();
only_simple_this_property_assignments = false;
this_property_assignments = isolate()->factory()->empty_fixed_array();
Expect(Token::RBRACE, CHECK_OK);
}
isolate()->counters()->total_preparse_skipped()->Increment(
end_pos - function_block_pos);
// Seek to position just before terminal '}'.
scanner().SeekForward(end_pos - 1);
materialized_literal_count = entry.literal_count();
expected_property_count = entry.property_count();
if (entry.strict_mode()) top_scope_->EnableStrictMode();
only_simple_this_property_assignments = false;
this_property_assignments = isolate()->factory()->empty_fixed_array();
Expect(Token::RBRACE, CHECK_OK);
} else {
}
if (!is_lazily_compiled) {
ParseSourceElements(body, Token::RBRACE, CHECK_OK);
materialized_literal_count = lexical_scope.materialized_literal_count();

6
deps/v8/src/platform-cygwin.cc

@ -166,6 +166,12 @@ void OS::Free(void* address, const size_t size) {
}
void OS::ProtectCode(void* address, const size_t size) {
DWORD old_protect;
VirtualProtect(address, size, PAGE_EXECUTE_READ, &old_protect);
}
void OS::Guard(void* address, const size_t size) {
DWORD oldprotect;
VirtualProtect(address, size, PAGE_READONLY | PAGE_GUARD, &oldprotect);

38
deps/v8/src/platform-linux.cc

@ -78,13 +78,33 @@ double ceiling(double x) {
static Mutex* limit_mutex = NULL;
static void* GetRandomMmapAddr() {
Isolate* isolate = Isolate::UncheckedCurrent();
// Note that the current isolate isn't set up in a call path via
// CpuFeatures::Probe. We don't care about randomization in this case because
// the code page is immediately freed.
if (isolate != NULL) {
#ifdef V8_TARGET_ARCH_X64
uint64_t rnd1 = V8::RandomPrivate(isolate);
uint64_t rnd2 = V8::RandomPrivate(isolate);
uint64_t raw_addr = (rnd1 << 32) ^ rnd2;
raw_addr &= V8_UINT64_C(0x3ffffffff000);
#else
uint32_t raw_addr = V8::RandomPrivate(isolate);
// The range 0x20000000 - 0x60000000 is relatively unpopulated across a
// variety of ASLR modes (PAE kernel, NX compat mode, etc).
raw_addr &= 0x3ffff000;
raw_addr += 0x20000000;
#endif
return reinterpret_cast<void*>(raw_addr);
}
return NULL;
}
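A quick numeric check of the 32-bit path above: masking with 0x3ffff000 keeps 18 random, page-aligned bits, and adding 0x20000000 moves the hint into the 0x20000000..0x5ffff000 range the comment describes. A tiny sketch (the helper name is invented):

#include <cstdio>

void ShowMmapHintSketch(unsigned random_value) {
  unsigned raw_addr = random_value;
  raw_addr &= 0x3ffff000u;   // page-aligned offset, at most 0x3ffff000
  raw_addr += 0x20000000u;   // lowest possible hint is 0x20000000
  std::printf("mmap hint = 0x%08x\n", raw_addr);
}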
void OS::Setup() {
// Seed the random number generator.
// Convert the current time to a 64-bit integer first, before converting it
// to an unsigned. Going directly can cause an overflow and the seed to be
// set to all ones. The seed will be identical for different instances that
// call this setup code within the same millisecond.
uint64_t seed = static_cast<uint64_t>(TimeCurrentMillis());
// Seed the random number generator. We preserve microsecond resolution.
uint64_t seed = Ticks() ^ (getpid() << 16);
srandom(static_cast<unsigned int>(seed));
limit_mutex = CreateMutex();
@ -367,10 +387,10 @@ size_t OS::AllocateAlignment() {
void* OS::Allocate(const size_t requested,
size_t* allocated,
bool is_executable) {
// TODO(805): Port randomization of allocated executable memory to Linux.
const size_t msize = RoundUp(requested, sysconf(_SC_PAGESIZE));
int prot = PROT_READ | PROT_WRITE | (is_executable ? PROT_EXEC : 0);
void* mbase = mmap(NULL, msize, prot, MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
void* addr = GetRandomMmapAddr();
void* mbase = mmap(addr, msize, prot, MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
if (mbase == MAP_FAILED) {
LOG(i::Isolate::Current(),
StringEvent("OS::Allocate", "mmap failed"));
@ -586,7 +606,7 @@ static const int kMmapFdOffset = 0;
VirtualMemory::VirtualMemory(size_t size) {
address_ = mmap(NULL, size, PROT_NONE,
address_ = mmap(GetRandomMmapAddr(), size, PROT_NONE,
MAP_PRIVATE | MAP_ANONYMOUS | MAP_NORESERVE,
kMmapFd, kMmapFdOffset);
size_ = size;

6
deps/v8/src/platform-posix.cc

@ -71,6 +71,12 @@ intptr_t OS::MaxVirtualMemory() {
#ifndef __CYGWIN__
// Get rid of writable permission on code allocations.
void OS::ProtectCode(void* address, const size_t size) {
mprotect(address, size, PROT_READ | PROT_EXEC);
}
// Create guard pages.
void OS::Guard(void* address, const size_t size) {
mprotect(address, size, PROT_NONE);

6
deps/v8/src/platform-win32.cc

@ -957,6 +957,12 @@ void OS::Free(void* address, const size_t size) {
}
void OS::ProtectCode(void* address, const size_t size) {
DWORD old_protect;
VirtualProtect(address, size, PAGE_EXECUTE_READ, &old_protect);
}
void OS::Guard(void* address, const size_t size) {
DWORD oldprotect;
VirtualProtect(address, size, PAGE_READONLY | PAGE_GUARD, &oldprotect);

3
deps/v8/src/platform.h

@ -207,6 +207,9 @@ class OS {
bool is_executable);
static void Free(void* address, const size_t size);
// Mark code segments non-writable.
static void ProtectCode(void* address, const size_t size);
// Assign memory as a guard page so that access will cause an exception.
static void Guard(void* address, const size_t size);

4
deps/v8/src/proxy.js

@ -136,6 +136,10 @@ function DerivedHasTrap(name) {
return !!this.getPropertyDescriptor(name)
}
function DerivedHasOwnTrap(name) {
return !!this.getOwnPropertyDescriptor(name)
}
function DerivedKeysTrap() {
var names = this.getOwnPropertyNames()
var enumerableNames = []

88
deps/v8/src/runtime.cc

@ -219,8 +219,20 @@ MUST_USE_RESULT static MaybeObject* DeepCopyBoilerplate(Isolate* isolate,
}
break;
}
default:
UNREACHABLE();
case JSObject::NON_STRICT_ARGUMENTS_ELEMENTS:
UNIMPLEMENTED();
break;
case JSObject::EXTERNAL_PIXEL_ELEMENTS:
case JSObject::EXTERNAL_BYTE_ELEMENTS:
case JSObject::EXTERNAL_UNSIGNED_BYTE_ELEMENTS:
case JSObject::EXTERNAL_SHORT_ELEMENTS:
case JSObject::EXTERNAL_UNSIGNED_SHORT_ELEMENTS:
case JSObject::EXTERNAL_INT_ELEMENTS:
case JSObject::EXTERNAL_UNSIGNED_INT_ELEMENTS:
case JSObject::EXTERNAL_FLOAT_ELEMENTS:
case JSObject::EXTERNAL_DOUBLE_ELEMENTS:
case JSObject::FAST_DOUBLE_ELEMENTS:
// No contained objects, nothing to do.
break;
}
return copy;
@ -619,7 +631,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_Fix) {
ASSERT(args.length() == 1);
CONVERT_CHECKED(JSProxy, proxy, args[0]);
proxy->Fix();
return proxy;
return isolate->heap()->undefined_value();
}
@ -1666,7 +1678,9 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_RegExpExec) {
RUNTIME_FUNCTION(MaybeObject*, Runtime_RegExpConstructResult) {
ASSERT(args.length() == 3);
CONVERT_SMI_ARG_CHECKED(elements_count, 0);
if (elements_count > JSArray::kMaxFastElementsLength) {
if (elements_count < 0 ||
elements_count > FixedArray::kMaxLength ||
!Smi::IsValid(elements_count)) {
return isolate->ThrowIllegalOperation();
}
Object* new_object;
@ -1968,6 +1982,61 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionSetPrototype) {
}
RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionSetReadOnlyPrototype) {
NoHandleAllocation ha;
RUNTIME_ASSERT(args.length() == 1);
CONVERT_CHECKED(JSFunction, function, args[0]);
MaybeObject* maybe_name =
isolate->heap()->AllocateStringFromAscii(CStrVector("prototype"));
String* name;
if (!maybe_name->To(&name)) return maybe_name;
if (function->HasFastProperties()) {
// Construct a new field descriptor with updated attributes.
DescriptorArray* instance_desc = function->map()->instance_descriptors();
int index = instance_desc->Search(name);
ASSERT(index != DescriptorArray::kNotFound);
PropertyDetails details(instance_desc->GetDetails(index));
CallbacksDescriptor new_desc(name,
instance_desc->GetValue(index),
static_cast<PropertyAttributes>(details.attributes() | READ_ONLY),
details.index());
// Construct a new field descriptors array containing the new descriptor.
Object* descriptors_unchecked;
{ MaybeObject* maybe_descriptors_unchecked =
instance_desc->CopyInsert(&new_desc, REMOVE_TRANSITIONS);
if (!maybe_descriptors_unchecked->ToObject(&descriptors_unchecked)) {
return maybe_descriptors_unchecked;
}
}
DescriptorArray* new_descriptors =
DescriptorArray::cast(descriptors_unchecked);
// Create a new map featuring the new field descriptors array.
Object* map_unchecked;
{ MaybeObject* maybe_map_unchecked = function->map()->CopyDropDescriptors();
if (!maybe_map_unchecked->ToObject(&map_unchecked)) {
return maybe_map_unchecked;
}
}
Map* new_map = Map::cast(map_unchecked);
new_map->set_instance_descriptors(new_descriptors);
function->set_map(new_map);
} else { // Dictionary properties.
// Directly manipulate the property details.
int entry = function->property_dictionary()->FindEntry(name);
ASSERT(entry != StringDictionary::kNotFound);
PropertyDetails details = function->property_dictionary()->DetailsAt(entry);
PropertyDetails new_details(
static_cast<PropertyAttributes>(details.attributes() | READ_ONLY),
details.type(),
details.index());
function->property_dictionary()->DetailsAtPut(entry, new_details);
}
return function;
}
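Both branches above leave the property in place and only OR READ_ONLY into its attribute bits. Reduced to the attribute arithmetic (the bit layout below is an assumption chosen for the example):

enum PropertyAttributesSketch {  // assumed layout, for illustration only
  NONE = 0, READ_ONLY = 1 << 0, DONT_ENUM = 1 << 1, DONT_DELETE = 1 << 2
};

PropertyAttributesSketch MakeReadOnly(PropertyAttributesSketch attrs) {
  // e.g. DONT_ENUM (2) becomes DONT_ENUM | READ_ONLY (3); other bits survive.
  return static_cast<PropertyAttributesSketch>(attrs | READ_ONLY);
}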
RUNTIME_FUNCTION(MaybeObject*, Runtime_FunctionIsAPIFunction) {
NoHandleAllocation ha;
ASSERT(args.length() == 1);
@ -4463,7 +4532,7 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetLocalPropertyNames) {
for (int i = 0; i < length; i++) {
jsproto->GetLocalPropertyNames(*names,
i == 0 ? 0 : local_property_count[i - 1]);
if (!GetHiddenProperties(jsproto, false)->IsUndefined()) {
if (jsproto->HasHiddenProperties()) {
proto_with_hidden_properties++;
}
if (i < length - 1) {
@ -9521,6 +9590,9 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_MoveArrayContents) {
if (new_elements->map() == isolate->heap()->fixed_array_map() ||
new_elements->map() == isolate->heap()->fixed_cow_array_map()) {
maybe_new_map = to->map()->GetFastElementsMap();
} else if (new_elements->map() ==
isolate->heap()->fixed_double_array_map()) {
maybe_new_map = to->map()->GetFastDoubleElementsMap();
} else {
maybe_new_map = to->map()->GetSlowElementsMap();
}
@ -9608,12 +9680,13 @@ RUNTIME_FUNCTION(MaybeObject*, Runtime_GetArrayKeys) {
}
return *isolate->factory()->NewJSArrayWithElements(keys);
} else {
ASSERT(array->HasFastElements());
ASSERT(array->HasFastElements() || array->HasFastDoubleElements());
Handle<FixedArray> single_interval = isolate->factory()->NewFixedArray(2);
// -1 means start of array.
single_interval->set(0, Smi::FromInt(-1));
FixedArrayBase* elements = FixedArrayBase::cast(array->elements());
uint32_t actual_length =
static_cast<uint32_t>(FixedArray::cast(array->elements())->length());
static_cast<uint32_t>(elements->length());
uint32_t min_length = actual_length < length ? actual_length : length;
Handle<Object> length_object =
isolate->factory()->NewNumber(static_cast<double>(min_length));
@ -11192,7 +11265,6 @@ static Handle<Object> GetArgumentsObject(Isolate* isolate,
if (sinfo->number_of_stack_slots() > 0) {
index = scope_info->StackSlotIndex(isolate->heap()->arguments_symbol());
if (index != -1) {
CHECK(false);
return Handle<Object>(frame->GetExpression(index), isolate);
}
}

1
deps/v8/src/runtime.h

@ -210,6 +210,7 @@ namespace internal {
F(FunctionSetInstanceClassName, 2, 1) \
F(FunctionSetLength, 2, 1) \
F(FunctionSetPrototype, 2, 1) \
F(FunctionSetReadOnlyPrototype, 1, 1) \
F(FunctionGetName, 1, 1) \
F(FunctionSetName, 2, 1) \
F(FunctionSetBound, 1, 1) \

16
deps/v8/src/scopes.h

@ -357,11 +357,17 @@ class Scope: public ZoneObject {
// Illegal redeclaration.
Expression* illegal_redecl_;
// Scope-specific information.
bool scope_inside_with_; // this scope is inside a 'with' of some outer scope
bool scope_contains_with_; // this scope contains a 'with' statement
bool scope_calls_eval_; // this scope contains an 'eval' call
bool strict_mode_; // this scope is a strict mode scope
// Scope-specific information computed during parsing.
//
// This scope is inside a 'with' of some outer scope.
bool scope_inside_with_;
// This scope contains a 'with' statement.
bool scope_contains_with_;
// This scope or a nested catch scope or with scope contain an 'eval' call. At
// the 'eval' call site this scope is the declaration scope.
bool scope_calls_eval_;
// This scope is a strict mode scope.
bool strict_mode_;
// Computed via PropagateScopeInfo.
bool outer_scope_calls_eval_;

1
deps/v8/src/spaces.cc

@ -1542,6 +1542,7 @@ static void ReportCodeKindStatistics() {
CASE(UNARY_OP_IC);
CASE(BINARY_OP_IC);
CASE(COMPARE_IC);
CASE(TO_BOOLEAN_IC);
}
}

2790
deps/v8/src/third_party/valgrind/valgrind.h

File diff suppressed because it is too large

7
deps/v8/src/type-info.cc

@ -439,6 +439,12 @@ void TypeFeedbackOracle::CollectKeyedReceiverTypes(
}
byte TypeFeedbackOracle::ToBooleanTypes(unsigned ast_id) {
Handle<Object> object = GetInfo(ast_id);
return object->IsCode() ? Handle<Code>::cast(object)->to_boolean_state() : 0;
}
// Things are a bit tricky here: The iterator for the RelocInfos and the infos
// themselves are not GC-safe, so we first get all infos, then we create the
// dictionary (possibly triggering GC), and finally we relocate the collected
@ -523,6 +529,7 @@ void TypeFeedbackOracle::ProcessTarget(unsigned ast_id, Code* target) {
case Code::UNARY_OP_IC:
case Code::BINARY_OP_IC:
case Code::COMPARE_IC:
case Code::TO_BOOLEAN_IC:
SetInfo(ast_id, target);
break;

5
deps/v8/src/type-info.h

@ -238,6 +238,11 @@ class TypeFeedbackOracle BASE_EMBEDDED {
bool LoadIsBuiltin(Property* expr, Builtins::Name id);
// TODO(1571) We can't use ToBooleanStub::Types as the return value because
// of various cycles in our headers. Death to tons of implementations in
// headers!! :-P
byte ToBooleanTypes(unsigned ast_id);
// Get type information for arithmetic operations and compares.
TypeInfo UnaryType(UnaryOperation* expr);
TypeInfo BinaryType(BinaryOperation* expr);

25
deps/v8/src/utils.h

@ -30,6 +30,7 @@
#include <stdlib.h>
#include <string.h>
#include <climits>
#include "globals.h"
#include "checks.h"
@ -885,6 +886,30 @@ class SimpleStringBuilder {
DISALLOW_IMPLICIT_CONSTRUCTORS(SimpleStringBuilder);
};
// A poor man's version of STL's bitset: A bit set of enums E (without explicit
// values), fitting into an integral type T.
template <class E, class T = int>
class EnumSet {
public:
explicit EnumSet(T bits = 0) : bits_(bits) {}
bool IsEmpty() const { return bits_ == 0; }
bool Contains(E element) const { return (bits_ & Mask(element)) != 0; }
void Add(E element) { bits_ |= Mask(element); }
void Remove(E element) { bits_ &= ~Mask(element); }
T ToIntegral() const { return bits_; }
private:
T Mask(E element) const {
// The strange typing in ASSERT is necessary to avoid stupid warnings, see:
// http://gcc.gnu.org/bugzilla/show_bug.cgi?id=43680
ASSERT(element < static_cast<int>(sizeof(T) * CHAR_BIT));
return 1 << element;
}
T bits_;
};
} } // namespace v8::internal
#endif // V8_UTILS_H_

103
deps/v8/src/v8natives.js

@ -157,17 +157,6 @@ function GlobalEval(x) {
}
// execScript for IE compatibility.
function GlobalExecScript(expr, lang) {
// NOTE: We don't care about the character casing.
if (!lang || /javascript/i.test(lang)) {
var f = %CompileString(ToString(expr));
f.call(%GlobalReceiver(global));
}
return null;
}
// ----------------------------------------------------------------------------
@ -187,8 +176,7 @@ function SetupGlobal() {
"isFinite", GlobalIsFinite,
"parseInt", GlobalParseInt,
"parseFloat", GlobalParseFloat,
"eval", GlobalEval,
"execScript", GlobalExecScript
"eval", GlobalEval
));
}
@ -221,7 +209,7 @@ function ObjectToString() {
if (IS_UNDEFINED(this) && !IS_UNDETECTABLE(this)) {
return '[object Undefined]';
}
if (IS_NULL(this)) return '[object Null]';
if (IS_NULL(this)) return '[object Null]';
return "[object " + %_ClassOf(ToObject(this)) + "]";
}
@ -244,6 +232,10 @@ function ObjectValueOf() {
// ECMA-262 - 15.2.4.5
function ObjectHasOwnProperty(V) {
if (%IsJSProxy(this)) {
var handler = %GetHandler(this);
return CallTrap1(handler, "hasOwn", DerivedHasOwnTrap, TO_STRING_INLINE(V));
}
return %HasLocalProperty(TO_OBJECT_INLINE(this), TO_STRING_INLINE(V));
}
@ -261,7 +253,12 @@ function ObjectIsPrototypeOf(V) {
// ECMA-262 - 15.2.4.6
function ObjectPropertyIsEnumerable(V) {
return %IsPropertyEnumerable(ToObject(this), ToString(V));
var P = ToString(V);
if (%IsJSProxy(this)) {
var desc = GetOwnProperty(this, P);
return IS_UNDEFINED(desc) ? false : desc.isEnumerable();
}
return %IsPropertyEnumerable(ToObject(this), P);
}
@ -322,9 +319,7 @@ function ObjectKeys(obj) {
throw MakeTypeError("obj_ctor_property_non_object", ["keys"]);
if (%IsJSProxy(obj)) {
var handler = %GetHandler(obj);
var keys = handler.keys;
if (IS_UNDEFINED(keys)) keys = DerivedKeysTrap;
var names = %_CallFunction(handler, keys);
var names = CallTrap0(handler, "keys", DerivedKeysTrap);
return ToStringArray(names);
}
return %LocalKeys(obj);
@ -595,16 +590,41 @@ function ConvertDescriptorArrayToDescriptor(desc_array) {
}
// For Harmony proxies.
function GetTrap(handler, name, defaultTrap) {
var trap = handler[name];
if (IS_UNDEFINED(trap)) {
if (IS_UNDEFINED(defaultTrap)) {
throw MakeTypeError("handler_trap_missing", [handler, name]);
}
trap = defaultTrap;
} else if (!IS_FUNCTION(trap)) {
throw MakeTypeError("handler_trap_must_be_callable", [handler, name]);
}
return trap;
}
function CallTrap0(handler, name, defaultTrap) {
return %_CallFunction(handler, GetTrap(handler, name, defaultTrap));
}
function CallTrap1(handler, name, defaultTrap, x) {
return %_CallFunction(handler, x, GetTrap(handler, name, defaultTrap));
}
function CallTrap2(handler, name, defaultTrap, x, y) {
return %_CallFunction(handler, x, y, GetTrap(handler, name, defaultTrap));
}
// ES5 section 8.12.2.
function GetProperty(obj, p) {
if (%IsJSProxy(obj)) {
var handler = %GetHandler(obj);
var getProperty = handler.getPropertyDescriptor;
if (IS_UNDEFINED(getProperty)) {
throw MakeTypeError("handler_trap_missing",
[handler, "getPropertyDescriptor"]);
}
var descriptor = %_CallFunction(handler, p, getProperty);
var descriptor = CallTrap1(obj, "getPropertyDescriptor", void 0, p);
if (IS_UNDEFINED(descriptor)) return descriptor;
var desc = ToCompletePropertyDescriptor(descriptor);
if (!desc.isConfigurable()) {
@ -625,9 +645,7 @@ function GetProperty(obj, p) {
function HasProperty(obj, p) {
if (%IsJSProxy(obj)) {
var handler = %GetHandler(obj);
var has = handler.has;
if (IS_UNDEFINED(has)) has = DerivedHasTrap;
return ToBoolean(%_CallFunction(handler, obj, p, has));
return ToBoolean(CallTrap1(handler, "has", DerivedHasTrap, p));
}
var desc = GetProperty(obj, p);
return IS_UNDEFINED(desc) ? false : true;
@ -635,15 +653,11 @@ function HasProperty(obj, p) {
// ES5 section 8.12.1.
function GetOwnProperty(obj, p) {
function GetOwnProperty(obj, v) {
var p = ToString(v);
if (%IsJSProxy(obj)) {
var handler = %GetHandler(obj);
var getOwnProperty = handler.getOwnPropertyDescriptor;
if (IS_UNDEFINED(getOwnProperty)) {
throw MakeTypeError("handler_trap_missing",
[handler, "getOwnPropertyDescriptor"]);
}
var descriptor = %_CallFunction(handler, p, getOwnProperty);
var descriptor = CallTrap1(handler, "getOwnPropertyDescriptor", void 0, p);
if (IS_UNDEFINED(descriptor)) return descriptor;
var desc = ToCompletePropertyDescriptor(descriptor);
if (!desc.isConfigurable()) {
@ -656,7 +670,7 @@ function GetOwnProperty(obj, p) {
// GetOwnProperty returns an array indexed by the constants
// defined in macros.py.
// If p is not a property on obj undefined is returned.
var props = %GetOwnProperty(ToObject(obj), ToString(p));
var props = %GetOwnProperty(ToObject(obj), ToString(v));
// A false value here means that access checks failed.
if (props === false) return void 0;
@ -668,11 +682,7 @@ function GetOwnProperty(obj, p) {
// Harmony proxies.
function DefineProxyProperty(obj, p, attributes, should_throw) {
var handler = %GetHandler(obj);
var defineProperty = handler.defineProperty;
if (IS_UNDEFINED(defineProperty)) {
throw MakeTypeError("handler_trap_missing", [handler, "defineProperty"]);
}
var result = %_CallFunction(handler, p, attributes, defineProperty);
var result = CallTrap2(handler, "defineProperty", void 0, p, attributes);
if (!ToBoolean(result)) {
if (should_throw) {
throw MakeTypeError("handler_returned_false",
@ -901,12 +911,7 @@ function ObjectGetOwnPropertyNames(obj) {
// Special handling for proxies.
if (%IsJSProxy(obj)) {
var handler = %GetHandler(obj);
var getOwnPropertyNames = handler.getOwnPropertyNames;
if (IS_UNDEFINED(getOwnPropertyNames)) {
throw MakeTypeError("handler_trap_missing",
[handler, "getOwnPropertyNames"]);
}
var names = %_CallFunction(handler, getOwnPropertyNames);
var names = CallTrap0(handler, "getOwnPropertyNames", void 0);
return ToStringArray(names, "getOwnPropertyNames");
}
@ -1036,11 +1041,7 @@ function ObjectDefineProperties(obj, properties) {
// Harmony proxies.
function ProxyFix(obj) {
var handler = %GetHandler(obj);
var fix = handler.fix;
if (IS_UNDEFINED(fix)) {
throw MakeTypeError("handler_trap_missing", [handler, "fix"]);
}
var props = %_CallFunction(handler, fix);
var props = CallTrap0(handler, "fix", void 0);
if (IS_UNDEFINED(props)) {
throw MakeTypeError("handler_returned_undefined", [handler, "fix"]);
}

4
deps/v8/src/version.cc

@ -33,8 +33,8 @@
// NOTE these macros are used by the SCons build script so their names
// cannot be changed without changing the SCons build script.
#define MAJOR_VERSION 3
#define MINOR_VERSION 4
#define BUILD_NUMBER 14
#define MINOR_VERSION 5
#define BUILD_NUMBER 3
#define PATCH_LEVEL 0
// Use 1 for candidates and 0 otherwise.
// (Boolean macro values are not supported by all preprocessors.)

177
deps/v8/src/x64/code-stubs-x64.cc

@ -230,68 +230,151 @@ void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
}
// The stub returns zero for false, and a non-zero value for true.
// The stub expects its argument on the stack and returns its result in tos_:
// zero for false, and a non-zero value for true.
void ToBooleanStub::Generate(MacroAssembler* masm) {
Label false_result, true_result, not_string;
Label patch;
const Register argument = rax;
const Register map = rdx;
__ movq(rax, Operand(rsp, 1 * kPointerSize));
if (!types_.IsEmpty()) {
__ movq(argument, Operand(rsp, 1 * kPointerSize));
}
// undefined -> false
__ CompareRoot(rax, Heap::kUndefinedValueRootIndex);
__ j(equal, &false_result);
CheckOddball(masm, UNDEFINED, Heap::kUndefinedValueRootIndex, false, &patch);
// Boolean -> its value
__ CompareRoot(rax, Heap::kFalseValueRootIndex);
__ j(equal, &false_result);
__ CompareRoot(rax, Heap::kTrueValueRootIndex);
__ j(equal, &true_result);
// Smis: 0 -> false, all other -> true
__ Cmp(rax, Smi::FromInt(0));
__ j(equal, &false_result);
__ JumpIfSmi(rax, &true_result);
CheckOddball(masm, BOOLEAN, Heap::kFalseValueRootIndex, false, &patch);
CheckOddball(masm, BOOLEAN, Heap::kTrueValueRootIndex, true, &patch);
// 'null' -> false.
__ CompareRoot(rax, Heap::kNullValueRootIndex);
__ j(equal, &false_result, Label::kNear);
CheckOddball(masm, NULL_TYPE, Heap::kNullValueRootIndex, false, &patch);
if (types_.Contains(SMI)) {
// Smis: 0 -> false, all other -> true
Label not_smi;
__ JumpIfNotSmi(argument, &not_smi, Label::kNear);
// argument contains the correct return value already
if (!tos_.is(argument)) {
__ movq(tos_, argument);
}
__ ret(1 * kPointerSize);
__ bind(&not_smi);
} else if (types_.NeedsMap()) {
// If we need a map later and have a Smi -> patch.
__ JumpIfSmi(argument, &patch, Label::kNear);
}
// Get the map of the heap object.
__ movq(map, FieldOperand(rax, HeapObject::kMapOffset));
if (types_.NeedsMap()) {
__ movq(map, FieldOperand(argument, HeapObject::kMapOffset));
// Undetectable -> false.
__ testb(FieldOperand(map, Map::kBitFieldOffset),
Immediate(1 << Map::kIsUndetectable));
__ j(not_zero, &false_result, Label::kNear);
// Everything with a map could be undetectable, so check this now.
__ testb(FieldOperand(map, Map::kBitFieldOffset),
Immediate(1 << Map::kIsUndetectable));
// Undetectable -> false.
Label not_undetectable;
__ j(zero, &not_undetectable, Label::kNear);
__ Set(tos_, 0);
__ ret(1 * kPointerSize);
__ bind(&not_undetectable);
}
// JavaScript object -> true.
__ CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE);
__ j(above_equal, &true_result, Label::kNear);
if (types_.Contains(SPEC_OBJECT)) {
// spec object -> true.
Label not_js_object;
__ CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE);
__ j(below, &not_js_object, Label::kNear);
__ Set(tos_, 1);
__ ret(1 * kPointerSize);
__ bind(&not_js_object);
} else if (types_.Contains(INTERNAL_OBJECT)) {
// We've seen a spec object for the first time -> patch.
__ CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE);
__ j(above_equal, &patch, Label::kNear);
}
// String value -> false iff empty.
__ CmpInstanceType(map, FIRST_NONSTRING_TYPE);
__ j(above_equal, &not_string, Label::kNear);
__ cmpq(FieldOperand(rax, String::kLengthOffset), Immediate(0));
__ j(zero, &false_result, Label::kNear);
__ jmp(&true_result, Label::kNear);
if (types_.Contains(STRING)) {
// String value -> false iff empty.
Label not_string;
__ CmpInstanceType(map, FIRST_NONSTRING_TYPE);
__ j(above_equal, &not_string, Label::kNear);
__ movq(tos_, FieldOperand(argument, String::kLengthOffset));
__ ret(1 * kPointerSize); // the string length is OK as the return value
__ bind(&not_string);
} else if (types_.Contains(INTERNAL_OBJECT)) {
// We've seen a string for the first time -> patch
__ CmpInstanceType(map, FIRST_NONSTRING_TYPE);
__ j(below, &patch, Label::kNear);
}
__ bind(&not_string);
// HeapNumber -> false iff +0, -0, or NaN.
// These three cases set the zero flag when compared to zero using ucomisd.
__ CompareRoot(map, Heap::kHeapNumberMapRootIndex);
__ j(not_equal, &true_result, Label::kNear);
__ xorps(xmm0, xmm0);
__ ucomisd(xmm0, FieldOperand(rax, HeapNumber::kValueOffset));
__ j(zero, &false_result, Label::kNear);
// Fall through to |true_result|.
if (types_.Contains(HEAP_NUMBER)) {
// heap number -> false iff +0, -0, or NaN.
Label not_heap_number, false_result;
__ CompareRoot(map, Heap::kHeapNumberMapRootIndex);
__ j(not_equal, &not_heap_number, Label::kNear);
__ xorps(xmm0, xmm0);
__ ucomisd(xmm0, FieldOperand(argument, HeapNumber::kValueOffset));
__ j(zero, &false_result, Label::kNear);
__ Set(tos_, 1);
__ ret(1 * kPointerSize);
__ bind(&false_result);
__ Set(tos_, 0);
__ ret(1 * kPointerSize);
__ bind(&not_heap_number);
} else if (types_.Contains(INTERNAL_OBJECT)) {
// We've seen a heap number for the first time -> patch
__ CompareRoot(map, Heap::kHeapNumberMapRootIndex);
__ j(equal, &patch, Label::kNear);
}
// Return 1/0 for true/false in tos_.
__ bind(&true_result);
__ Set(tos_, 1);
__ ret(1 * kPointerSize);
__ bind(&false_result);
__ Set(tos_, 0);
__ ret(1 * kPointerSize);
if (types_.Contains(INTERNAL_OBJECT)) {
// internal objects -> true
__ Set(tos_, 1);
__ ret(1 * kPointerSize);
}
if (!types_.IsAll()) {
__ bind(&patch);
GenerateTypeTransition(masm);
}
}
void ToBooleanStub::CheckOddball(MacroAssembler* masm,
Type type,
Heap::RootListIndex value,
bool result,
Label* patch) {
const Register argument = rax;
if (types_.Contains(type)) {
// If we see an expected oddball, return its ToBoolean value in tos_.
Label different_value;
__ CompareRoot(argument, value);
__ j(not_equal, &different_value, Label::kNear);
__ Set(tos_, result ? 1 : 0);
__ ret(1 * kPointerSize);
__ bind(&different_value);
} else if (types_.Contains(INTERNAL_OBJECT)) {
// If we see an unexpected oddball and handle internal objects, we must
// patch because the code for internal objects doesn't handle it explicitly.
__ CompareRoot(argument, value);
__ j(equal, patch);
}
}
void ToBooleanStub::GenerateTypeTransition(MacroAssembler* masm) {
__ pop(rcx); // Get return address, operand is now on top of stack.
__ Push(Smi::FromInt(tos_.code()));
__ Push(Smi::FromInt(types_.ToByte()));
__ push(rcx); // Push return address.
// Patch the caller to an appropriate specialized stub and return the
// operation result to the caller of the stub.
__ TailCallExternalReference(
ExternalReference(IC_Utility(IC::kToBoolean_Patch), masm->isolate()),
3,
1);
}
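The rewritten stub only emits checks for the value kinds recorded in types_ and patches itself (GenerateTypeTransition) when it meets a kind it has not seen; each individual check implements standard ECMAScript ToBoolean. The mjsunit-style sketch below summarizes, at the JavaScript level, which stub case each kind of input falls into; the case labels in the comments are the stub's own Type names, and the asserted truth values are plain language semantics.
// ToBoolean outcomes corresponding to the stub's specialized cases.
function toBoolean(x) { return !!x; }
assertEquals(false, toBoolean(undefined));  // UNDEFINED
assertEquals(false, toBoolean(null));       // NULL_TYPE
assertEquals(true,  toBoolean(true));       // BOOLEAN
assertEquals(false, toBoolean(false));      // BOOLEAN
assertEquals(false, toBoolean(0));          // SMI: zero
assertEquals(true,  toBoolean(7));          // SMI: non-zero
assertEquals(false, toBoolean(""));         // STRING: empty (length 0)
assertEquals(true,  toBoolean("x"));        // STRING: non-empty
assertEquals(false, toBoolean(NaN));        // HEAP_NUMBER: NaN
assertEquals(false, toBoolean(-0));         // HEAP_NUMBER: minus zero
assertEquals(true,  toBoolean(1.5));        // HEAP_NUMBER: any other double
assertEquals(true,  toBoolean({}));         // SPEC_OBJECT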

1
deps/v8/src/x64/codegen-x64.cc

@ -132,6 +132,7 @@ ModuloFunction CreateModuloFunction() {
CodeDesc desc;
masm.GetCode(&desc);
OS::ProtectCode(buffer, actual_size);
// Call the function from C++ through this pointer.
return FUNCTION_CAST<ModuloFunction>(buffer);
}

3
deps/v8/src/x64/cpu-x64.cc

@ -67,7 +67,8 @@ void CPU::FlushICache(void* start, size_t size) {
// solution is to run valgrind with --smc-check=all, but this comes at a big
// performance cost. We can notify valgrind to invalidate its cache.
#ifdef VALGRIND_DISCARD_TRANSLATIONS
VALGRIND_DISCARD_TRANSLATIONS(start, size);
unsigned res = VALGRIND_DISCARD_TRANSLATIONS(start, size);
USE(res);
#endif
}

150
deps/v8/src/x64/lithium-codegen-x64.cc

@ -1402,39 +1402,119 @@ void LCodeGen::DoBranch(LBranch* instr) {
Label* true_label = chunk_->GetAssemblyLabel(true_block);
Label* false_label = chunk_->GetAssemblyLabel(false_block);
__ CompareRoot(reg, Heap::kUndefinedValueRootIndex);
__ j(equal, false_label);
__ CompareRoot(reg, Heap::kTrueValueRootIndex);
__ j(equal, true_label);
__ CompareRoot(reg, Heap::kFalseValueRootIndex);
__ j(equal, false_label);
__ Cmp(reg, Smi::FromInt(0));
__ j(equal, false_label);
__ JumpIfSmi(reg, true_label);
// Test for double values. Plus/minus zero and NaN are false.
Label call_stub;
__ CompareRoot(FieldOperand(reg, HeapObject::kMapOffset),
Heap::kHeapNumberMapRootIndex);
__ j(not_equal, &call_stub, Label::kNear);
// HeapNumber => false iff +0, -0, or NaN. These three cases set the
// zero flag when compared to zero using ucomisd.
__ xorps(xmm0, xmm0);
__ ucomisd(xmm0, FieldOperand(reg, HeapNumber::kValueOffset));
__ j(zero, false_label);
__ jmp(true_label);
// The conversion stub doesn't cause garbage collections so it's
// safe to not record a safepoint after the call.
__ bind(&call_stub);
ToBooleanStub stub(rax);
__ Pushad();
__ push(reg);
__ CallStub(&stub);
__ testq(rax, rax);
__ Popad();
EmitBranch(true_block, false_block, not_zero);
ToBooleanStub::Types expected = instr->hydrogen()->expected_input_types();
// Avoid deopts in the case where we've never executed this path before.
if (expected.IsEmpty()) expected = ToBooleanStub::all_types();
if (expected.Contains(ToBooleanStub::UNDEFINED)) {
// undefined -> false.
__ CompareRoot(reg, Heap::kUndefinedValueRootIndex);
__ j(equal, false_label);
} else if (expected.Contains(ToBooleanStub::INTERNAL_OBJECT)) {
// We've seen undefined for the first time -> deopt.
__ CompareRoot(reg, Heap::kUndefinedValueRootIndex);
DeoptimizeIf(equal, instr->environment());
}
if (expected.Contains(ToBooleanStub::BOOLEAN)) {
// true -> true.
__ CompareRoot(reg, Heap::kTrueValueRootIndex);
__ j(equal, true_label);
} else if (expected.Contains(ToBooleanStub::INTERNAL_OBJECT)) {
// We've seen a boolean for the first time -> deopt.
__ CompareRoot(reg, Heap::kTrueValueRootIndex);
DeoptimizeIf(equal, instr->environment());
}
if (expected.Contains(ToBooleanStub::BOOLEAN)) {
// false -> false.
__ CompareRoot(reg, Heap::kFalseValueRootIndex);
__ j(equal, false_label);
} else if (expected.Contains(ToBooleanStub::INTERNAL_OBJECT)) {
// We've seen a boolean for the first time -> deopt.
__ CompareRoot(reg, Heap::kFalseValueRootIndex);
DeoptimizeIf(equal, instr->environment());
}
if (expected.Contains(ToBooleanStub::NULL_TYPE)) {
// 'null' -> false.
__ CompareRoot(reg, Heap::kNullValueRootIndex);
__ j(equal, false_label);
} else if (expected.Contains(ToBooleanStub::INTERNAL_OBJECT)) {
// We've seen null for the first time -> deopt.
__ CompareRoot(reg, Heap::kNullValueRootIndex);
DeoptimizeIf(equal, instr->environment());
}
if (expected.Contains(ToBooleanStub::SMI)) {
// Smis: 0 -> false, all other -> true.
__ Cmp(reg, Smi::FromInt(0));
__ j(equal, false_label);
__ JumpIfSmi(reg, true_label);
} else if (expected.NeedsMap()) {
// If we need a map later and have a Smi -> deopt.
__ testb(reg, Immediate(kSmiTagMask));
DeoptimizeIf(zero, instr->environment());
}
const Register map = kScratchRegister;
if (expected.NeedsMap()) {
__ movq(map, FieldOperand(reg, HeapObject::kMapOffset));
// Everything with a map could be undetectable, so check this now.
__ testb(FieldOperand(map, Map::kBitFieldOffset),
Immediate(1 << Map::kIsUndetectable));
// Undetectable -> false.
__ j(not_zero, false_label);
}
if (expected.Contains(ToBooleanStub::SPEC_OBJECT)) {
// spec object -> true.
__ CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE);
__ j(above_equal, true_label);
} else if (expected.Contains(ToBooleanStub::INTERNAL_OBJECT)) {
// We've seen a spec object for the first time -> deopt.
__ CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE);
DeoptimizeIf(above_equal, instr->environment());
}
if (expected.Contains(ToBooleanStub::STRING)) {
// String value -> false iff empty.
Label not_string;
__ CmpInstanceType(map, FIRST_NONSTRING_TYPE);
__ j(above_equal, &not_string, Label::kNear);
__ cmpq(FieldOperand(reg, String::kLengthOffset), Immediate(0));
__ j(not_zero, true_label);
__ jmp(false_label);
__ bind(&not_string);
} else if (expected.Contains(ToBooleanStub::INTERNAL_OBJECT)) {
// We've seen a string for the first time -> deopt
__ CmpInstanceType(map, FIRST_NONSTRING_TYPE);
DeoptimizeIf(below, instr->environment());
}
if (expected.Contains(ToBooleanStub::HEAP_NUMBER)) {
// heap number -> false iff +0, -0, or NaN.
Label not_heap_number;
__ CompareRoot(map, Heap::kHeapNumberMapRootIndex);
__ j(not_equal, &not_heap_number, Label::kNear);
__ xorps(xmm0, xmm0);
__ ucomisd(xmm0, FieldOperand(reg, HeapNumber::kValueOffset));
__ j(zero, false_label);
__ jmp(true_label);
__ bind(&not_heap_number);
} else if (expected.Contains(ToBooleanStub::INTERNAL_OBJECT)) {
// We've seen a heap number for the first time -> deopt.
__ CompareRoot(map, Heap::kHeapNumberMapRootIndex);
DeoptimizeIf(equal, instr->environment());
}
if (expected.Contains(ToBooleanStub::INTERNAL_OBJECT)) {
// internal objects -> true
__ jmp(true_label);
} else {
// We've seen something for the first time -> deopt.
DeoptimizeIf(no_condition, instr->environment());
}
}
}
}
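DoBranch now mirrors the stub: it consults the recorded expected_input_types and emits checks only for kinds the branch has already seen, falling back to DeoptimizeIf for anything new. The mjsunit-style sketch below (using the %OptimizeFunctionOnNextCall convention other tests in this diff rely on) shows the observable behavior; whether the optimized code actually deoptimizes on the first string depends on the feedback collected and is an assumption, but the asserted results hold in plain JavaScript regardless.
// Flags: --allow-natives-syntax
function f(x) {
  return x ? "taken" : "not taken";
}
// Warm the branch with booleans only.
assertEquals("taken", f(true));
assertEquals("not taken", f(false));
%OptimizeFunctionOnNextCall(f);
assertEquals("taken", f(true));         // optimized code, boolean-only checks
assertEquals("taken", f("non-empty"));  // unseen STRING kind -> expected deopt, result unchanged
assertEquals("not taken", f(""));       // empty string is falsy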
@ -2246,7 +2326,6 @@ void LCodeGen::DoLoadKeyedFastElement(LLoadKeyedFastElement* instr) {
void LCodeGen::DoLoadKeyedFastDoubleElement(
LLoadKeyedFastDoubleElement* instr) {
Register elements = ToRegister(instr->elements());
XMMRegister result(ToDoubleRegister(instr->result()));
if (instr->hydrogen()->RequiresHoleCheck()) {
@ -3101,14 +3180,11 @@ void LCodeGen::DoStoreKeyedFastElement(LStoreKeyedFastElement* instr) {
void LCodeGen::DoStoreKeyedFastDoubleElement(
LStoreKeyedFastDoubleElement* instr) {
XMMRegister value = ToDoubleRegister(instr->value());
Register elements = ToRegister(instr->elements());
Label have_value;
__ ucomisd(value, value);
__ j(parity_odd, &have_value); // NaN.
ExternalReference canonical_nan_reference =
ExternalReference::address_of_canonical_non_hole_nan();
__ Set(kScratchRegister, BitCast<uint64_t>(
FixedDoubleArray::canonical_not_the_hole_nan_as_double()));
__ movq(value, kScratchRegister);

6
deps/v8/src/x64/lithium-x64.cc

@ -1036,7 +1036,11 @@ LInstruction* LChunkBuilder::DoBranch(HBranch* instr) {
: instr->SecondSuccessor();
return new LGoto(successor->block_id());
}
return new LBranch(UseRegisterAtStart(v));
LInstruction* branch = new LBranch(UseRegister(v));
// When we handle all cases, we never deopt, so we don't need to assign the
// environment then.
bool all_cases_handled = instr->expected_input_types().IsAll();
return all_cases_handled ? branch : AssignEnvironment(branch);
}

7
deps/v8/src/x64/stub-cache-x64.cc

@ -3752,10 +3752,11 @@ void KeyedStoreStubCompiler::GenerateStoreFastDoubleElement(
__ bind(&smi_value);
// Value is a smi. Convert to a double and store.
__ SmiToInteger32(rax, rax);
__ push(rax);
// Preserve original value.
__ SmiToInteger32(rdx, rax);
__ push(rdx);
__ fild_s(Operand(rsp, 0));
__ pop(rax);
__ pop(rdx);
__ SmiToInteger32(rcx, rcx);
__ fstp_d(FieldOperand(rdi, rcx, times_8, FixedDoubleArray::kHeaderSize));
__ ret(0);

2
deps/v8/test/benchmarks/testcfg.py

@ -91,7 +91,7 @@ class BenchmarkTestConfiguration(test.TestConfiguration):
return [test]
def GetBuildRequirements(self):
return ['sample', 'sample=shell']
return ['d8']
def GetTestStatus(self, sections, defs):
pass

1
deps/v8/test/cctest/SConscript

@ -65,6 +65,7 @@ SOURCES = {
'test-debug.cc',
'test-decls.cc',
'test-deoptimization.cc',
'test-dictionary.cc',
'test-diy-fp.cc',
'test-double.cc',
'test-dtoa.cc',

1
deps/v8/test/cctest/cctest.gyp

@ -61,6 +61,7 @@
'test-debug.cc',
'test-decls.cc',
'test-deoptimization.cc',
'test-dictionary.cc',
'test-diy-fp.cc',
'test-double.cc',
'test-dtoa.cc',

114
deps/v8/test/cctest/test-api.cc

@ -3560,6 +3560,68 @@ THREADED_TEST(IndexedInterceptorWithIndexedAccessor) {
}
static v8::Handle<Value> UnboxedDoubleIndexedPropertyGetter(
uint32_t index,
const AccessorInfo& info) {
ApiTestFuzzer::Fuzz();
if (index < 25) {
return v8::Handle<Value>(v8_num(index));
}
return v8::Handle<Value>();
}
static v8::Handle<Value> UnboxedDoubleIndexedPropertySetter(
uint32_t index,
Local<Value> value,
const AccessorInfo& info) {
ApiTestFuzzer::Fuzz();
if (index < 25) {
return v8::Handle<Value>(v8_num(index));
}
return v8::Handle<Value>();
}
Handle<v8::Array> UnboxedDoubleIndexedPropertyEnumerator(
const AccessorInfo& info) {
// Force the list of returned keys to be stored in a FastDoubleArray.
Local<Script> indexed_property_names_script = Script::Compile(v8_str(
"keys = new Array(); keys[125000] = 1;"
"for(i = 0; i < 80000; i++) { keys[i] = i; };"
"keys.length = 25; keys;"));
Local<Value> result = indexed_property_names_script->Run();
return Local<v8::Array>(::v8::Array::Cast(*result));
}
// Make sure that the interceptor code in the runtime properly handles
// merging property name lists for double-array-backed arrays.
THREADED_TEST(IndexedInterceptorUnboxedDoubleWithIndexedAccessor) {
v8::HandleScope scope;
Local<ObjectTemplate> templ = ObjectTemplate::New();
templ->SetIndexedPropertyHandler(UnboxedDoubleIndexedPropertyGetter,
UnboxedDoubleIndexedPropertySetter,
0,
0,
UnboxedDoubleIndexedPropertyEnumerator);
LocalContext context;
context->Global()->Set(v8_str("obj"), templ->NewInstance());
// When obj is created, force it to be stored in a FastDoubleArray.
Local<Script> create_unboxed_double_script = Script::Compile(v8_str(
"obj[125000] = 1; for(i = 0; i < 80000; i+=2) { obj[i] = i; } "
"key_count = 0; "
"for (x in obj) {key_count++;};"
"obj;"));
Local<Value> result = create_unboxed_double_script->Run();
CHECK(result->ToObject()->HasRealIndexedProperty(2000));
Local<Script> key_count_check = Script::Compile(v8_str(
"key_count;"));
result = key_count_check->Run();
CHECK_EQ(v8_num(40013), result);
}
static v8::Handle<Value> IdentityIndexedPropertyGetter(
uint32_t index,
const AccessorInfo& info) {
@ -6993,53 +7055,34 @@ THREADED_TEST(SetPrototype) {
}
THREADED_TEST(SetPrototypeProperties) {
THREADED_TEST(FunctionReadOnlyPrototype) {
v8::HandleScope handle_scope;
LocalContext context;
Local<v8::FunctionTemplate> t1 = v8::FunctionTemplate::New();
t1->SetPrototypeAttributes(v8::DontDelete);
t1->PrototypeTemplate()->Set(v8_str("x"), v8::Integer::New(42));
t1->ReadOnlyPrototype();
context->Global()->Set(v8_str("func1"), t1->GetFunction());
// Configured value of ReadOnly flag.
CHECK(CompileRun(
"(function() {"
" descriptor = Object.getOwnPropertyDescriptor(func1, 'prototype');"
" return (descriptor['writable'] == true) &&"
" (descriptor['enumerable'] == true) &&"
" (descriptor['configurable'] == false);"
" return (descriptor['writable'] == false);"
"})()")->BooleanValue());
CHECK_EQ(42, CompileRun("func1.prototype.x")->Int32Value());
CHECK_EQ(42,
CompileRun("func1.prototype = {}; func1.prototype.x")->Int32Value());
Local<v8::FunctionTemplate> t2 = v8::FunctionTemplate::New();
t2->SetPrototypeAttributes(v8::DontEnum);
t2->PrototypeTemplate()->Set(v8_str("x"), v8::Integer::New(42));
context->Global()->Set(v8_str("func2"), t2->GetFunction());
// Default value of ReadOnly flag.
CHECK(CompileRun(
"(function() {"
" descriptor = Object.getOwnPropertyDescriptor(func2, 'prototype');"
" return (descriptor['writable'] == true) &&"
" (descriptor['enumerable'] == false) &&"
" (descriptor['configurable'] == true);"
"})()")->BooleanValue());
Local<v8::FunctionTemplate> t3 = v8::FunctionTemplate::New();
t3->SetPrototypeAttributes(v8::ReadOnly);
context->Global()->Set(v8_str("func3"), t3->GetFunction());
CHECK(CompileRun(
"(function() {"
" descriptor = Object.getOwnPropertyDescriptor(func3, 'prototype');"
" return (descriptor['writable'] == false) &&"
" (descriptor['enumerable'] == true) &&"
" (descriptor['configurable'] == true);"
"})()")->BooleanValue());
Local<v8::FunctionTemplate> t4 = v8::FunctionTemplate::New();
t4->SetPrototypeAttributes(v8::ReadOnly | v8::DontEnum | v8::DontDelete);
context->Global()->Set(v8_str("func4"), t4->GetFunction());
CHECK(CompileRun(
"(function() {"
" descriptor = Object.getOwnPropertyDescriptor(func4, 'prototype');"
" return (descriptor['writable'] == false) &&"
" (descriptor['enumerable'] == false) &&"
" (descriptor['configurable'] == false);"
" return (descriptor['writable'] == true);"
"})()")->BooleanValue());
CHECK_EQ(42, CompileRun("func2.prototype.x")->Int32Value());
}
@ -10620,17 +10663,16 @@ TEST(PreCompileInvalidPreparseDataError) {
*exception_value);
try_catch.Reset();
// Overwrite function bar's start position with 200. The function entry
// will not be found when searching for it by position.
// will not be found when searching for it by position and we should fall
// back on eager compilation.
sd = v8::ScriptData::PreCompile(script, i::StrLength(script));
sd_data = reinterpret_cast<unsigned*>(const_cast<char*>(sd->Data()));
sd_data[kHeaderSize + 1 * kFunctionEntrySize + kFunctionEntryStartOffset] =
200;
compiled_script = Script::New(source, NULL, sd);
CHECK(try_catch.HasCaught());
String::AsciiValue second_exception_value(try_catch.Message()->Get());
CHECK_EQ("Uncaught SyntaxError: Invalid preparser data for function bar",
*second_exception_value);
CHECK(!try_catch.HasCaught());
delete sd;
}

11
deps/v8/test/cctest/test-ast.cc

@ -56,14 +56,3 @@ TEST(List) {
CHECK_EQ(0, list->length());
delete list;
}
TEST(DeleteEmpty) {
{
List<int>* list = new List<int>(0);
delete list;
}
{
List<int> list(0);
}
}

2
deps/v8/test/cctest/test-compiler.cc

@ -377,7 +377,7 @@ static void CheckCodeForUnsafeLiteral(Handle<JSFunction> f) {
while (pc < end) {
int num_const = d.ConstantPoolSizeAt(pc);
if (num_const >= 0) {
pc += num_const * kPointerSize;
pc += (num_const + 1) * kPointerSize;
} else {
pc += d.InstructionDecode(decode_buffer, pc);
CHECK(strstr(decode_buffer.start(), "mov eax,0x178c29c") == NULL);

85
deps/v8/test/cctest/test-dictionary.cc

@ -0,0 +1,85 @@
// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include "v8.h"
#include "api.h"
#include "debug.h"
#include "execution.h"
#include "factory.h"
#include "macro-assembler.h"
#include "objects.h"
#include "global-handles.h"
#include "cctest.h"
using namespace v8::internal;
TEST(ObjectHashTable) {
v8::HandleScope scope;
LocalContext context;
Handle<ObjectHashTable> table = FACTORY->NewObjectHashTable(23);
Handle<JSObject> a = FACTORY->NewJSArray(7);
Handle<JSObject> b = FACTORY->NewJSArray(11);
table = PutIntoObjectHashTable(table, a, b);
CHECK_EQ(table->NumberOfElements(), 1);
CHECK_EQ(table->Lookup(*a), *b);
CHECK_EQ(table->Lookup(*b), HEAP->undefined_value());
// Keys still have to be valid after objects were moved.
HEAP->CollectGarbage(NEW_SPACE);
CHECK_EQ(table->NumberOfElements(), 1);
CHECK_EQ(table->Lookup(*a), *b);
CHECK_EQ(table->Lookup(*b), HEAP->undefined_value());
// Keys that are overwritten should not change number of elements.
table = PutIntoObjectHashTable(table, a, FACTORY->NewJSArray(13));
CHECK_EQ(table->NumberOfElements(), 1);
CHECK_NE(table->Lookup(*a), *b);
// Keys mapped to undefined should be removed permanently.
table = PutIntoObjectHashTable(table, a, FACTORY->undefined_value());
CHECK_EQ(table->NumberOfElements(), 0);
CHECK_EQ(table->NumberOfDeletedElements(), 1);
CHECK_EQ(table->Lookup(*a), HEAP->undefined_value());
// Keys should map back to their respective values.
for (int i = 0; i < 100; i++) {
Handle<JSObject> key = FACTORY->NewJSArray(7);
Handle<JSObject> value = FACTORY->NewJSArray(11);
table = PutIntoObjectHashTable(table, key, value);
CHECK_EQ(table->NumberOfElements(), i + 1);
CHECK_NE(table->FindEntry(*key), ObjectHashTable::kNotFound);
CHECK_EQ(table->Lookup(*key), *value);
}
// Keys never added to the map should not be found.
for (int i = 0; i < 1000; i++) {
Handle<JSObject> o = FACTORY->NewJSArray(100);
CHECK_EQ(table->FindEntry(*o), ObjectHashTable::kNotFound);
CHECK_EQ(table->Lookup(*o), HEAP->undefined_value());
}
}

13
deps/v8/test/cctest/test-list.cc

@ -1,4 +1,4 @@
// Copyright 2009 the V8 project authors. All rights reserved.
// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@ -138,3 +138,14 @@ TEST(Clear) {
list.Clear();
CHECK_EQ(0, list.length());
}
TEST(DeleteEmpty) {
{
List<int>* list = new List<int>(0);
delete list;
}
{
List<int> list(0);
}
}

8
deps/v8/test/cctest/test-serialize.cc

@ -459,7 +459,9 @@ DEPENDENT_TEST(PartialDeserialization, PartialSerialization) {
CHECK(root->IsString());
}
v8::HandleScope handle_scope;
Handle<Object>root_handle(root);
Handle<Object> root_handle(root);
ReserveSpaceForPartialSnapshot(file_name);
Object* root2;
{
@ -542,7 +544,9 @@ DEPENDENT_TEST(ContextDeserialization, ContextSerialization) {
CHECK(root->IsContext());
}
v8::HandleScope handle_scope;
Handle<Object>root_handle(root);
Handle<Object> root_handle(root);
ReserveSpaceForPartialSnapshot(file_name);
Object* root2;
{

2
deps/v8/test/es5conform/testcfg.py

@ -97,7 +97,7 @@ class ES5ConformTestConfiguration(test.TestConfiguration):
return tests
def GetBuildRequirements(self):
return ['sample', 'sample=shell']
return ['d8']
def GetTestStatus(self, sections, defs):
status_file = join(self.root, 'es5conform.status')

2
deps/v8/test/message/testcfg.py

@ -125,7 +125,7 @@ class MessageTestConfiguration(test.TestConfiguration):
return result
def GetBuildRequirements(self):
return ['sample', 'sample=shell']
return ['d8']
def GetTestStatus(self, sections, defs):
status_file = join(self.root, 'message.status')

40
deps/v8/test/mjsunit/compiler/regress-lbranch-double.js

@ -0,0 +1,40 @@
// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Flags: --allow-natives-syntax
// ARM's code generator for LBranch had a bug, swapping the true/false
// branches when the representation of the condition is a double.
function foo() {
return Math.sqrt(2.6415) ? 88 : 99;
}
assertEquals(88, foo());
assertEquals(88, foo());
%OptimizeFunctionOnNextCall(foo)
assertEquals(88, foo());

2
deps/v8/test/mjsunit/function-names.js

@ -128,6 +128,6 @@ var globalFunctions = [
"encodeURI", "encodeURIComponent", "Error", "TypeError",
"RangeError", "SyntaxError", "ReferenceError", "EvalError",
"URIError", "isNaN", "isFinite", "parseInt", "parseFloat",
"eval", "execScript"];
"eval"];
TestFunctionNames(this, globalFunctions);

217
deps/v8/test/mjsunit/harmony/proxies.js

@ -42,22 +42,27 @@ function TestGet(handler) {
TestGet({
get: function(r, k) { return 42 }
})
TestGet({
get: function(r, k) { return this.get2(r, k) },
get2: function(r, k) { return 42 }
})
TestGet({
getPropertyDescriptor: function(k) { return {value: 42} }
})
TestGet({
getPropertyDescriptor: function(k) { return this.getPropertyDescriptor2(k) },
getPropertyDescriptor2: function(k) { return {value: 42} }
})
TestGet({
getPropertyDescriptor: function(k) {
return {get value() { return 42 }}
}
})
TestGet({
get: undefined,
getPropertyDescriptor: function(k) { return {value: 42} }
@ -83,32 +88,38 @@ function TestGetCall(handler) {
TestGetCall({
get: function(r, k) { return function() { return 55 } }
})
TestGetCall({
get: function(r, k) { return this.get2(r, k) },
get2: function(r, k) { return function() { return 55 } }
})
TestGetCall({
getPropertyDescriptor: function(k) {
return {value: function() { return 55 }}
}
})
TestGetCall({
getPropertyDescriptor: function(k) { return this.getPropertyDescriptor2(k) },
getPropertyDescriptor2: function(k) {
return {value: function() { return 55 }}
}
})
TestGetCall({
getPropertyDescriptor: function(k) {
return {get value() { return function() { return 55 } }}
}
})
TestGetCall({
get: undefined,
getPropertyDescriptor: function(k) {
return {value: function() { return 55 }}
}
})
TestGetCall({
get: function(r, k) {
if (k == "gg") {
@ -146,14 +157,17 @@ function TestSet(handler) {
TestSet({
set: function(r, k, v) { key = k; val = v; return true }
})
TestSet({
set: function(r, k, v) { return this.set2(r, k, v) },
set2: function(r, k, v) { key = k; val = v; return true }
})
TestSet({
getOwnPropertyDescriptor: function(k) { return {writable: true} },
defineProperty: function(k, desc) { key = k; val = desc.value }
})
TestSet({
getOwnPropertyDescriptor: function(k) {
return this.getOwnPropertyDescriptor2(k)
@ -162,22 +176,26 @@ TestSet({
defineProperty: function(k, desc) { this.defineProperty2(k, desc) },
defineProperty2: function(k, desc) { key = k; val = desc.value }
})
TestSet({
getOwnPropertyDescriptor: function(k) {
return {get writable() { return true }}
},
defineProperty: function(k, desc) { key = k; val = desc.value }
})
TestSet({
getOwnPropertyDescriptor: function(k) {
return {set: function(v) { key = k; val = v }}
}
})
TestSet({
getOwnPropertyDescriptor: function(k) { return null },
getPropertyDescriptor: function(k) { return {writable: true} },
defineProperty: function(k, desc) { key = k; val = desc.value }
})
TestSet({
getOwnPropertyDescriptor: function(k) { return null },
getPropertyDescriptor: function(k) {
@ -185,12 +203,14 @@ TestSet({
},
defineProperty: function(k, desc) { key = k; val = desc.value }
})
TestSet({
getOwnPropertyDescriptor: function(k) { return null },
getPropertyDescriptor: function(k) {
return {set: function(v) { key = k; val = v }}
}
})
TestSet({
getOwnPropertyDescriptor: function(k) { return null },
getPropertyDescriptor: function(k) { return null },
@ -279,10 +299,12 @@ function TestDefine(handler) {
TestDefine({
defineProperty: function(k, d) { key = k; desc = d; return true }
})
TestDefine({
defineProperty: function(k, d) { return this.defineProperty2(k, d) },
defineProperty2: function(k, d) { key = k; desc = d; return true }
})
TestDefine(Proxy.create({
get: function(pr, pk) {
return function(k, d) { key = k; desc = d; return true }
@ -323,10 +345,12 @@ function TestDelete(handler) {
TestDelete({
'delete': function(k) { key = k; return k < "z" }
})
TestDelete({
'delete': function(k) { return this.delete2(k) },
delete2: function(k) { key = k; return k < "z" }
})
TestDelete(Proxy.create({
get: function(pr, pk) {
return function(k) { key = k; return k < "z" }
@ -363,6 +387,7 @@ TestDescriptor({
defineProperty: function(k, d) { this["__" + k] = d; return true },
getOwnPropertyDescriptor: function(k) { return this["__" + k] }
})
TestDescriptor({
defineProperty: function(k, d) { this["__" + k] = d; return true },
getOwnPropertyDescriptor: function(k) {
@ -404,7 +429,7 @@ assertTrue("object" == typeof Proxy.create({}))
// Element (in).
// Membership test (in).
var key
function TestIn(handler) {
@ -442,26 +467,31 @@ function TestIn(handler) {
TestIn({
has: function(k) { key = k; return k < "z" }
})
TestIn({
has: function(k) { return this.has2(k) },
has2: function(k) { key = k; return k < "z" }
})
TestIn({
getPropertyDescriptor: function(k) {
key = k; return k < "z" ? {value: 42} : void 0
}
})
TestIn({
getPropertyDescriptor: function(k) { return this.getPropertyDescriptor2(k) },
getPropertyDescriptor2: function(k) {
key = k; return k < "z" ? {value: 42} : void 0
}
})
TestIn({
getPropertyDescriptor: function(k) {
key = k; return k < "z" ? {get value() { return 42 }} : void 0
}
})
TestIn({
get: undefined,
getPropertyDescriptor: function(k) {
@ -477,7 +507,65 @@ TestIn(Proxy.create({
// Instanceof (instanceof).
// Own Properties (Object.prototype.hasOwnProperty).
var key
function TestHasOwn(handler) {
var o = Proxy.create(handler)
assertTrue(Object.prototype.hasOwnProperty.call(o, "a"))
assertEquals("a", key)
assertTrue(Object.prototype.hasOwnProperty.call(o, 99))
assertEquals("99", key)
assertFalse(Object.prototype.hasOwnProperty.call(o, "z"))
assertEquals("z", key)
}
TestHasOwn({
hasOwn: function(k) { key = k; return k < "z" }
})
TestHasOwn({
hasOwn: function(k) { return this.hasOwn2(k) },
hasOwn2: function(k) { key = k; return k < "z" }
})
TestHasOwn({
getOwnPropertyDescriptor: function(k) {
key = k; return k < "z" ? {value: 42} : void 0
}
})
TestHasOwn({
getOwnPropertyDescriptor: function(k) {
return this.getOwnPropertyDescriptor2(k)
},
getOwnPropertyDescriptor2: function(k) {
key = k; return k < "z" ? {value: 42} : void 0
}
})
TestHasOwn({
getOwnPropertyDescriptor: function(k) {
key = k; return k < "z" ? {get value() { return 42 }} : void 0
}
})
TestHasOwn({
hasOwn: undefined,
getOwnPropertyDescriptor: function(k) {
key = k; return k < "z" ? {value: 42} : void 0
}
})
TestHasOwn(Proxy.create({
get: function(pr, pk) {
return function(k) { key = k; return k < "z" }
}
}))
// Instanceof (instanceof)
function TestInstanceof() {
var o = {}
@ -514,7 +602,7 @@ TestInstanceof()
// Prototype (Object.getPrototypeOf).
// Prototype (Object.getPrototypeOf, Object.prototype.isPrototypeOf).
function TestPrototype() {
var o = {}
@ -528,6 +616,32 @@ function TestPrototype() {
assertSame(Object.getPrototypeOf(p2), o)
assertSame(Object.getPrototypeOf(p3), p2)
assertSame(Object.getPrototypeOf(p4), null)
assertTrue(Object.prototype.isPrototypeOf(o))
assertFalse(Object.prototype.isPrototypeOf(p1))
assertTrue(Object.prototype.isPrototypeOf(p2))
assertTrue(Object.prototype.isPrototypeOf(p3))
assertFalse(Object.prototype.isPrototypeOf(p4))
assertTrue(Object.prototype.isPrototypeOf.call(Object.prototype, o))
assertFalse(Object.prototype.isPrototypeOf.call(Object.prototype, p1))
assertTrue(Object.prototype.isPrototypeOf.call(Object.prototype, p2))
assertTrue(Object.prototype.isPrototypeOf.call(Object.prototype, p3))
assertFalse(Object.prototype.isPrototypeOf.call(Object.prototype, p4))
assertFalse(Object.prototype.isPrototypeOf.call(o, o))
assertFalse(Object.prototype.isPrototypeOf.call(o, p1))
assertTrue(Object.prototype.isPrototypeOf.call(o, p2))
assertTrue(Object.prototype.isPrototypeOf.call(o, p3))
assertFalse(Object.prototype.isPrototypeOf.call(o, p4))
assertFalse(Object.prototype.isPrototypeOf.call(p1, p1))
assertFalse(Object.prototype.isPrototypeOf.call(p1, o))
assertFalse(Object.prototype.isPrototypeOf.call(p1, p2))
assertFalse(Object.prototype.isPrototypeOf.call(p1, p3))
assertFalse(Object.prototype.isPrototypeOf.call(p1, p4))
assertFalse(Object.prototype.isPrototypeOf.call(p2, p1))
assertFalse(Object.prototype.isPrototypeOf.call(p2, p2))
assertTrue(Object.prototype.isPrototypeOf.call(p2, p3))
assertFalse(Object.prototype.isPrototypeOf.call(p2, p4))
assertFalse(Object.prototype.isPrototypeOf.call(p3, p2))
}
TestPrototype()
@ -544,13 +658,16 @@ function TestPropertyNames(names, handler) {
TestPropertyNames([], {
getOwnPropertyNames: function() { return [] }
})
TestPropertyNames(["a", "zz", " ", "0"], {
getOwnPropertyNames: function() { return ["a", "zz", " ", 0] }
})
TestPropertyNames(["throw", "function "], {
getOwnPropertyNames: function() { return this.getOwnPropertyNames2() },
getOwnPropertyNames2: function() { return ["throw", "function "] }
})
TestPropertyNames(["[object Object]"], {
get getOwnPropertyNames() {
return function() { return [{}] }
@ -566,22 +683,27 @@ function TestKeys(names, handler) {
TestKeys([], {
keys: function() { return [] }
})
TestKeys(["a", "zz", " ", "0"], {
keys: function() { return ["a", "zz", " ", 0] }
})
TestKeys(["throw", "function "], {
keys: function() { return this.keys2() },
keys2: function() { return ["throw", "function "] }
})
TestKeys(["[object Object]"], {
get keys() {
return function() { return [{}] }
}
})
TestKeys(["a", "0"], {
getOwnPropertyNames: function() { return ["a", 23, "zz", "", 0] },
getOwnPropertyDescriptor: function(k) { return {enumerable: k.length == 1} }
})
TestKeys(["23", "zz", ""], {
getOwnPropertyNames: function() { return this.getOwnPropertyNames2() },
getOwnPropertyNames2: function() { return ["a", 23, "zz", "", 0] },
@ -590,6 +712,7 @@ TestKeys(["23", "zz", ""], {
},
getOwnPropertyDescriptor2: function(k) { return {enumerable: k.length != 1} }
})
TestKeys(["a", "b", "c", "5"], {
get getOwnPropertyNames() {
return function() { return ["0", 4, "a", "b", "c", 5] }
@ -598,6 +721,7 @@ TestKeys(["a", "b", "c", "5"], {
return function(k) { return {enumerable: k >= "44"} }
}
})
TestKeys([], {
get getOwnPropertyNames() {
return function() { return ["a", "b", "c"] }
@ -661,6 +785,7 @@ function TestFix(names, handler) {
TestFix([], {
fix: function() { return {} }
})
TestFix(["a", "b", "c", "d", "zz"], {
fix: function() {
return {
@ -672,12 +797,14 @@ TestFix(["a", "b", "c", "d", "zz"], {
}
}
})
TestFix(["a"], {
fix: function() { return this.fix2() },
fix2: function() {
return {a: {value: 4, writable: true, configurable: true, enumerable: true}}
}
})
TestFix(["b"], {
get fix() {
return function() {
@ -685,3 +812,87 @@ TestFix(["b"], {
}
}
})
// String conversion (Object.prototype.toString, Object.prototype.toLocaleString)
var key
function TestToString(handler) {
var o = Proxy.create(handler)
key = ""
assertEquals("[object Object]", Object.prototype.toString.call(o))
assertEquals("", key)
assertEquals("my_proxy", Object.prototype.toLocaleString.call(o))
assertEquals("toString", key)
}
TestToString({
get: function(r, k) { key = k; return function() { return "my_proxy" } }
})
TestToString({
get: function(r, k) { return this.get2(r, k) },
get2: function(r, k) { key = k; return function() { return "my_proxy" } }
})
TestToString(Proxy.create({
get: function(pr, pk) {
return function(r, k) { key = k; return function() { return "my_proxy" } }
}
}))
// Value conversion (Object.prototype.valueOf)
function TestValueOf(handler) {
var o = Proxy.create(handler)
assertSame(o, Object.prototype.valueOf.call(o))
}
TestValueOf({})
// Enumerability (Object.prototype.propertyIsEnumerable)
var key
function TestIsEnumerable(handler) {
var o = Proxy.create(handler)
assertTrue(Object.prototype.propertyIsEnumerable.call(o, "a"))
assertEquals("a", key)
assertTrue(Object.prototype.propertyIsEnumerable.call(o, 2))
assertEquals("2", key)
assertFalse(Object.prototype.propertyIsEnumerable.call(o, "z"))
assertEquals("z", key)
}
TestIsEnumerable({
getOwnPropertyDescriptor: function(k) {
key = k; return {enumerable: k < "z", configurable: true}
},
})
TestIsEnumerable({
getOwnPropertyDescriptor: function(k) {
return this.getOwnPropertyDescriptor2(k)
},
getOwnPropertyDescriptor2: function(k) {
key = k; return {enumerable: k < "z", configurable: true}
},
})
TestIsEnumerable({
getOwnPropertyDescriptor: function(k) {
key = k; return {get enumerable() { return k < "z" }, configurable: true}
},
})
TestIsEnumerable(Proxy.create({
get: function(pr, pk) {
return function(k) {
key = k; return {enumerable: k < "z", configurable: true}
}
}
}))

27
deps/v8/test/mjsunit/math-floor.js

@ -51,6 +51,17 @@ function test() {
testFloor(-Infinity, -Infinity);
testFloor(NaN, NaN);
// Ensure that a negative zero coming from Math.floor is properly handled
// by other operations.
function ifloor(x) {
return 1 / Math.floor(x);
}
assertEquals(-Infinity, ifloor(-0));
assertEquals(-Infinity, ifloor(-0));
assertEquals(-Infinity, ifloor(-0));
%OptimizeFunctionOnNextCall(ifloor);
assertEquals(-Infinity, ifloor(-0));
testFloor(0, 0.1);
testFloor(0, 0.49999999999999994);
testFloor(0, 0.5);
@ -129,3 +140,19 @@ function test() {
for (var i = 0; i < 500; i++) {
test();
}
// Regression test for a bug where a negative zero coming from Math.floor
// was not properly handled by other operations.
function floorsum(i, n) {
var ret = Math.floor(n);
while (--i > 0) {
ret += Math.floor(n);
}
return ret;
}
assertEquals(-0, floorsum(1, -0));
%OptimizeFunctionOnNextCall(floorsum);
// The optimized function will deopt. Run it with enough iterations to try
// to optimize via OSR (triggering the bug).
assertEquals(-0, floorsum(100000, -0));

17
deps/v8/test/mjsunit/math-round.js

@ -1,4 +1,4 @@
// Copyright 2010 the V8 project authors. All rights reserved.
// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@ -44,6 +44,21 @@ testRound(Infinity, Infinity);
testRound(-Infinity, -Infinity);
testRound(NaN, NaN);
// Regression test for a bug where a negative zero coming from Math.round
// was not properly handled by other operations.
function roundsum(i, n) {
var ret = Math.round(n);
while (--i > 0) {
ret += Math.round(n);
}
return ret;
}
assertEquals(-0, roundsum(1, -0));
%OptimizeFunctionOnNextCall(roundsum);
// The optimized function will deopt. Run it with enough iterations to try
// to optimize via OSR (triggering the bug).
assertEquals(-0, roundsum(100000, -0));
testRound(1, 0.5);
testRound(1, 0.7);
testRound(1, 1);

21
deps/v8/test/mjsunit/regress/regress-1341167.js → deps/v8/test/mjsunit/regress/regress-1563.js

@ -25,9 +25,20 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Make sure that 'this' is bound to the global object when using
// execScript.
// Flags: --allow-natives-syntax
var result;
execScript("result = this");
assertTrue(result === this);
obj = new PixelArray(10);
// Test that undefined gets properly clamped in Crankshafted pixel array
// assignments.
function set_pixel(obj, arg) {
obj[0] = arg;
}
set_pixel(obj, 1.5);
set_pixel(obj, NaN);
%OptimizeFunctionOnNextCall(set_pixel);
set_pixel(obj, undefined);
set_pixel(obj, undefined);
assertEquals(0, obj[0]);

47
deps/v8/test/mjsunit/regress/regress-1582.js

@ -0,0 +1,47 @@
// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Flags: --allow-natives-syntax
function f(restIsArray, rest) {
var arr;
if (typeof rest === "object" && (rest instanceof Array)) {
arr = rest;
} else {
arr = arguments;
}
var i = arr.length;
while (--i >= 0) arr[i];
var arrIsArguments = (arr[1] !== rest);
assertEquals(restIsArray, arrIsArguments);
}
f(false, 'b', 'c');
f(false, 'b', 'c');
f(false, 'b', 'c');
%OptimizeFunctionOnNextCall(f);
f(true, ['b', 'c']);

43
deps/v8/test/mjsunit/regress/regress-91008.js

@ -0,0 +1,43 @@
// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
function testsort(n) {
var numbers=new Array(n);
for (var i=0;i<n;i++) numbers[i]=i;
delete numbers[50];
delete numbers[150];
delete numbers[25000];
delete numbers[n-1];
delete numbers[n-2];
delete numbers[30];
delete numbers[2];
delete numbers[1];
delete numbers[0];
numbers.sort();
}
testsort(100000)

18
deps/v8/test/mjsunit/execScript-case-insensitive.js → deps/v8/test/mjsunit/regress/regress-91010.js

@ -1,4 +1,4 @@
// Copyright 2008 the V8 project authors. All rights reserved.
// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@ -25,10 +25,12 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
var x = 0;
execScript('x = 1', 'javascript');
assertEquals(1, x);
execScript('x = 2', 'JavaScript');
assertEquals(2, x);
try {
try {
var N = 100*1000;
var array = Array(N);
for (var i = 0; i != N; ++i)
array[i] = i;
} catch(ex) {}
array.unshift('Kibo');
} catch(ex) {}

51
deps/v8/test/mjsunit/regress/regress-91013.js

@ -0,0 +1,51 @@
// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Test that KeyedStore stub for unboxed double arrays backing store
// correctly returns stored value as the result.
// Flags: --allow-natives-syntax --unbox-double-arrays
// Create array with unboxed double array backing store.
var i = 100000;
var a = new Array(i);
for (var j = 0; j < i; j++) {
a[j] = 0.5;
}
assertTrue(%HasFastDoubleElements(a));
// Store some smis into it.
for (var j = 0; j < 10; j++) {
assertEquals(j, a[j] = j);
}
// Store some heap numbers into it.
for (var j = 0; j < 10; j++) {
var v = j + 0.5;
assertEquals(v, a[j] = v);
}

48
deps/v8/test/mjsunit/regress/regress-91120.js

@ -0,0 +1,48 @@
// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// We intend that the function declaration for g inside catch is hoisted to
// function f's scope. Invoke it before try/catch, in the try block, in the
// catch block, after try/catch, and outside f, and verify that it has
// access to the proper binding of x.
var x = 'global';
function f() {
var x = 'function';
assertEquals('function', g());
try {
assertEquals('function', g());
throw 'catch';
} catch (x) {
function g() { return x; }
assertEquals('function', g());
}
assertEquals('function', g());
return g;
}
assertEquals('function', f()());

65
deps/v8/test/mjsunit/scope-calls-eval.js

@ -0,0 +1,65 @@
// Copyright 2011 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Tests if the information about eval calls in a function is
// propagated correctly through catch and with blocks.
function f1() {
var x = 5;
function g() {
try {
throw '';
} catch (e) {
eval('var x = 3;');
}
try {
throw '';
} catch (e) {
return x;
}
}
return g();
}
function f2() {
var x = 5;
function g() {
with ({e:42}) {
eval('var x = 3;');
}
with ({e:42}) {
return x;
}
}
return g();
}
assertEquals(3, f1());
assertEquals(3, f2());

2
deps/v8/test/mjsunit/testcfg.py

@ -145,7 +145,7 @@ class MjsunitTestConfiguration(test.TestConfiguration):
return result
def GetBuildRequirements(self):
return ['sample', 'sample=shell']
return ['d8']
def GetTestStatus(self, sections, defs):
status_file = join(self.root, 'mjsunit.status')

63
deps/v8/test/mjsunit/unbox-double-arrays.js

@ -29,12 +29,12 @@
// Flags: --allow-natives-syntax --unbox-double-arrays --expose-gc
var large_array_size = 100000;
var approx_dict_to_elements_threshold = 75000;
var approx_dict_to_elements_threshold = 70000;
var name = 0;
function expected_array_value(i) {
if ((i % 2) == 0) {
if ((i % 50) != 0) {
return i;
} else {
return i + 0.5;
@ -466,3 +466,62 @@ test_for_in();
test_for_in();
test_for_in();
test_for_in();
function test_get_property_names() {
names = %GetPropertyNames(large_array3);
property_name_count = 0;
for (x in names) { property_name_count++; };
assertEquals(26, property_name_count);
}
test_get_property_names();
test_get_property_names();
test_get_property_names();
// Test element getters.
assertEquals(expected_array_value(10), large_array3[10]);
assertEquals(expected_array_value(-NaN), large_array3[2]);
large_array3.__defineGetter__("2", function(){
return expected_array_value(10);
});
function test_getter() {
assertEquals(expected_array_value(10), large_array3[10]);
assertEquals(expected_array_value(10), large_array3[2]);
}
test_getter();
test_getter();
test_getter();
%OptimizeFunctionOnNextCall(test_getter);
test_getter();
test_getter();
test_getter();
// Test element setters.
large_array4 = new Array(large_array_size);
force_to_fast_double_array(large_array4);
var setter_called = false;
assertEquals(expected_array_value(10), large_array4[10]);
assertEquals(expected_array_value(2), large_array4[2]);
large_array4.__defineSetter__("10", function(value){
setter_called = true;
});
function test_setter() {
setter_called = false;
large_array4[10] = 119;
assertTrue(setter_called);
assertEquals(undefined, large_array4[10]);
assertEquals(expected_array_value(2), large_array4[2]);
}
test_setter();
test_setter();
test_setter();
%OptimizeFunctionOnNextCall(test_setter);
test_setter();
test_setter();
test_setter();

2
deps/v8/test/mozilla/testcfg.py

@ -125,7 +125,7 @@ class MozillaTestConfiguration(test.TestConfiguration):
return tests
def GetBuildRequirements(self):
return ['sample', 'sample=shell']
return ['d8']
def GetTestStatus(self, sections, defs):
status_file = join(self.root, 'mozilla.status')

2
deps/v8/test/sputnik/testcfg.py

@ -101,7 +101,7 @@ class SputnikTestConfiguration(test.TestConfiguration):
return result
def GetBuildRequirements(self):
return ['sample', 'sample=shell']
return ['d8']
def GetTestStatus(self, sections, defs):
status_file = join(self.root, 'sputnik.status')

2
deps/v8/test/test262/testcfg.py

@ -111,7 +111,7 @@ class Test262TestConfiguration(test.TestConfiguration):
return tests
def GetBuildRequirements(self):
return ['sample', 'sample=shell']
return ['d8']
def GetTestStatus(self, sections, defs):
status_file = join(self.root, 'test262.status')

Some files were not shown because too many files changed in this diff
