Browse Source

Upgrade v8 to 1.3.16

and apply my V8 patch for debugging.
v0.7.4-release
Ryan Dahl 15 years ago
parent
commit
8e6d978bba
  1. 4
      deps/v8/.gitignore
  2. 21
      deps/v8/ChangeLog
  3. 5
      deps/v8/SConstruct
  4. 115
      deps/v8/include/v8.h
  5. 168
      deps/v8/src/SConscript
  6. 60
      deps/v8/src/api.cc
  7. 8
      deps/v8/src/arm/assembler-arm-inl.h
  8. 319
      deps/v8/src/arm/codegen-arm.cc
  9. 19
      deps/v8/src/arm/codegen-arm.h
  10. 2
      deps/v8/src/arm/debug-arm.cc
  11. 176
      deps/v8/src/arm/fast-codegen-arm.cc
  12. 1
      deps/v8/src/arm/macro-assembler-arm.h
  13. 14
      deps/v8/src/array.js
  14. 3
      deps/v8/src/assembler.cc
  15. 8
      deps/v8/src/assembler.h
  16. 21
      deps/v8/src/ast.cc
  17. 97
      deps/v8/src/ast.h
  18. 21
      deps/v8/src/checks.h
  19. 2
      deps/v8/src/code-stubs.cc
  20. 1
      deps/v8/src/code-stubs.h
  21. 126
      deps/v8/src/codegen.cc
  22. 4
      deps/v8/src/codegen.h
  23. 10
      deps/v8/src/compilation-cache.cc
  24. 303
      deps/v8/src/compiler.cc
  25. 2
      deps/v8/src/d8-posix.cc
  26. 2
      deps/v8/src/d8.js
  27. 2
      deps/v8/src/dateparser-inl.h
  28. 31
      deps/v8/src/debug-delay.js
  29. 39
      deps/v8/src/debug.cc
  30. 10
      deps/v8/src/execution.cc
  31. 1
      deps/v8/src/execution.h
  32. 3
      deps/v8/src/factory.cc
  33. 1
      deps/v8/src/factory.h
  34. 269
      deps/v8/src/fast-codegen.cc
  35. 71
      deps/v8/src/fast-codegen.h
  36. 22
      deps/v8/src/flag-definitions.h
  37. 10
      deps/v8/src/global-handles.cc
  38. 6
      deps/v8/src/global-handles.h
  39. 56
      deps/v8/src/heap-profiler.cc
  40. 8
      deps/v8/src/heap-profiler.h
  41. 9
      deps/v8/src/heap.cc
  42. 14
      deps/v8/src/ia32/assembler-ia32-inl.h
  43. 13
      deps/v8/src/ia32/assembler-ia32.cc
  44. 5
      deps/v8/src/ia32/assembler-ia32.h
  45. 800
      deps/v8/src/ia32/codegen-ia32.cc
  46. 81
      deps/v8/src/ia32/codegen-ia32.h
  47. 2
      deps/v8/src/ia32/debug-ia32.cc
  48. 34
      deps/v8/src/ia32/disasm-ia32.cc
  49. 163
      deps/v8/src/ia32/fast-codegen-ia32.cc
  50. 11
      deps/v8/src/ia32/ic-ia32.cc
  51. 35
      deps/v8/src/ia32/virtual-frame-ia32.cc
  52. 3
      deps/v8/src/jsregexp.cc
  53. 2
      deps/v8/src/jsregexp.h
  54. 2
      deps/v8/src/jump-target.h
  55. 56
      deps/v8/src/location.h
  56. 15
      deps/v8/src/log.cc
  57. 2
      deps/v8/src/log.h
  58. 4
      deps/v8/src/macros.py
  59. 5
      deps/v8/src/mark-compact.cc
  60. 2
      deps/v8/src/mirror-delay.js
  61. 1
      deps/v8/src/objects-debug.cc
  62. 51
      deps/v8/src/objects-inl.h
  63. 17
      deps/v8/src/objects.cc
  64. 51
      deps/v8/src/objects.h
  65. 55
      deps/v8/src/parser.cc
  66. 10
      deps/v8/src/parser.h
  67. 1
      deps/v8/src/platform-win32.cc
  68. 506
      deps/v8/src/prettyprinter.cc
  69. 118
      deps/v8/src/prettyprinter.h
  70. 56
      deps/v8/src/rewriter.cc
  71. 49
      deps/v8/src/runtime.cc
  72. 1
      deps/v8/src/runtime.h
  73. 20
      deps/v8/src/serialize.cc
  74. 15
      deps/v8/src/string.js
  75. 11
      deps/v8/src/top.cc
  76. 29
      deps/v8/src/usage-analyzer.cc
  77. 3
      deps/v8/src/utils.h
  78. 8
      deps/v8/src/v8-counters.h
  79. 7
      deps/v8/src/v8.cc
  80. 2
      deps/v8/src/version.cc
  81. 23
      deps/v8/src/x64/assembler-x64-inl.h
  82. 92
      deps/v8/src/x64/assembler-x64.cc
  83. 46
      deps/v8/src/x64/assembler-x64.h
  84. 46
      deps/v8/src/x64/builtins-x64.cc
  85. 779
      deps/v8/src/x64/codegen-x64.cc
  86. 19
      deps/v8/src/x64/codegen-x64.h
  87. 5
      deps/v8/src/x64/debug-x64.cc
  88. 181
      deps/v8/src/x64/fast-codegen-x64.cc
  89. 3
      deps/v8/src/x64/frames-x64.h
  90. 51
      deps/v8/src/x64/ic-x64.cc
  91. 1060
      deps/v8/src/x64/macro-assembler-x64.cc
  92. 87
      deps/v8/src/x64/macro-assembler-x64.h
  93. 13
      deps/v8/src/x64/stub-cache-x64.cc
  94. 47
      deps/v8/src/x64/virtual-frame-x64.cc
  95. 1
      deps/v8/src/x64/virtual-frame-x64.h
  96. 4
      deps/v8/test/cctest/SConscript
  97. 162
      deps/v8/test/cctest/test-api.cc
  98. 54
      deps/v8/test/cctest/test-assembler-x64.cc
  99. 138
      deps/v8/test/cctest/test-debug.cc
  100. 24
      deps/v8/test/cctest/test-disasm-ia32.cc

4
deps/v8/.gitignore

@ -10,6 +10,10 @@
*.suo *.suo
*.user *.user
*.xcodeproj *.xcodeproj
*.idb
*.pdb
#*#
*~
d8 d8
d8_g d8_g
shell shell

21
deps/v8/ChangeLog

@ -1,3 +1,24 @@
2009-10-16: Version 1.3.16
X64: Convert smis to holding 32 bits of payload.
Introduce v8::Integer::NewFromUnsigned method.
Add missing null check in Context::GetCurrent.
Add trim, trimLeft and trimRight methods to String
Patch by Jan de Mooij <jandemooij@gmail.com>
Implement ES5 Array.isArray
Patch by Jan de Mooij <jandemooij@gmail.com>
Skip access checks for hidden properties.
Add String::Concat(Handle<String> left, Handle<String> right) to the V8 API.
Fix GYP-based builds of V8.
2009-10-07: Version 1.3.15 2009-10-07: Version 1.3.15
Expand the maximum size of the code space to 512MB for 64-bit mode. Expand the maximum size of the code space to 512MB for 64-bit mode.

5
deps/v8/SConstruct

@ -372,7 +372,8 @@ CCTEST_EXTRA_FLAGS = {
'CPPDEFINES': ['V8_TARGET_ARCH_IA32'] 'CPPDEFINES': ['V8_TARGET_ARCH_IA32']
}, },
'arch:x64': { 'arch:x64': {
'CPPDEFINES': ['V8_TARGET_ARCH_X64'] 'CPPDEFINES': ['V8_TARGET_ARCH_X64'],
'LINKFLAGS': ['/STACK:2091752']
}, },
} }
} }
@ -473,7 +474,7 @@ SAMPLE_FLAGS = {
}, },
'arch:x64': { 'arch:x64': {
'CPPDEFINES': ['V8_TARGET_ARCH_X64'], 'CPPDEFINES': ['V8_TARGET_ARCH_X64'],
'LINKFLAGS': ['/MACHINE:X64'] 'LINKFLAGS': ['/MACHINE:X64', '/STACK:2091752']
}, },
'mode:debug': { 'mode:debug': {
'CCFLAGS': ['/Od'], 'CCFLAGS': ['/Od'],

115
deps/v8/include/v8.h

@ -756,7 +756,7 @@ class V8EXPORT Value : public Data {
/** JS == */ /** JS == */
bool Equals(Handle<Value> that) const; bool Equals(Handle<Value> that) const;
bool StrictEquals(Handle<Value> that) const; bool StrictEquals(Handle<Value> that) const;
private: private:
inline bool QuickIsString() const; inline bool QuickIsString() const;
bool FullIsString() const; bool FullIsString() const;
@ -918,6 +918,12 @@ class V8EXPORT String : public Primitive {
/** Creates a symbol. Returns one if it exists already.*/ /** Creates a symbol. Returns one if it exists already.*/
static Local<String> NewSymbol(const char* data, int length = -1); static Local<String> NewSymbol(const char* data, int length = -1);
/**
* Creates a new string by concatenating the left and the right strings
* passed in as parameters.
*/
static Local<String> Concat(Handle<String> left, Handle<String>right);
/** /**
* Creates a new external string using the data defined in the given * Creates a new external string using the data defined in the given
* resource. The resource is deleted when the external string is no * resource. The resource is deleted when the external string is no
@ -1036,7 +1042,7 @@ class V8EXPORT String : public Primitive {
Value(const Value&); Value(const Value&);
void operator=(const Value&); void operator=(const Value&);
}; };
private: private:
void VerifyExternalStringResource(ExternalStringResource* val) const; void VerifyExternalStringResource(ExternalStringResource* val) const;
static void CheckCast(v8::Value* obj); static void CheckCast(v8::Value* obj);
@ -1063,6 +1069,7 @@ class V8EXPORT Number : public Primitive {
class V8EXPORT Integer : public Number { class V8EXPORT Integer : public Number {
public: public:
static Local<Integer> New(int32_t value); static Local<Integer> New(int32_t value);
static inline Local<Integer> NewFromUnsigned(uint32_t value);
int64_t Value() const; int64_t Value() const;
static inline Integer* Cast(v8::Value* obj); static inline Integer* Cast(v8::Value* obj);
private: private:
@ -1193,7 +1200,7 @@ class V8EXPORT Object : public Value {
/** Gets a native pointer from an internal field. */ /** Gets a native pointer from an internal field. */
inline void* GetPointerFromInternalField(int index); inline void* GetPointerFromInternalField(int index);
/** Sets a native pointer in an internal field. */ /** Sets a native pointer in an internal field. */
void SetPointerInInternalField(int index, void* value); void SetPointerInInternalField(int index, void* value);
@ -1246,7 +1253,7 @@ class V8EXPORT Object : public Value {
bool SetHiddenValue(Handle<String> key, Handle<Value> value); bool SetHiddenValue(Handle<String> key, Handle<Value> value);
Local<Value> GetHiddenValue(Handle<String> key); Local<Value> GetHiddenValue(Handle<String> key);
bool DeleteHiddenValue(Handle<String> key); bool DeleteHiddenValue(Handle<String> key);
/** /**
* Returns true if this is an instance of an api function (one * Returns true if this is an instance of an api function (one
* created from a function created from a function template) and has * created from a function created from a function template) and has
@ -1277,10 +1284,11 @@ class V8EXPORT Object : public Value {
Object(); Object();
static void CheckCast(Value* obj); static void CheckCast(Value* obj);
Local<Value> CheckedGetInternalField(int index); Local<Value> CheckedGetInternalField(int index);
void* SlowGetPointerFromInternalField(int index);
/** /**
* If quick access to the internal field is possible this method * If quick access to the internal field is possible this method
* returns the value. Otherwise an empty handle is returned. * returns the value. Otherwise an empty handle is returned.
*/ */
inline Local<Value> UncheckedGetInternalField(int index); inline Local<Value> UncheckedGetInternalField(int index);
}; };
@ -2719,12 +2727,37 @@ const int kHeapObjectTag = 1;
const int kHeapObjectTagSize = 2; const int kHeapObjectTagSize = 2;
const intptr_t kHeapObjectTagMask = (1 << kHeapObjectTagSize) - 1; const intptr_t kHeapObjectTagMask = (1 << kHeapObjectTagSize) - 1;
// Tag information for Smi. // Tag information for Smi.
const int kSmiTag = 0; const int kSmiTag = 0;
const int kSmiTagSize = 1; const int kSmiTagSize = 1;
const intptr_t kSmiTagMask = (1 << kSmiTagSize) - 1; const intptr_t kSmiTagMask = (1 << kSmiTagSize) - 1;
template <size_t ptr_size> struct SmiConstants;
// Smi constants for 32-bit systems.
template <> struct SmiConstants<4> {
static const int kSmiShiftSize = 0;
static const int kSmiValueSize = 31;
static inline int SmiToInt(internal::Object* value) {
int shift_bits = kSmiTagSize + kSmiShiftSize;
// Throw away top 32 bits and shift down (requires >> to be sign extending).
return static_cast<int>(reinterpret_cast<intptr_t>(value)) >> shift_bits;
}
};
// Smi constants for 64-bit systems.
template <> struct SmiConstants<8> {
static const int kSmiShiftSize = 31;
static const int kSmiValueSize = 32;
static inline int SmiToInt(internal::Object* value) {
int shift_bits = kSmiTagSize + kSmiShiftSize;
// Shift down and throw away top 32 bits.
return static_cast<int>(reinterpret_cast<intptr_t>(value) >> shift_bits);
}
};
const int kSmiShiftSize = SmiConstants<sizeof(void*)>::kSmiShiftSize;
const int kSmiValueSize = SmiConstants<sizeof(void*)>::kSmiValueSize;
/** /**
* This class exports constants and functionality from within v8 that * This class exports constants and functionality from within v8 that
@ -2743,7 +2776,6 @@ class Internals {
static const int kJSObjectHeaderSize = 3 * sizeof(void*); static const int kJSObjectHeaderSize = 3 * sizeof(void*);
static const int kFullStringRepresentationMask = 0x07; static const int kFullStringRepresentationMask = 0x07;
static const int kExternalTwoByteRepresentationTag = 0x03; static const int kExternalTwoByteRepresentationTag = 0x03;
static const int kAlignedPointerShift = 2;
// These constants are compiler dependent so their values must be // These constants are compiler dependent so their values must be
// defined within the implementation. // defined within the implementation.
@ -2761,7 +2793,23 @@ class Internals {
} }
static inline int SmiValue(internal::Object* value) { static inline int SmiValue(internal::Object* value) {
return static_cast<int>(reinterpret_cast<intptr_t>(value)) >> kSmiTagSize; return SmiConstants<sizeof(void*)>::SmiToInt(value);
}
static inline int GetInstanceType(internal::Object* obj) {
typedef internal::Object O;
O* map = ReadField<O*>(obj, kHeapObjectMapOffset);
return ReadField<uint8_t>(map, kMapInstanceTypeOffset);
}
static inline void* GetExternalPointer(internal::Object* obj) {
if (HasSmiTag(obj)) {
return obj;
} else if (GetInstanceType(obj) == kProxyType) {
return ReadField<void*>(obj, kProxyProxyOffset);
} else {
return NULL;
}
} }
static inline bool IsExternalTwoByteString(int instance_type) { static inline bool IsExternalTwoByteString(int instance_type) {
@ -2921,9 +2969,7 @@ Local<Value> Object::UncheckedGetInternalField(int index) {
typedef internal::Object O; typedef internal::Object O;
typedef internal::Internals I; typedef internal::Internals I;
O* obj = *reinterpret_cast<O**>(this); O* obj = *reinterpret_cast<O**>(this);
O* map = I::ReadField<O*>(obj, I::kHeapObjectMapOffset); if (I::GetInstanceType(obj) == I::kJSObjectType) {
int instance_type = I::ReadField<uint8_t>(map, I::kMapInstanceTypeOffset);
if (instance_type == I::kJSObjectType) {
// If the object is a plain JSObject, which is the common case, // If the object is a plain JSObject, which is the common case,
// we know where to find the internal fields and can return the // we know where to find the internal fields and can return the
// value directly. // value directly.
@ -2948,25 +2994,27 @@ void* External::Unwrap(Handle<v8::Value> obj) {
void* External::QuickUnwrap(Handle<v8::Value> wrapper) { void* External::QuickUnwrap(Handle<v8::Value> wrapper) {
typedef internal::Object O; typedef internal::Object O;
typedef internal::Internals I;
O* obj = *reinterpret_cast<O**>(const_cast<v8::Value*>(*wrapper)); O* obj = *reinterpret_cast<O**>(const_cast<v8::Value*>(*wrapper));
if (I::HasSmiTag(obj)) { return internal::Internals::GetExternalPointer(obj);
int value = I::SmiValue(obj) << I::kAlignedPointerShift;
return reinterpret_cast<void*>(value);
} else {
O* map = I::ReadField<O*>(obj, I::kHeapObjectMapOffset);
int instance_type = I::ReadField<uint8_t>(map, I::kMapInstanceTypeOffset);
if (instance_type == I::kProxyType) {
return I::ReadField<void*>(obj, I::kProxyProxyOffset);
} else {
return NULL;
}
}
} }
void* Object::GetPointerFromInternalField(int index) { void* Object::GetPointerFromInternalField(int index) {
return External::Unwrap(GetInternalField(index)); typedef internal::Object O;
typedef internal::Internals I;
O* obj = *reinterpret_cast<O**>(this);
if (I::GetInstanceType(obj) == I::kJSObjectType) {
// If the object is a plain JSObject, which is the common case,
// we know where to find the internal fields and can return the
// value directly.
int offset = I::kJSObjectHeaderSize + (sizeof(void*) * index);
O* value = I::ReadField<O*>(obj, offset);
return I::GetExternalPointer(value);
}
return SlowGetPointerFromInternalField(index);
} }
@ -2982,10 +3030,8 @@ String::ExternalStringResource* String::GetExternalStringResource() const {
typedef internal::Object O; typedef internal::Object O;
typedef internal::Internals I; typedef internal::Internals I;
O* obj = *reinterpret_cast<O**>(const_cast<String*>(this)); O* obj = *reinterpret_cast<O**>(const_cast<String*>(this));
O* map = I::ReadField<O*>(obj, I::kHeapObjectMapOffset);
int instance_type = I::ReadField<uint8_t>(map, I::kMapInstanceTypeOffset);
String::ExternalStringResource* result; String::ExternalStringResource* result;
if (I::IsExternalTwoByteString(instance_type)) { if (I::IsExternalTwoByteString(I::GetInstanceType(obj))) {
void* value = I::ReadField<void*>(obj, I::kStringResourceOffset); void* value = I::ReadField<void*>(obj, I::kStringResourceOffset);
result = reinterpret_cast<String::ExternalStringResource*>(value); result = reinterpret_cast<String::ExternalStringResource*>(value);
} else { } else {
@ -3011,9 +3057,7 @@ bool Value::QuickIsString() const {
typedef internal::Internals I; typedef internal::Internals I;
O* obj = *reinterpret_cast<O**>(const_cast<Value*>(this)); O* obj = *reinterpret_cast<O**>(const_cast<Value*>(this));
if (!I::HasHeapObjectTag(obj)) return false; if (!I::HasHeapObjectTag(obj)) return false;
O* map = I::ReadField<O*>(obj, I::kHeapObjectMapOffset); return (I::GetInstanceType(obj) < I::kFirstNonstringType);
int instance_type = I::ReadField<uint8_t>(map, I::kMapInstanceTypeOffset);
return (instance_type < I::kFirstNonstringType);
} }
@ -3025,6 +3069,15 @@ Number* Number::Cast(v8::Value* value) {
} }
Local<Integer> Integer::NewFromUnsigned(uint32_t value) {
bool fits_into_int32_t = (value & (1 << 31)) == 0;
if (fits_into_int32_t) {
return Integer::New(static_cast<int32_t>(value));
}
return Local<Integer>::Cast(Number::New(value));
}
Integer* Integer::Cast(v8::Value* value) { Integer* Integer::Cast(v8::Value* value) {
#ifdef V8_ENABLE_CHECKS #ifdef V8_ENABLE_CHECKS
CheckCast(value); CheckCast(value);

168
deps/v8/src/SConscript

@ -34,51 +34,129 @@ Import('context')
SOURCES = { SOURCES = {
'all': [ 'all': Split("""
'accessors.cc', 'allocation.cc', 'api.cc', 'assembler.cc', 'ast.cc', accessors.cc
'bootstrapper.cc', 'builtins.cc', 'checks.cc', 'code-stubs.cc', allocation.cc
'codegen.cc', 'compilation-cache.cc', 'compiler.cc', 'contexts.cc', api.cc
'conversions.cc', 'counters.cc', 'dateparser.cc', 'debug.cc', assembler.cc
'debug-agent.cc', 'disassembler.cc', 'execution.cc', 'factory.cc', ast.cc
'flags.cc', 'frame-element.cc', 'frames.cc', 'func-name-inferrer.cc', bootstrapper.cc
'global-handles.cc', 'handles.cc', 'hashmap.cc', 'heap.cc', builtins.cc
'heap-profiler.cc', 'ic.cc', 'interpreter-irregexp.cc', 'jsregexp.cc', checks.cc
'jump-target.cc', 'log.cc', 'log-utils.cc', 'mark-compact.cc', code-stubs.cc
'messages.cc', 'objects.cc', 'oprofile-agent.cc', 'parser.cc', codegen.cc
'property.cc', 'regexp-macro-assembler.cc', compilation-cache.cc
'regexp-macro-assembler-irregexp.cc', 'regexp-stack.cc', compiler.cc
'register-allocator.cc', 'rewriter.cc', 'runtime.cc', 'scanner.cc', contexts.cc
'scopeinfo.cc', 'scopes.cc', 'serialize.cc', 'snapshot-common.cc', conversions.cc
'spaces.cc', 'string-stream.cc', 'stub-cache.cc', 'token.cc', 'top.cc', counters.cc
'unicode.cc', 'usage-analyzer.cc', 'utils.cc', 'v8-counters.cc', dateparser.cc
'v8.cc', 'v8threads.cc', 'variables.cc', 'version.cc', debug-agent.cc
'virtual-frame.cc', 'zone.cc' debug.cc
], disassembler.cc
'arch:arm': [ execution.cc
'arm/assembler-arm.cc', 'arm/builtins-arm.cc', 'arm/codegen-arm.cc', factory.cc
'arm/constants-arm.cc', 'arm/cpu-arm.cc', 'arm/disasm-arm.cc', fast-codegen.cc
'arm/debug-arm.cc', 'arm/frames-arm.cc', 'arm/ic-arm.cc', flags.cc
'arm/jump-target-arm.cc', 'arm/macro-assembler-arm.cc', frame-element.cc
'arm/regexp-macro-assembler-arm.cc', 'arm/register-allocator-arm.cc', frames.cc
'arm/stub-cache-arm.cc', 'arm/virtual-frame-arm.cc' func-name-inferrer.cc
], global-handles.cc
'arch:ia32': [ handles.cc
'ia32/assembler-ia32.cc', 'ia32/builtins-ia32.cc', hashmap.cc
'ia32/codegen-ia32.cc', 'ia32/cpu-ia32.cc', 'ia32/disasm-ia32.cc', heap-profiler.cc
'ia32/debug-ia32.cc', 'ia32/frames-ia32.cc', 'ia32/ic-ia32.cc', heap.cc
'ia32/jump-target-ia32.cc', 'ia32/macro-assembler-ia32.cc', ic.cc
'ia32/regexp-macro-assembler-ia32.cc', interpreter-irregexp.cc
'ia32/register-allocator-ia32.cc', 'ia32/stub-cache-ia32.cc', jsregexp.cc
'ia32/virtual-frame-ia32.cc' jump-target.cc
], log-utils.cc
'arch:x64': [ log.cc
'x64/assembler-x64.cc', 'x64/builtins-x64.cc', 'x64/codegen-x64.cc', mark-compact.cc
'x64/cpu-x64.cc', 'x64/disasm-x64.cc', 'x64/debug-x64.cc', messages.cc
'x64/frames-x64.cc', 'x64/ic-x64.cc', 'x64/jump-target-x64.cc', objects.cc
'x64/macro-assembler-x64.cc', 'x64/regexp-macro-assembler-x64.cc', oprofile-agent.cc
'x64/register-allocator-x64.cc', 'x64/stub-cache-x64.cc', parser.cc
'x64/virtual-frame-x64.cc' property.cc
], regexp-macro-assembler-irregexp.cc
regexp-macro-assembler.cc
regexp-stack.cc
register-allocator.cc
rewriter.cc
runtime.cc
scanner.cc
scopeinfo.cc
scopes.cc
serialize.cc
snapshot-common.cc
spaces.cc
string-stream.cc
stub-cache.cc
token.cc
top.cc
unicode.cc
usage-analyzer.cc
utils.cc
v8-counters.cc
v8.cc
v8threads.cc
variables.cc
version.cc
virtual-frame.cc
zone.cc
"""),
'arch:arm': Split("""
arm/assembler-arm.cc
arm/builtins-arm.cc
arm/codegen-arm.cc
arm/constants-arm.cc
arm/cpu-arm.cc
arm/debug-arm.cc
arm/disasm-arm.cc
arm/fast-codegen-arm.cc
arm/frames-arm.cc
arm/ic-arm.cc
arm/jump-target-arm.cc
arm/macro-assembler-arm.cc
arm/regexp-macro-assembler-arm.cc
arm/register-allocator-arm.cc
arm/stub-cache-arm.cc
arm/virtual-frame-arm.cc
"""),
'arch:ia32': Split("""
ia32/assembler-ia32.cc
ia32/builtins-ia32.cc
ia32/codegen-ia32.cc
ia32/cpu-ia32.cc
ia32/debug-ia32.cc
ia32/disasm-ia32.cc
ia32/fast-codegen-ia32.cc
ia32/frames-ia32.cc
ia32/ic-ia32.cc
ia32/jump-target-ia32.cc
ia32/macro-assembler-ia32.cc
ia32/regexp-macro-assembler-ia32.cc
ia32/register-allocator-ia32.cc
ia32/stub-cache-ia32.cc
ia32/virtual-frame-ia32.cc
"""),
'arch:x64': Split("""
x64/assembler-x64.cc
x64/builtins-x64.cc
x64/codegen-x64.cc
x64/cpu-x64.cc
x64/debug-x64.cc
x64/disasm-x64.cc
x64/fast-codegen-x64.cc
x64/frames-x64.cc
x64/ic-x64.cc
x64/jump-target-x64.cc
x64/macro-assembler-x64.cc
x64/regexp-macro-assembler-x64.cc
x64/register-allocator-x64.cc
x64/stub-cache-x64.cc
x64/virtual-frame-x64.cc
"""),
'simulator:arm': ['arm/simulator-arm.cc'], 'simulator:arm': ['arm/simulator-arm.cc'],
'os:freebsd': ['platform-freebsd.cc', 'platform-posix.cc'], 'os:freebsd': ['platform-freebsd.cc', 'platform-posix.cc'],
'os:linux': ['platform-linux.cc', 'platform-posix.cc'], 'os:linux': ['platform-linux.cc', 'platform-posix.cc'],

60
deps/v8/src/api.cc

@ -2290,7 +2290,7 @@ void v8::Object::SetIndexedPropertiesToPixelData(uint8_t* data, int length) {
ON_BAILOUT("v8::SetElementsToPixelData()", return); ON_BAILOUT("v8::SetElementsToPixelData()", return);
ENTER_V8; ENTER_V8;
HandleScope scope; HandleScope scope;
if (!ApiCheck(i::Smi::IsValid(length), if (!ApiCheck(length <= i::PixelArray::kMaxLength,
"v8::Object::SetIndexedPropertiesToPixelData()", "v8::Object::SetIndexedPropertiesToPixelData()",
"length exceeds max acceptable value")) { "length exceeds max acceptable value")) {
return; return;
@ -2578,7 +2578,16 @@ void v8::Object::SetInternalField(int index, v8::Handle<Value> value) {
void v8::Object::SetPointerInInternalField(int index, void* value) { void v8::Object::SetPointerInInternalField(int index, void* value) {
SetInternalField(index, External::Wrap(value)); i::Object* as_object = reinterpret_cast<i::Object*>(value);
if (as_object->IsSmi()) {
Utils::OpenHandle(this)->SetInternalField(index, as_object);
return;
}
HandleScope scope;
i::Handle<i::Proxy> proxy =
i::Factory::NewProxy(reinterpret_cast<i::Address>(value), i::TENURED);
if (!proxy.is_null())
Utils::OpenHandle(this)->SetInternalField(index, *proxy);
} }
@ -2760,7 +2769,9 @@ v8::Local<v8::Context> Context::GetEntered() {
v8::Local<v8::Context> Context::GetCurrent() { v8::Local<v8::Context> Context::GetCurrent() {
if (IsDeadCheck("v8::Context::GetCurrent()")) return Local<Context>(); if (IsDeadCheck("v8::Context::GetCurrent()")) return Local<Context>();
i::Handle<i::Context> context(i::Top::global_context()); i::Handle<i::Object> current = i::Top::global_context();
if (current.is_null()) return Local<Context>();
i::Handle<i::Context> context = i::Handle<i::Context>::cast(current);
return Utils::ToLocal(context); return Utils::ToLocal(context);
} }
@ -2837,36 +2848,39 @@ static void* ExternalValueImpl(i::Handle<i::Object> obj) {
} }
static const intptr_t kAlignedPointerMask = 3;
Local<Value> v8::External::Wrap(void* data) { Local<Value> v8::External::Wrap(void* data) {
STATIC_ASSERT(sizeof(data) == sizeof(i::Address)); STATIC_ASSERT(sizeof(data) == sizeof(i::Address));
LOG_API("External::Wrap"); LOG_API("External::Wrap");
EnsureInitialized("v8::External::Wrap()"); EnsureInitialized("v8::External::Wrap()");
ENTER_V8; ENTER_V8;
if ((reinterpret_cast<intptr_t>(data) & kAlignedPointerMask) == 0) { i::Object* as_object = reinterpret_cast<i::Object*>(data);
uintptr_t data_ptr = reinterpret_cast<uintptr_t>(data); if (as_object->IsSmi()) {
intptr_t data_value = return Utils::ToLocal(i::Handle<i::Object>(as_object));
static_cast<intptr_t>(data_ptr >> i::Internals::kAlignedPointerShift);
STATIC_ASSERT(sizeof(data_ptr) == sizeof(data_value));
if (i::Smi::IsIntptrValid(data_value)) {
i::Handle<i::Object> obj(i::Smi::FromIntptr(data_value));
return Utils::ToLocal(obj);
}
} }
return ExternalNewImpl(data); return ExternalNewImpl(data);
} }
void* v8::Object::SlowGetPointerFromInternalField(int index) {
i::Handle<i::JSObject> obj = Utils::OpenHandle(this);
i::Object* value = obj->GetInternalField(index);
if (value->IsSmi()) {
return value;
} else if (value->IsProxy()) {
return reinterpret_cast<void*>(i::Proxy::cast(value)->proxy());
} else {
return NULL;
}
}
void* v8::External::FullUnwrap(v8::Handle<v8::Value> wrapper) { void* v8::External::FullUnwrap(v8::Handle<v8::Value> wrapper) {
if (IsDeadCheck("v8::External::Unwrap()")) return 0; if (IsDeadCheck("v8::External::Unwrap()")) return 0;
i::Handle<i::Object> obj = Utils::OpenHandle(*wrapper); i::Handle<i::Object> obj = Utils::OpenHandle(*wrapper);
void* result; void* result;
if (obj->IsSmi()) { if (obj->IsSmi()) {
// The external value was an aligned pointer. // The external value was an aligned pointer.
uintptr_t value = static_cast<uintptr_t>( result = *obj;
i::Smi::cast(*obj)->value()) << i::Internals::kAlignedPointerShift;
result = reinterpret_cast<void*>(value);
} else if (obj->IsProxy()) { } else if (obj->IsProxy()) {
result = ExternalValueImpl(obj); result = ExternalValueImpl(obj);
} else { } else {
@ -2912,6 +2926,18 @@ Local<String> v8::String::New(const char* data, int length) {
} }
Local<String> v8::String::Concat(Handle<String> left, Handle<String> right) {
EnsureInitialized("v8::String::New()");
LOG_API("String::New(char)");
ENTER_V8;
i::Handle<i::String> left_string = Utils::OpenHandle(*left);
i::Handle<i::String> right_string = Utils::OpenHandle(*right);
i::Handle<i::String> result = i::Factory::NewConsString(left_string,
right_string);
return Utils::ToLocal(result);
}
Local<String> v8::String::NewUndetectable(const char* data, int length) { Local<String> v8::String::NewUndetectable(const char* data, int length) {
EnsureInitialized("v8::String::NewUndetectable()"); EnsureInitialized("v8::String::NewUndetectable()");
LOG_API("String::NewUndetectable(char)"); LOG_API("String::NewUndetectable(char)");

8
deps/v8/src/arm/assembler-arm-inl.h

@ -110,7 +110,7 @@ Address* RelocInfo::target_reference_address() {
Address RelocInfo::call_address() { Address RelocInfo::call_address() {
ASSERT(IsCallInstruction()); ASSERT(IsPatchedReturnSequence());
// The 2 instructions offset assumes patched return sequence. // The 2 instructions offset assumes patched return sequence.
ASSERT(IsJSReturn(rmode())); ASSERT(IsJSReturn(rmode()));
return Memory::Address_at(pc_ + 2 * Assembler::kInstrSize); return Memory::Address_at(pc_ + 2 * Assembler::kInstrSize);
@ -118,7 +118,7 @@ Address RelocInfo::call_address() {
void RelocInfo::set_call_address(Address target) { void RelocInfo::set_call_address(Address target) {
ASSERT(IsCallInstruction()); ASSERT(IsPatchedReturnSequence());
// The 2 instructions offset assumes patched return sequence. // The 2 instructions offset assumes patched return sequence.
ASSERT(IsJSReturn(rmode())); ASSERT(IsJSReturn(rmode()));
Memory::Address_at(pc_ + 2 * Assembler::kInstrSize) = target; Memory::Address_at(pc_ + 2 * Assembler::kInstrSize) = target;
@ -131,7 +131,7 @@ Object* RelocInfo::call_object() {
Object** RelocInfo::call_object_address() { Object** RelocInfo::call_object_address() {
ASSERT(IsCallInstruction()); ASSERT(IsPatchedReturnSequence());
// The 2 instructions offset assumes patched return sequence. // The 2 instructions offset assumes patched return sequence.
ASSERT(IsJSReturn(rmode())); ASSERT(IsJSReturn(rmode()));
return reinterpret_cast<Object**>(pc_ + 2 * Assembler::kInstrSize); return reinterpret_cast<Object**>(pc_ + 2 * Assembler::kInstrSize);
@ -143,7 +143,7 @@ void RelocInfo::set_call_object(Object* target) {
} }
bool RelocInfo::IsCallInstruction() { bool RelocInfo::IsPatchedReturnSequence() {
// On ARM a "call instruction" is actually two instructions. // On ARM a "call instruction" is actually two instructions.
// mov lr, pc // mov lr, pc
// ldr pc, [pc, #XXX] // ldr pc, [pc, #XXX]

319
deps/v8/src/arm/codegen-arm.cc

@ -1539,191 +1539,200 @@ void CodeGenerator::VisitSwitchStatement(SwitchStatement* node) {
} }
void CodeGenerator::VisitLoopStatement(LoopStatement* node) { void CodeGenerator::VisitDoWhileStatement(DoWhileStatement* node) {
#ifdef DEBUG #ifdef DEBUG
int original_height = frame_->height(); int original_height = frame_->height();
#endif #endif
VirtualFrame::SpilledScope spilled_scope; VirtualFrame::SpilledScope spilled_scope;
Comment cmnt(masm_, "[ LoopStatement"); Comment cmnt(masm_, "[ DoWhileStatement");
CodeForStatementPosition(node); CodeForStatementPosition(node);
node->break_target()->set_direction(JumpTarget::FORWARD_ONLY); node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);
JumpTarget body(JumpTarget::BIDIRECTIONAL);
// Simple condition analysis. ALWAYS_TRUE and ALWAYS_FALSE represent a
// known result for the test expression, with no side effects. // Label the top of the loop for the backward CFG edge. If the test
enum { ALWAYS_TRUE, ALWAYS_FALSE, DONT_KNOW } info = DONT_KNOW; // is always true we can use the continue target, and if the test is
if (node->cond() == NULL) { // always false there is no need.
ASSERT(node->type() == LoopStatement::FOR_LOOP); ConditionAnalysis info = AnalyzeCondition(node->cond());
info = ALWAYS_TRUE; switch (info) {
} else { case ALWAYS_TRUE:
Literal* lit = node->cond()->AsLiteral(); node->continue_target()->set_direction(JumpTarget::BIDIRECTIONAL);
if (lit != NULL) { node->continue_target()->Bind();
if (lit->IsTrue()) { break;
info = ALWAYS_TRUE; case ALWAYS_FALSE:
} else if (lit->IsFalse()) { node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
info = ALWAYS_FALSE; break;
} case DONT_KNOW:
} node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
body.Bind();
break;
} }
switch (node->type()) { CheckStack(); // TODO(1222600): ignore if body contains calls.
case LoopStatement::DO_LOOP: { VisitAndSpill(node->body());
JumpTarget body(JumpTarget::BIDIRECTIONAL);
// Label the top of the loop for the backward CFG edge. If the test // Compile the test.
// is always true we can use the continue target, and if the test is switch (info) {
// always false there is no need. case ALWAYS_TRUE:
if (info == ALWAYS_TRUE) { // If control can fall off the end of the body, jump back to the
node->continue_target()->set_direction(JumpTarget::BIDIRECTIONAL); // top.
if (has_valid_frame()) {
node->continue_target()->Jump();
}
break;
case ALWAYS_FALSE:
// If we have a continue in the body, we only have to bind its
// jump target.
if (node->continue_target()->is_linked()) {
node->continue_target()->Bind(); node->continue_target()->Bind();
} else if (info == ALWAYS_FALSE) {
node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
} else {
ASSERT(info == DONT_KNOW);
node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
body.Bind();
} }
break;
CheckStack(); // TODO(1222600): ignore if body contains calls. case DONT_KNOW:
VisitAndSpill(node->body()); // We have to compile the test expression if it can be reached by
// control flow falling out of the body or via continue.
// Compile the test. if (node->continue_target()->is_linked()) {
if (info == ALWAYS_TRUE) { node->continue_target()->Bind();
if (has_valid_frame()) { }
// If control can fall off the end of the body, jump back to the if (has_valid_frame()) {
// top. LoadConditionAndSpill(node->cond(), NOT_INSIDE_TYPEOF,
node->continue_target()->Jump(); &body, node->break_target(), true);
}
} else if (info == ALWAYS_FALSE) {
// If we have a continue in the body, we only have to bind its jump
// target.
if (node->continue_target()->is_linked()) {
node->continue_target()->Bind();
}
} else {
ASSERT(info == DONT_KNOW);
// We have to compile the test expression if it can be reached by
// control flow falling out of the body or via continue.
if (node->continue_target()->is_linked()) {
node->continue_target()->Bind();
}
if (has_valid_frame()) { if (has_valid_frame()) {
LoadConditionAndSpill(node->cond(), NOT_INSIDE_TYPEOF, // A invalid frame here indicates that control did not
&body, node->break_target(), true); // fall out of the test expression.
if (has_valid_frame()) { Branch(true, &body);
// A invalid frame here indicates that control did not
// fall out of the test expression.
Branch(true, &body);
}
} }
} }
break; break;
} }
case LoopStatement::WHILE_LOOP: { if (node->break_target()->is_linked()) {
// If the test is never true and has no side effects there is no need node->break_target()->Bind();
// to compile the test or body. }
if (info == ALWAYS_FALSE) break; ASSERT(!has_valid_frame() || frame_->height() == original_height);
}
// Label the top of the loop with the continue target for the backward
// CFG edge.
node->continue_target()->set_direction(JumpTarget::BIDIRECTIONAL);
node->continue_target()->Bind();
if (info == DONT_KNOW) { void CodeGenerator::VisitWhileStatement(WhileStatement* node) {
JumpTarget body; #ifdef DEBUG
LoadConditionAndSpill(node->cond(), NOT_INSIDE_TYPEOF, int original_height = frame_->height();
&body, node->break_target(), true); #endif
if (has_valid_frame()) { VirtualFrame::SpilledScope spilled_scope;
// A NULL frame indicates that control did not fall out of the Comment cmnt(masm_, "[ WhileStatement");
// test expression. CodeForStatementPosition(node);
Branch(false, node->break_target());
}
if (has_valid_frame() || body.is_linked()) {
body.Bind();
}
}
if (has_valid_frame()) { // If the test is never true and has no side effects there is no need
CheckStack(); // TODO(1222600): ignore if body contains calls. // to compile the test or body.
VisitAndSpill(node->body()); ConditionAnalysis info = AnalyzeCondition(node->cond());
if (info == ALWAYS_FALSE) return;
// If control flow can fall out of the body, jump back to the top. node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);
if (has_valid_frame()) {
node->continue_target()->Jump(); // Label the top of the loop with the continue target for the backward
} // CFG edge.
} node->continue_target()->set_direction(JumpTarget::BIDIRECTIONAL);
break; node->continue_target()->Bind();
if (info == DONT_KNOW) {
JumpTarget body;
LoadConditionAndSpill(node->cond(), NOT_INSIDE_TYPEOF,
&body, node->break_target(), true);
if (has_valid_frame()) {
// A NULL frame indicates that control did not fall out of the
// test expression.
Branch(false, node->break_target());
} }
if (has_valid_frame() || body.is_linked()) {
body.Bind();
}
}
case LoopStatement::FOR_LOOP: { if (has_valid_frame()) {
JumpTarget loop(JumpTarget::BIDIRECTIONAL); CheckStack(); // TODO(1222600): ignore if body contains calls.
VisitAndSpill(node->body());
if (node->init() != NULL) { // If control flow can fall out of the body, jump back to the top.
VisitAndSpill(node->init()); if (has_valid_frame()) {
} node->continue_target()->Jump();
}
}
if (node->break_target()->is_linked()) {
node->break_target()->Bind();
}
ASSERT(!has_valid_frame() || frame_->height() == original_height);
}
// There is no need to compile the test or body.
if (info == ALWAYS_FALSE) break;
// If there is no update statement, label the top of the loop with the void CodeGenerator::VisitForStatement(ForStatement* node) {
// continue target, otherwise with the loop target. #ifdef DEBUG
if (node->next() == NULL) { int original_height = frame_->height();
node->continue_target()->set_direction(JumpTarget::BIDIRECTIONAL); #endif
node->continue_target()->Bind(); VirtualFrame::SpilledScope spilled_scope;
} else { Comment cmnt(masm_, "[ ForStatement");
node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY); CodeForStatementPosition(node);
loop.Bind(); if (node->init() != NULL) {
} VisitAndSpill(node->init());
}
// If the test is always true, there is no need to compile it. // If the test is never true there is no need to compile the test or
if (info == DONT_KNOW) { // body.
JumpTarget body; ConditionAnalysis info = AnalyzeCondition(node->cond());
LoadConditionAndSpill(node->cond(), NOT_INSIDE_TYPEOF, if (info == ALWAYS_FALSE) return;
&body, node->break_target(), true);
if (has_valid_frame()) { node->break_target()->set_direction(JumpTarget::FORWARD_ONLY);
Branch(false, node->break_target());
} // If there is no update statement, label the top of the loop with the
if (has_valid_frame() || body.is_linked()) { // continue target, otherwise with the loop target.
body.Bind(); JumpTarget loop(JumpTarget::BIDIRECTIONAL);
} if (node->next() == NULL) {
} node->continue_target()->set_direction(JumpTarget::BIDIRECTIONAL);
node->continue_target()->Bind();
} else {
node->continue_target()->set_direction(JumpTarget::FORWARD_ONLY);
loop.Bind();
}
// If the test is always true, there is no need to compile it.
if (info == DONT_KNOW) {
JumpTarget body;
LoadConditionAndSpill(node->cond(), NOT_INSIDE_TYPEOF,
&body, node->break_target(), true);
if (has_valid_frame()) {
Branch(false, node->break_target());
}
if (has_valid_frame() || body.is_linked()) {
body.Bind();
}
}
if (has_valid_frame()) {
CheckStack(); // TODO(1222600): ignore if body contains calls.
VisitAndSpill(node->body());
if (node->next() == NULL) {
// If there is no update statement and control flow can fall out
// of the loop, jump directly to the continue label.
if (has_valid_frame()) { if (has_valid_frame()) {
CheckStack(); // TODO(1222600): ignore if body contains calls. node->continue_target()->Jump();
VisitAndSpill(node->body()); }
} else {
if (node->next() == NULL) { // If there is an update statement and control flow can reach it
// If there is no update statement and control flow can fall out // via falling out of the body of the loop or continuing, we
// of the loop, jump directly to the continue label. // compile the update statement.
if (has_valid_frame()) { if (node->continue_target()->is_linked()) {
node->continue_target()->Jump(); node->continue_target()->Bind();
} }
} else { if (has_valid_frame()) {
// If there is an update statement and control flow can reach it // Record source position of the statement as this code which is
// via falling out of the body of the loop or continuing, we // after the code for the body actually belongs to the loop
// compile the update statement. // statement and not the body.
if (node->continue_target()->is_linked()) { CodeForStatementPosition(node);
node->continue_target()->Bind(); VisitAndSpill(node->next());
} loop.Jump();
if (has_valid_frame()) {
// Record source position of the statement as this code which is
// after the code for the body actually belongs to the loop
// statement and not the body.
CodeForStatementPosition(node);
VisitAndSpill(node->next());
loop.Jump();
}
}
} }
break;
} }
} }
if (node->break_target()->is_linked()) { if (node->break_target()->is_linked()) {
node->break_target()->Bind(); node->break_target()->Bind();
} }
node->continue_target()->Unuse();
node->break_target()->Unuse();
ASSERT(!has_valid_frame() || frame_->height() == original_height); ASSERT(!has_valid_frame() || frame_->height() == original_height);
} }
@ -1918,12 +1927,12 @@ void CodeGenerator::VisitForInStatement(ForInStatement* node) {
} }
void CodeGenerator::VisitTryCatch(TryCatch* node) { void CodeGenerator::VisitTryCatchStatement(TryCatchStatement* node) {
#ifdef DEBUG #ifdef DEBUG
int original_height = frame_->height(); int original_height = frame_->height();
#endif #endif
VirtualFrame::SpilledScope spilled_scope; VirtualFrame::SpilledScope spilled_scope;
Comment cmnt(masm_, "[ TryCatch"); Comment cmnt(masm_, "[ TryCatchStatement");
CodeForStatementPosition(node); CodeForStatementPosition(node);
JumpTarget try_block; JumpTarget try_block;
@ -2043,12 +2052,12 @@ void CodeGenerator::VisitTryCatch(TryCatch* node) {
} }
void CodeGenerator::VisitTryFinally(TryFinally* node) { void CodeGenerator::VisitTryFinallyStatement(TryFinallyStatement* node) {
#ifdef DEBUG #ifdef DEBUG
int original_height = frame_->height(); int original_height = frame_->height();
#endif #endif
VirtualFrame::SpilledScope spilled_scope; VirtualFrame::SpilledScope spilled_scope;
Comment cmnt(masm_, "[ TryFinally"); Comment cmnt(masm_, "[ TryFinallyStatement");
CodeForStatementPosition(node); CodeForStatementPosition(node);
// State: Used to keep track of reason for entering the finally // State: Used to keep track of reason for entering the finally

19
deps/v8/src/arm/codegen-arm.h

@ -147,6 +147,15 @@ class CodeGenerator: public AstVisitor {
Handle<Script> script, Handle<Script> script,
bool is_eval); bool is_eval);
// Printing of AST, etc. as requested by flags.
static void MakeCodePrologue(FunctionLiteral* fun);
// Allocate and install the code.
static Handle<Code> MakeCodeEpilogue(FunctionLiteral* fun,
MacroAssembler* masm,
Code::Flags flags,
Handle<Script> script);
#ifdef ENABLE_LOGGING_AND_PROFILING #ifdef ENABLE_LOGGING_AND_PROFILING
static bool ShouldGenerateLog(Expression* type); static bool ShouldGenerateLog(Expression* type);
#endif #endif
@ -156,6 +165,8 @@ class CodeGenerator: public AstVisitor {
bool is_toplevel, bool is_toplevel,
Handle<Script> script); Handle<Script> script);
static void RecordPositions(MacroAssembler* masm, int pos);
// Accessors // Accessors
MacroAssembler* masm() { return masm_; } MacroAssembler* masm() { return masm_; }
@ -365,6 +376,14 @@ class CodeGenerator: public AstVisitor {
inline void GenerateMathSin(ZoneList<Expression*>* args); inline void GenerateMathSin(ZoneList<Expression*>* args);
inline void GenerateMathCos(ZoneList<Expression*>* args); inline void GenerateMathCos(ZoneList<Expression*>* args);
// Simple condition analysis.
enum ConditionAnalysis {
ALWAYS_TRUE,
ALWAYS_FALSE,
DONT_KNOW
};
ConditionAnalysis AnalyzeCondition(Expression* cond);
// Methods used to indicate which source code is generated for. Source // Methods used to indicate which source code is generated for. Source
// positions are collected by the assembler and emitted with the relocation // positions are collected by the assembler and emitted with the relocation
// information. // information.

2
deps/v8/src/arm/debug-arm.cc

@ -68,7 +68,7 @@ void BreakLocationIterator::ClearDebugBreakAtReturn() {
// A debug break in the exit code is identified by a call. // A debug break in the exit code is identified by a call.
bool Debug::IsDebugBreakAtReturn(RelocInfo* rinfo) { bool Debug::IsDebugBreakAtReturn(RelocInfo* rinfo) {
ASSERT(RelocInfo::IsJSReturn(rinfo->rmode())); ASSERT(RelocInfo::IsJSReturn(rinfo->rmode()));
return rinfo->IsCallInstruction(); return rinfo->IsPatchedReturnSequence();
} }

176
deps/v8/src/arm/fast-codegen-arm.cc

@ -0,0 +1,176 @@
// Copyright 2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include "v8.h"
#include "codegen-inl.h"
#include "fast-codegen.h"
namespace v8 {
namespace internal {
#define __ ACCESS_MASM(masm_)
// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right. The actual
// argument count matches the formal parameter count expected by the
// function.
//
// The live registers are:
// o r1: the JS function object being called (ie, ourselves)
// o cp: our context
// o fp: our caller's frame pointer
// o sp: stack pointer
// o lr: return address
//
// The function builds a JS frame. Please see JavaScriptFrameConstants in
// frames-arm.h for its layout.
void FastCodeGenerator::Generate(FunctionLiteral* fun) {
function_ = fun;
// ARM does NOT call SetFunctionPosition.
__ stm(db_w, sp, r1.bit() | cp.bit() | fp.bit() | lr.bit());
// Adjust fp to point to caller's fp.
__ add(fp, sp, Operand(2 * kPointerSize));
{ Comment cmnt(masm_, "[ Allocate locals");
int locals_count = fun->scope()->num_stack_slots();
if (locals_count > 0) {
__ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
}
if (FLAG_check_stack) {
__ LoadRoot(r2, Heap::kStackLimitRootIndex);
}
for (int i = 0; i < locals_count; i++) {
__ push(ip);
}
}
if (FLAG_check_stack) {
// Put the lr setup instruction in the delay slot. The kInstrSize is
// added to the implicit 8 byte offset that always applies to operations
// with pc and gives a return address 12 bytes down.
Comment cmnt(masm_, "[ Stack check");
__ add(lr, pc, Operand(Assembler::kInstrSize));
__ cmp(sp, Operand(r2));
StackCheckStub stub;
__ mov(pc,
Operand(reinterpret_cast<intptr_t>(stub.GetCode().location()),
RelocInfo::CODE_TARGET),
LeaveCC,
lo);
}
{ Comment cmnt(masm_, "[ Body");
VisitStatements(fun->body());
}
{ Comment cmnt(masm_, "[ return <undefined>;");
// Emit a 'return undefined' in case control fell off the end of the
// body.
__ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
SetReturnPosition(fun);
__ RecordJSReturn();
__ mov(sp, fp);
__ ldm(ia_w, sp, fp.bit() | lr.bit());
int num_parameters = function_->scope()->num_parameters();
__ add(sp, sp, Operand((num_parameters + 1) * kPointerSize));
__ Jump(lr);
}
}
void FastCodeGenerator::VisitExpressionStatement(ExpressionStatement* stmt) {
Comment cmnt(masm_, "[ ExpressionStatement");
SetStatementPosition(stmt);
Visit(stmt->expression());
}
void FastCodeGenerator::VisitReturnStatement(ReturnStatement* stmt) {
Comment cmnt(masm_, "[ ReturnStatement");
SetStatementPosition(stmt);
Visit(stmt->expression());
__ pop(r0);
__ RecordJSReturn();
__ mov(sp, fp);
__ ldm(ia_w, sp, fp.bit() | lr.bit());
int num_parameters = function_->scope()->num_parameters();
__ add(sp, sp, Operand((num_parameters + 1) * kPointerSize));
__ Jump(lr);
}
void FastCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
Comment cmnt(masm_, "[ VariableProxy");
Expression* rewrite = expr->var()->rewrite();
ASSERT(rewrite != NULL);
Slot* slot = rewrite->AsSlot();
ASSERT(slot != NULL);
{ Comment cmnt(masm_, "[ Slot");
if (expr->location().is_temporary()) {
__ ldr(ip, MemOperand(fp, SlotOffset(slot)));
__ push(ip);
} else {
ASSERT(expr->location().is_nowhere());
}
}
}
void FastCodeGenerator::VisitLiteral(Literal* expr) {
Comment cmnt(masm_, "[ Literal");
if (expr->location().is_temporary()) {
__ mov(ip, Operand(expr->handle()));
__ push(ip);
} else {
ASSERT(expr->location().is_nowhere());
}
}
void FastCodeGenerator::VisitAssignment(Assignment* expr) {
Comment cmnt(masm_, "[ Assignment");
ASSERT(expr->op() == Token::ASSIGN || expr->op() == Token::INIT_VAR);
Visit(expr->value());
Variable* var = expr->target()->AsVariableProxy()->AsVariable();
ASSERT(var != NULL && var->slot() != NULL);
if (expr->location().is_temporary()) {
__ ldr(ip, MemOperand(sp));
} else {
ASSERT(expr->location().is_nowhere());
__ pop(ip);
}
__ str(ip, MemOperand(fp, SlotOffset(var->slot())));
}
} } // namespace v8::internal

1
deps/v8/src/arm/macro-assembler-arm.h

@ -246,7 +246,6 @@ class MacroAssembler: public Assembler {
// Call a code stub. // Call a code stub.
void CallStub(CodeStub* stub, Condition cond = al); void CallStub(CodeStub* stub, Condition cond = al);
void CallJSExitStub(CodeStub* stub);
// Return from a code stub after popping its arguments. // Return from a code stub after popping its arguments.
void StubReturn(int argc); void StubReturn(int argc);

14
deps/v8/src/array.js

@ -1058,6 +1058,10 @@ function ArrayReduceRight(callback, current) {
return current; return current;
} }
// ES5, 15.4.3.2
function ArrayIsArray(obj) {
return IS_ARRAY(obj);
}
// ------------------------------------------------------------------- // -------------------------------------------------------------------
@ -1075,6 +1079,11 @@ function SetupArray() {
// object. // object.
%SetProperty($Array.prototype, "constructor", $Array, DONT_ENUM); %SetProperty($Array.prototype, "constructor", $Array, DONT_ENUM);
// Setup non-enumerable functions on the Array object.
InstallFunctions($Array, DONT_ENUM, $Array(
"isArray", ArrayIsArray
));
// Setup non-enumerable functions of the Array.prototype object and // Setup non-enumerable functions of the Array.prototype object and
// set their names. // set their names.
InstallFunctionsOnHiddenPrototype($Array.prototype, DONT_ENUM, $Array( InstallFunctionsOnHiddenPrototype($Array.prototype, DONT_ENUM, $Array(
@ -1098,8 +1107,9 @@ function SetupArray() {
"indexOf", ArrayIndexOf, "indexOf", ArrayIndexOf,
"lastIndexOf", ArrayLastIndexOf, "lastIndexOf", ArrayLastIndexOf,
"reduce", ArrayReduce, "reduce", ArrayReduce,
"reduceRight", ArrayReduceRight)); "reduceRight", ArrayReduceRight
));
// Manipulate the length of some of the functions to meet // Manipulate the length of some of the functions to meet
// expectations set by ECMA-262 or Mozilla. // expectations set by ECMA-262 or Mozilla.
UpdateFunctionLengths({ UpdateFunctionLengths({

3
deps/v8/src/assembler.cc

@ -343,9 +343,6 @@ void RelocIterator::next() {
if (SetMode(RelocInfo::EMBEDDED_OBJECT)) return; if (SetMode(RelocInfo::EMBEDDED_OBJECT)) return;
} else if (tag == kCodeTargetTag) { } else if (tag == kCodeTargetTag) {
ReadTaggedPC(); ReadTaggedPC();
if (*(reinterpret_cast<int*>(rinfo_.pc())) == 0x61) {
tag = 0;
}
if (SetMode(RelocInfo::CODE_TARGET)) return; if (SetMode(RelocInfo::CODE_TARGET)) return;
} else if (tag == kPositionTag) { } else if (tag == kPositionTag) {
ReadTaggedPC(); ReadTaggedPC();

8
deps/v8/src/assembler.h

@ -217,10 +217,10 @@ class RelocInfo BASE_EMBEDDED {
// Patch the code with a call. // Patch the code with a call.
void PatchCodeWithCall(Address target, int guard_bytes); void PatchCodeWithCall(Address target, int guard_bytes);
// Check whether the current instruction is currently a call
// sequence (whether naturally or a return sequence overwritten // Check whether this return sequence has been patched
// to enter the debugger). // with a call to the debugger.
INLINE(bool IsCallInstruction()); INLINE(bool IsPatchedReturnSequence());
#ifdef ENABLE_DISASSEMBLER #ifdef ENABLE_DISASSEMBLER
// Printing // Printing

21
deps/v8/src/ast.cc

@ -91,20 +91,6 @@ void VariableProxy::BindTo(Variable* var) {
} }
#ifdef DEBUG
const char* LoopStatement::OperatorString() const {
switch (type()) {
case DO_LOOP: return "DO";
case FOR_LOOP: return "FOR";
case WHILE_LOOP: return "WHILE";
}
return NULL;
}
#endif // DEBUG
Token::Value Assignment::binary_op() const { Token::Value Assignment::binary_op() const {
switch (op_) { switch (op_) {
case Token::ASSIGN_BIT_OR: return Token::BIT_OR; case Token::ASSIGN_BIT_OR: return Token::BIT_OR;
@ -187,6 +173,13 @@ void TargetCollector::AddTarget(BreakTarget* target) {
// Implementation of AstVisitor // Implementation of AstVisitor
void AstVisitor::VisitDeclarations(ZoneList<Declaration*>* declarations) {
for (int i = 0; i < declarations->length(); i++) {
Visit(declarations->at(i));
}
}
void AstVisitor::VisitStatements(ZoneList<Statement*>* statements) { void AstVisitor::VisitStatements(ZoneList<Statement*>* statements) {
for (int i = 0; i < statements->length(); i++) { for (int i = 0; i < statements->length(); i++) {
Visit(statements->at(i)); Visit(statements->at(i));

97
deps/v8/src/ast.h

@ -28,14 +28,14 @@
#ifndef V8_AST_H_ #ifndef V8_AST_H_
#define V8_AST_H_ #define V8_AST_H_
#include "location.h"
#include "execution.h" #include "execution.h"
#include "factory.h" #include "factory.h"
#include "jsregexp.h"
#include "jump-target.h"
#include "runtime.h" #include "runtime.h"
#include "token.h" #include "token.h"
#include "variables.h" #include "variables.h"
#include "macro-assembler.h"
#include "jsregexp.h"
#include "jump-target.h"
namespace v8 { namespace v8 {
namespace internal { namespace internal {
@ -64,10 +64,12 @@ namespace internal {
V(WithEnterStatement) \ V(WithEnterStatement) \
V(WithExitStatement) \ V(WithExitStatement) \
V(SwitchStatement) \ V(SwitchStatement) \
V(LoopStatement) \ V(DoWhileStatement) \
V(WhileStatement) \
V(ForStatement) \
V(ForInStatement) \ V(ForInStatement) \
V(TryCatch) \ V(TryCatchStatement) \
V(TryFinally) \ V(TryFinallyStatement) \
V(DebuggerStatement) V(DebuggerStatement)
#define EXPRESSION_NODE_LIST(V) \ #define EXPRESSION_NODE_LIST(V) \
@ -160,6 +162,8 @@ class Statement: public AstNode {
class Expression: public AstNode { class Expression: public AstNode {
public: public:
Expression() : location_(Location::Temporary()) {}
virtual Expression* AsExpression() { return this; } virtual Expression* AsExpression() { return this; }
virtual bool IsValidJSON() { return false; } virtual bool IsValidJSON() { return false; }
@ -173,8 +177,12 @@ class Expression: public AstNode {
// Static type information for this expression. // Static type information for this expression.
SmiAnalysis* type() { return &type_; } SmiAnalysis* type() { return &type_; }
Location location() { return location_; }
void set_location(Location loc) { location_ = loc; }
private: private:
SmiAnalysis type_; SmiAnalysis type_;
Location location_;
}; };
@ -294,13 +302,59 @@ class IterationStatement: public BreakableStatement {
}; };
class LoopStatement: public IterationStatement { class DoWhileStatement: public IterationStatement {
public: public:
enum Type { DO_LOOP, FOR_LOOP, WHILE_LOOP }; explicit DoWhileStatement(ZoneStringList* labels)
: IterationStatement(labels), cond_(NULL) {
}
void Initialize(Expression* cond, Statement* body) {
IterationStatement::Initialize(body);
cond_ = cond;
}
virtual void Accept(AstVisitor* v);
LoopStatement(ZoneStringList* labels, Type type) Expression* cond() const { return cond_; }
private:
Expression* cond_;
};
class WhileStatement: public IterationStatement {
public:
explicit WhileStatement(ZoneStringList* labels)
: IterationStatement(labels),
cond_(NULL),
may_have_function_literal_(true) {
}
void Initialize(Expression* cond, Statement* body) {
IterationStatement::Initialize(body);
cond_ = cond;
}
virtual void Accept(AstVisitor* v);
Expression* cond() const { return cond_; }
bool may_have_function_literal() const {
return may_have_function_literal_;
}
private:
Expression* cond_;
// True if there is a function literal subexpression in the condition.
bool may_have_function_literal_;
friend class AstOptimizer;
};
class ForStatement: public IterationStatement {
public:
explicit ForStatement(ZoneStringList* labels)
: IterationStatement(labels), : IterationStatement(labels),
type_(type),
init_(NULL), init_(NULL),
cond_(NULL), cond_(NULL),
next_(NULL), next_(NULL),
@ -311,8 +365,6 @@ class LoopStatement: public IterationStatement {
Expression* cond, Expression* cond,
Statement* next, Statement* next,
Statement* body) { Statement* body) {
ASSERT(init == NULL || type_ == FOR_LOOP);
ASSERT(next == NULL || type_ == FOR_LOOP);
IterationStatement::Initialize(body); IterationStatement::Initialize(body);
init_ = init; init_ = init;
cond_ = cond; cond_ = cond;
@ -321,7 +373,6 @@ class LoopStatement: public IterationStatement {
virtual void Accept(AstVisitor* v); virtual void Accept(AstVisitor* v);
Type type() const { return type_; }
Statement* init() const { return init_; } Statement* init() const { return init_; }
Expression* cond() const { return cond_; } Expression* cond() const { return cond_; }
Statement* next() const { return next_; } Statement* next() const { return next_; }
@ -329,12 +380,7 @@ class LoopStatement: public IterationStatement {
return may_have_function_literal_; return may_have_function_literal_;
} }
#ifdef DEBUG
const char* OperatorString() const;
#endif
private: private:
Type type_;
Statement* init_; Statement* init_;
Expression* cond_; Expression* cond_;
Statement* next_; Statement* next_;
@ -569,9 +615,11 @@ class TryStatement: public Statement {
}; };
class TryCatch: public TryStatement { class TryCatchStatement: public TryStatement {
public: public:
TryCatch(Block* try_block, Expression* catch_var, Block* catch_block) TryCatchStatement(Block* try_block,
Expression* catch_var,
Block* catch_block)
: TryStatement(try_block), : TryStatement(try_block),
catch_var_(catch_var), catch_var_(catch_var),
catch_block_(catch_block) { catch_block_(catch_block) {
@ -589,9 +637,9 @@ class TryCatch: public TryStatement {
}; };
class TryFinally: public TryStatement { class TryFinallyStatement: public TryStatement {
public: public:
TryFinally(Block* try_block, Block* finally_block) TryFinallyStatement(Block* try_block, Block* finally_block)
: TryStatement(try_block), : TryStatement(try_block),
finally_block_(finally_block) { } finally_block_(finally_block) { }
@ -1212,7 +1260,6 @@ class FunctionLiteral: public Expression {
Scope* scope, Scope* scope,
ZoneList<Statement*>* body, ZoneList<Statement*>* body,
int materialized_literal_count, int materialized_literal_count,
bool contains_array_literal,
int expected_property_count, int expected_property_count,
bool has_only_this_property_assignments, bool has_only_this_property_assignments,
bool has_only_simple_this_property_assignments, bool has_only_simple_this_property_assignments,
@ -1225,7 +1272,6 @@ class FunctionLiteral: public Expression {
scope_(scope), scope_(scope),
body_(body), body_(body),
materialized_literal_count_(materialized_literal_count), materialized_literal_count_(materialized_literal_count),
contains_array_literal_(contains_array_literal),
expected_property_count_(expected_property_count), expected_property_count_(expected_property_count),
has_only_this_property_assignments_(has_only_this_property_assignments), has_only_this_property_assignments_(has_only_this_property_assignments),
has_only_simple_this_property_assignments_( has_only_simple_this_property_assignments_(
@ -1258,7 +1304,6 @@ class FunctionLiteral: public Expression {
bool is_expression() const { return is_expression_; } bool is_expression() const { return is_expression_; }
int materialized_literal_count() { return materialized_literal_count_; } int materialized_literal_count() { return materialized_literal_count_; }
bool contains_array_literal() { return contains_array_literal_; }
int expected_property_count() { return expected_property_count_; } int expected_property_count() { return expected_property_count_; }
bool has_only_this_property_assignments() { bool has_only_this_property_assignments() {
return has_only_this_property_assignments_; return has_only_this_property_assignments_;
@ -1293,7 +1338,6 @@ class FunctionLiteral: public Expression {
Scope* scope_; Scope* scope_;
ZoneList<Statement*>* body_; ZoneList<Statement*>* body_;
int materialized_literal_count_; int materialized_literal_count_;
bool contains_array_literal_;
int expected_property_count_; int expected_property_count_;
bool has_only_this_property_assignments_; bool has_only_this_property_assignments_;
bool has_only_simple_this_property_assignments_; bool has_only_simple_this_property_assignments_;
@ -1690,6 +1734,7 @@ class AstVisitor BASE_EMBEDDED {
void Visit(AstNode* node) { node->Accept(this); } void Visit(AstNode* node) { node->Accept(this); }
// Iteration // Iteration
virtual void VisitDeclarations(ZoneList<Declaration*>* declarations);
virtual void VisitStatements(ZoneList<Statement*>* statements); virtual void VisitStatements(ZoneList<Statement*>* statements);
virtual void VisitExpressions(ZoneList<Expression*>* expressions); virtual void VisitExpressions(ZoneList<Expression*>* expressions);

21
deps/v8/src/checks.h

@ -80,6 +80,27 @@ static inline void CheckEqualsHelper(const char* file, int line,
} }
} }
// Helper function used by the CHECK_EQ function when given int64_t
// arguments. Should not be called directly.
static inline void CheckEqualsHelper(const char* file, int line,
const char* expected_source,
int64_t expected,
const char* value_source,
int64_t value) {
if (expected != value) {
// Print int64_t values in hex, as two int32s,
// to avoid platform-dependencies.
V8_Fatal(file, line,
"CHECK_EQ(%s, %s) failed\n#"
" Expected: 0x%08x%08x\n# Found: 0x%08x%08x",
expected_source, value_source,
static_cast<uint32_t>(expected >> 32),
static_cast<uint32_t>(expected),
static_cast<uint32_t>(value >> 32),
static_cast<uint32_t>(value));
}
}
// Helper function used by the CHECK_NE function when given int // Helper function used by the CHECK_NE function when given int
// arguments. Should not be called directly. // arguments. Should not be called directly.

2
deps/v8/src/code-stubs.cc

@ -132,8 +132,6 @@ const char* CodeStub::MajorName(CodeStub::Major major_key) {
return "SetProperty"; return "SetProperty";
case InvokeBuiltin: case InvokeBuiltin:
return "InvokeBuiltin"; return "InvokeBuiltin";
case JSExit:
return "JSExit";
case ConvertToDouble: case ConvertToDouble:
return "ConvertToDouble"; return "ConvertToDouble";
case WriteInt32ToHeapNumber: case WriteInt32ToHeapNumber:

1
deps/v8/src/code-stubs.h

@ -56,7 +56,6 @@ class CodeStub BASE_EMBEDDED {
GetProperty, // ARM only GetProperty, // ARM only
SetProperty, // ARM only SetProperty, // ARM only
InvokeBuiltin, // ARM only InvokeBuiltin, // ARM only
JSExit, // ARM only
RegExpCEntry, // ARM only RegExpCEntry, // ARM only
NUMBER_OF_IDS NUMBER_OF_IDS
}; };

126
deps/v8/src/codegen.cc

@ -125,102 +125,114 @@ void CodeGenerator::DeleteFrame() {
} }
// Generate the code. Takes a function literal, generates code for it, assemble void CodeGenerator::MakeCodePrologue(FunctionLiteral* fun) {
// all the pieces into a Code object. This function is only to be called by
// the compiler.cc code.
Handle<Code> CodeGenerator::MakeCode(FunctionLiteral* flit,
Handle<Script> script,
bool is_eval) {
#ifdef ENABLE_DISASSEMBLER
bool print_code = Bootstrapper::IsActive()
? FLAG_print_builtin_code
: FLAG_print_code;
#endif
#ifdef DEBUG #ifdef DEBUG
bool print_source = false; bool print_source = false;
bool print_ast = false; bool print_ast = false;
bool print_json_ast = false;
const char* ftype; const char* ftype;
if (Bootstrapper::IsActive()) { if (Bootstrapper::IsActive()) {
print_source = FLAG_print_builtin_source; print_source = FLAG_print_builtin_source;
print_ast = FLAG_print_builtin_ast; print_ast = FLAG_print_builtin_ast;
print_json_ast = FLAG_print_builtin_json_ast;
ftype = "builtin"; ftype = "builtin";
} else { } else {
print_source = FLAG_print_source; print_source = FLAG_print_source;
print_ast = FLAG_print_ast; print_ast = FLAG_print_ast;
print_json_ast = FLAG_print_json_ast;
ftype = "user-defined"; ftype = "user-defined";
} }
if (FLAG_trace_codegen || print_source || print_ast) { if (FLAG_trace_codegen || print_source || print_ast) {
PrintF("*** Generate code for %s function: ", ftype); PrintF("*** Generate code for %s function: ", ftype);
flit->name()->ShortPrint(); fun->name()->ShortPrint();
PrintF(" ***\n"); PrintF(" ***\n");
} }
if (print_source) { if (print_source) {
PrintF("--- Source from AST ---\n%s\n", PrettyPrinter().PrintProgram(flit)); PrintF("--- Source from AST ---\n%s\n", PrettyPrinter().PrintProgram(fun));
} }
if (print_ast) { if (print_ast) {
PrintF("--- AST ---\n%s\n", AstPrinter().PrintProgram(flit)); PrintF("--- AST ---\n%s\n", AstPrinter().PrintProgram(fun));
} }
#endif // DEBUG
// Generate code. if (print_json_ast) {
const int initial_buffer_size = 4 * KB; JsonAstBuilder builder;
CodeGenerator cgen(initial_buffer_size, script, is_eval); PrintF("%s", builder.BuildProgram(fun));
CodeGeneratorScope scope(&cgen);
cgen.GenCode(flit);
if (cgen.HasStackOverflow()) {
ASSERT(!Top::has_pending_exception());
return Handle<Code>::null();
} }
#endif // DEBUG
}
// Allocate and install the code. Time the rest of this function as
// code creation. Handle<Code> CodeGenerator::MakeCodeEpilogue(FunctionLiteral* fun,
HistogramTimerScope timer(&Counters::code_creation); MacroAssembler* masm,
Code::Flags flags,
Handle<Script> script) {
// Allocate and install the code.
CodeDesc desc; CodeDesc desc;
cgen.masm()->GetCode(&desc); masm->GetCode(&desc);
ZoneScopeInfo sinfo(flit->scope()); ZoneScopeInfo sinfo(fun->scope());
InLoopFlag in_loop = (cgen.loop_nesting() != 0) ? IN_LOOP : NOT_IN_LOOP; Handle<Code> code =
Code::Flags flags = Code::ComputeFlags(Code::FUNCTION, in_loop); Factory::NewCode(desc, &sinfo, flags, masm->CodeObject());
Handle<Code> code = Factory::NewCode(desc,
&sinfo,
flags,
cgen.masm()->CodeObject());
// Add unresolved entries in the code to the fixup list. // Add unresolved entries in the code to the fixup list.
Bootstrapper::AddFixup(*code, cgen.masm()); Bootstrapper::AddFixup(*code, masm);
#ifdef ENABLE_DISASSEMBLER #ifdef ENABLE_DISASSEMBLER
bool print_code = Bootstrapper::IsActive()
? FLAG_print_builtin_code
: FLAG_print_code;
if (print_code) { if (print_code) {
// Print the source code if available. // Print the source code if available.
if (!script->IsUndefined() && !script->source()->IsUndefined()) { if (!script->IsUndefined() && !script->source()->IsUndefined()) {
PrintF("--- Raw source ---\n"); PrintF("--- Raw source ---\n");
StringInputBuffer stream(String::cast(script->source())); StringInputBuffer stream(String::cast(script->source()));
stream.Seek(flit->start_position()); stream.Seek(fun->start_position());
// flit->end_position() points to the last character in the stream. We // fun->end_position() points to the last character in the stream. We
// need to compensate by adding one to calculate the length. // need to compensate by adding one to calculate the length.
int source_len = flit->end_position() - flit->start_position() + 1; int source_len = fun->end_position() - fun->start_position() + 1;
for (int i = 0; i < source_len; i++) { for (int i = 0; i < source_len; i++) {
if (stream.has_more()) PrintF("%c", stream.GetNext()); if (stream.has_more()) PrintF("%c", stream.GetNext());
} }
PrintF("\n\n"); PrintF("\n\n");
} }
PrintF("--- Code ---\n"); PrintF("--- Code ---\n");
code->Disassemble(*flit->name()->ToCString()); code->Disassemble(*fun->name()->ToCString());
} }
#endif // ENABLE_DISASSEMBLER #endif // ENABLE_DISASSEMBLER
if (!code.is_null()) { if (!code.is_null()) {
Counters::total_compiled_code_size.Increment(code->instruction_size()); Counters::total_compiled_code_size.Increment(code->instruction_size());
} }
return code; return code;
} }
// Generate the code. Takes a function literal, generates code for it, assemble
// all the pieces into a Code object. This function is only to be called by
// the compiler.cc code.
Handle<Code> CodeGenerator::MakeCode(FunctionLiteral* fun,
Handle<Script> script,
bool is_eval) {
MakeCodePrologue(fun);
// Generate code.
const int kInitialBufferSize = 4 * KB;
CodeGenerator cgen(kInitialBufferSize, script, is_eval);
CodeGeneratorScope scope(&cgen);
cgen.GenCode(fun);
if (cgen.HasStackOverflow()) {
ASSERT(!Top::has_pending_exception());
return Handle<Code>::null();
}
InLoopFlag in_loop = (cgen.loop_nesting() != 0) ? IN_LOOP : NOT_IN_LOOP;
Code::Flags flags = Code::ComputeFlags(Code::FUNCTION, in_loop);
return MakeCodeEpilogue(fun, cgen.masm(), flags, script);
}
#ifdef ENABLE_LOGGING_AND_PROFILING #ifdef ENABLE_LOGGING_AND_PROFILING
bool CodeGenerator::ShouldGenerateLog(Expression* type) { bool CodeGenerator::ShouldGenerateLog(Expression* type) {
@ -314,7 +326,6 @@ Handle<JSFunction> CodeGenerator::BuildBoilerplate(FunctionLiteral* node) {
Handle<JSFunction> function = Handle<JSFunction> function =
Factory::NewFunctionBoilerplate(node->name(), Factory::NewFunctionBoilerplate(node->name(),
node->materialized_literal_count(), node->materialized_literal_count(),
node->contains_array_literal(),
code); code);
CodeGenerator::SetFunctionInfo(function, node, false, script_); CodeGenerator::SetFunctionInfo(function, node, false, script_);
@ -469,26 +480,45 @@ bool CodeGenerator::PatchInlineRuntimeEntry(Handle<String> name,
} }
static inline void RecordPositions(CodeGenerator* cgen, int pos) { // Simple condition analysis. ALWAYS_TRUE and ALWAYS_FALSE represent a
// known result for the test expression, with no side effects.
CodeGenerator::ConditionAnalysis CodeGenerator::AnalyzeCondition(
Expression* cond) {
if (cond == NULL) return ALWAYS_TRUE;
Literal* lit = cond->AsLiteral();
if (lit == NULL) return DONT_KNOW;
if (lit->IsTrue()) {
return ALWAYS_TRUE;
} else if (lit->IsFalse()) {
return ALWAYS_FALSE;
}
return DONT_KNOW;
}
void CodeGenerator::RecordPositions(MacroAssembler* masm, int pos) {
if (pos != RelocInfo::kNoPosition) { if (pos != RelocInfo::kNoPosition) {
cgen->masm()->RecordStatementPosition(pos); masm->RecordStatementPosition(pos);
cgen->masm()->RecordPosition(pos); masm->RecordPosition(pos);
} }
} }
void CodeGenerator::CodeForFunctionPosition(FunctionLiteral* fun) { void CodeGenerator::CodeForFunctionPosition(FunctionLiteral* fun) {
if (FLAG_debug_info) RecordPositions(this, fun->start_position()); if (FLAG_debug_info) RecordPositions(masm(), fun->start_position());
} }
void CodeGenerator::CodeForReturnPosition(FunctionLiteral* fun) { void CodeGenerator::CodeForReturnPosition(FunctionLiteral* fun) {
if (FLAG_debug_info) RecordPositions(this, fun->end_position()); if (FLAG_debug_info) RecordPositions(masm(), fun->end_position());
} }
void CodeGenerator::CodeForStatementPosition(Statement* stmt) { void CodeGenerator::CodeForStatementPosition(Statement* stmt) {
if (FLAG_debug_info) RecordPositions(this, stmt->statement_pos()); if (FLAG_debug_info) RecordPositions(masm(), stmt->statement_pos());
} }

4
deps/v8/src/codegen.h

@ -36,6 +36,8 @@
// The contract to the shared code is that the the CodeGenerator is a subclass // The contract to the shared code is that the the CodeGenerator is a subclass
// of Visitor and that the following methods are available publicly: // of Visitor and that the following methods are available publicly:
// MakeCode // MakeCode
// MakeCodePrologue
// MakeCodeEpilogue
// SetFunctionInfo // SetFunctionInfo
// masm // masm
// frame // frame
@ -46,6 +48,7 @@
// AddDeferred // AddDeferred
// in_spilled_code // in_spilled_code
// set_in_spilled_code // set_in_spilled_code
// RecordPositions
// //
// These methods are either used privately by the shared code or implemented as // These methods are either used privately by the shared code or implemented as
// shared code: // shared code:
@ -61,6 +64,7 @@
// FindInlineRuntimeLUT // FindInlineRuntimeLUT
// CheckForInlineRuntimeCall // CheckForInlineRuntimeCall
// PatchInlineRuntimeEntry // PatchInlineRuntimeEntry
// AnalyzeCondition
// CodeForFunctionPosition // CodeForFunctionPosition
// CodeForReturnPosition // CodeForReturnPosition
// CodeForStatementPosition // CodeForStatementPosition

10
deps/v8/src/compilation-cache.cc

@ -43,20 +43,22 @@ static const int kEvalGlobalGenerations = 1;
static const int kEvalContextualGenerations = 1; static const int kEvalContextualGenerations = 1;
static const int kRegExpGenerations = 1; static const int kRegExpGenerations = 1;
#else #else
// The number of ScriptGenerations is carefully chosen based on histograms.
// See issue 458: http://code.google.com/p/v8/issues/detail?id=458
static const int kScriptGenerations = 5; static const int kScriptGenerations = 5;
static const int kEvalGlobalGenerations = 2; static const int kEvalGlobalGenerations = 2;
static const int kEvalContextualGenerations = 2; static const int kEvalContextualGenerations = 2;
static const int kRegExpGenerations = 2; static const int kRegExpGenerations = 2;
#endif #endif
// Initial of each compilation cache table allocated. // Initial size of each compilation cache table allocated.
static const int kInitialCacheSize = 64; static const int kInitialCacheSize = 64;
// The compilation cache consists of several generational sub-caches which uses // The compilation cache consists of several generational sub-caches which uses
// this class as a base class. A sub-cache contains a compilation cache tables // this class as a base class. A sub-cache contains a compilation cache tables
// for each generation of the sub-cache. As the same source code string has // for each generation of the sub-cache. Since the same source code string has
// different compiled code for scripts and evals. Internally, we use separate // different compiled code for scripts and evals, we use separate sub-caches
// sub-caches to avoid getting the wrong kind of result when looking up. // for different compilation modes, to avoid retrieving the wrong result.
class CompilationSubCache { class CompilationSubCache {
public: public:
explicit CompilationSubCache(int generations): generations_(generations) { explicit CompilationSubCache(int generations): generations_(generations) {

303
deps/v8/src/compiler.cc

@ -32,6 +32,7 @@
#include "compilation-cache.h" #include "compilation-cache.h"
#include "compiler.h" #include "compiler.h"
#include "debug.h" #include "debug.h"
#include "fast-codegen.h"
#include "oprofile-agent.h" #include "oprofile-agent.h"
#include "rewriter.h" #include "rewriter.h"
#include "scopes.h" #include "scopes.h"
@ -40,6 +41,29 @@
namespace v8 { namespace v8 {
namespace internal { namespace internal {
class CodeGenSelector: public AstVisitor {
public:
enum CodeGenTag { NORMAL, FAST };
CodeGenSelector() : has_supported_syntax_(true) {}
CodeGenTag Select(FunctionLiteral* fun);
private:
void VisitStatements(ZoneList<Statement*>* stmts);
// AST node visit functions.
#define DECLARE_VISIT(type) virtual void Visit##type(type* node);
AST_NODE_LIST(DECLARE_VISIT)
#undef DECLARE_VISIT
bool has_supported_syntax_;
DISALLOW_COPY_AND_ASSIGN(CodeGenSelector);
};
static Handle<Code> MakeCode(FunctionLiteral* literal, static Handle<Code> MakeCode(FunctionLiteral* literal,
Handle<Script> script, Handle<Script> script,
Handle<Context> context, Handle<Context> context,
@ -79,8 +103,15 @@ static Handle<Code> MakeCode(FunctionLiteral* literal,
} }
// Generate code and return it. // Generate code and return it.
Handle<Code> result = CodeGenerator::MakeCode(literal, script, is_eval); if (FLAG_fast_compiler) {
return result; CodeGenSelector selector;
CodeGenSelector::CodeGenTag code_gen = selector.Select(literal);
if (code_gen == CodeGenSelector::FAST) {
return FastCodeGenerator::MakeCode(literal, script);
}
ASSERT(code_gen == CodeGenSelector::NORMAL);
}
return CodeGenerator::MakeCode(literal, script, is_eval);
} }
@ -197,7 +228,6 @@ static Handle<JSFunction> MakeFunction(bool is_global,
Handle<JSFunction> fun = Handle<JSFunction> fun =
Factory::NewFunctionBoilerplate(lit->name(), Factory::NewFunctionBoilerplate(lit->name(),
lit->materialized_literal_count(), lit->materialized_literal_count(),
lit->contains_array_literal(),
code); code);
ASSERT_EQ(RelocInfo::kNoPosition, lit->function_token_position()); ASSERT_EQ(RelocInfo::kNoPosition, lit->function_token_position());
@ -417,4 +447,271 @@ bool Compiler::CompileLazy(Handle<SharedFunctionInfo> shared,
} }
CodeGenSelector::CodeGenTag CodeGenSelector::Select(FunctionLiteral* fun) {
Scope* scope = fun->scope();
if (!scope->is_global_scope()) return NORMAL;
ASSERT(scope->num_heap_slots() == 0);
ASSERT(scope->arguments() == NULL);
if (!scope->declarations()->is_empty()) return NORMAL;
if (fun->materialized_literal_count() > 0) return NORMAL;
if (fun->body()->is_empty()) return NORMAL;
has_supported_syntax_ = true;
VisitStatements(fun->body());
return has_supported_syntax_ ? FAST : NORMAL;
}
#define BAILOUT(reason) \
do { \
if (FLAG_trace_bailout) { \
PrintF("%s\n", reason); \
} \
has_supported_syntax_ = false; \
return; \
} while (false)
#define CHECK_BAILOUT \
do { \
if (!has_supported_syntax_) return; \
} while (false)
void CodeGenSelector::VisitStatements(ZoneList<Statement*>* stmts) {
for (int i = 0, len = stmts->length(); i < len; i++) {
CHECK_BAILOUT;
Visit(stmts->at(i));
}
}
void CodeGenSelector::VisitDeclaration(Declaration* decl) {
BAILOUT("Declaration");
}
void CodeGenSelector::VisitBlock(Block* stmt) {
BAILOUT("Block");
}
void CodeGenSelector::VisitExpressionStatement(ExpressionStatement* stmt) {
Expression* expr = stmt->expression();
Visit(expr);
CHECK_BAILOUT;
expr->set_location(Location::Nowhere());
}
void CodeGenSelector::VisitEmptyStatement(EmptyStatement* stmt) {
BAILOUT("EmptyStatement");
}
void CodeGenSelector::VisitIfStatement(IfStatement* stmt) {
BAILOUT("IfStatement");
}
void CodeGenSelector::VisitContinueStatement(ContinueStatement* stmt) {
BAILOUT("ContinueStatement");
}
void CodeGenSelector::VisitBreakStatement(BreakStatement* stmt) {
BAILOUT("BreakStatement");
}
void CodeGenSelector::VisitReturnStatement(ReturnStatement* stmt) {
Visit(stmt->expression());
}
void CodeGenSelector::VisitWithEnterStatement(WithEnterStatement* stmt) {
BAILOUT("WithEnterStatement");
}
void CodeGenSelector::VisitWithExitStatement(WithExitStatement* stmt) {
BAILOUT("WithExitStatement");
}
void CodeGenSelector::VisitSwitchStatement(SwitchStatement* stmt) {
BAILOUT("SwitchStatement");
}
void CodeGenSelector::VisitDoWhileStatement(DoWhileStatement* stmt) {
BAILOUT("DoWhileStatement");
}
void CodeGenSelector::VisitWhileStatement(WhileStatement* stmt) {
BAILOUT("WhileStatement");
}
void CodeGenSelector::VisitForStatement(ForStatement* stmt) {
BAILOUT("ForStatement");
}
void CodeGenSelector::VisitForInStatement(ForInStatement* stmt) {
BAILOUT("ForInStatement");
}
void CodeGenSelector::VisitTryCatchStatement(TryCatchStatement* stmt) {
BAILOUT("TryCatchStatement");
}
void CodeGenSelector::VisitTryFinallyStatement(TryFinallyStatement* stmt) {
BAILOUT("TryFinallyStatement");
}
void CodeGenSelector::VisitDebuggerStatement(DebuggerStatement* stmt) {
BAILOUT("DebuggerStatement");
}
void CodeGenSelector::VisitFunctionLiteral(FunctionLiteral* expr) {
BAILOUT("FunctionLiteral");
}
void CodeGenSelector::VisitFunctionBoilerplateLiteral(
FunctionBoilerplateLiteral* expr) {
BAILOUT("FunctionBoilerplateLiteral");
}
void CodeGenSelector::VisitConditional(Conditional* expr) {
BAILOUT("Conditional");
}
void CodeGenSelector::VisitSlot(Slot* expr) {
Slot::Type type = expr->type();
if (type != Slot::PARAMETER && type != Slot::LOCAL) {
BAILOUT("non-parameter/non-local slot reference");
}
}
void CodeGenSelector::VisitVariableProxy(VariableProxy* expr) {
Expression* rewrite = expr->var()->rewrite();
if (rewrite == NULL) BAILOUT("global variable reference");
Visit(rewrite);
}
void CodeGenSelector::VisitLiteral(Literal* expr) {
// All literals are supported.
}
void CodeGenSelector::VisitRegExpLiteral(RegExpLiteral* expr) {
BAILOUT("RegExpLiteral");
}
void CodeGenSelector::VisitObjectLiteral(ObjectLiteral* expr) {
BAILOUT("ObjectLiteral");
}
void CodeGenSelector::VisitArrayLiteral(ArrayLiteral* expr) {
BAILOUT("ArrayLiteral");
}
void CodeGenSelector::VisitCatchExtensionObject(CatchExtensionObject* expr) {
BAILOUT("CatchExtensionObject");
}
void CodeGenSelector::VisitAssignment(Assignment* expr) {
// We support plain non-compound assignments to parameters and
// non-context (stack-allocated) locals.
if (expr->starts_initialization_block()) BAILOUT("initialization block");
Token::Value op = expr->op();
if (op == Token::INIT_CONST) BAILOUT("initialize constant");
if (op != Token::ASSIGN && op != Token::INIT_VAR) {
BAILOUT("compound assignment");
}
Variable* var = expr->target()->AsVariableProxy()->AsVariable();
if (var == NULL || var->is_global()) BAILOUT("non-variable assignment");
ASSERT(var->slot() != NULL);
Slot::Type type = var->slot()->type();
if (type != Slot::PARAMETER && type != Slot::LOCAL) {
BAILOUT("non-parameter/non-local slot assignment");
}
Visit(expr->value());
}
void CodeGenSelector::VisitThrow(Throw* expr) {
BAILOUT("Throw");
}
void CodeGenSelector::VisitProperty(Property* expr) {
BAILOUT("Property");
}
void CodeGenSelector::VisitCall(Call* expr) {
BAILOUT("Call");
}
void CodeGenSelector::VisitCallNew(CallNew* expr) {
BAILOUT("CallNew");
}
void CodeGenSelector::VisitCallRuntime(CallRuntime* expr) {
BAILOUT("CallRuntime");
}
void CodeGenSelector::VisitUnaryOperation(UnaryOperation* expr) {
BAILOUT("UnaryOperation");
}
void CodeGenSelector::VisitCountOperation(CountOperation* expr) {
BAILOUT("CountOperation");
}
void CodeGenSelector::VisitBinaryOperation(BinaryOperation* expr) {
BAILOUT("BinaryOperation");
}
void CodeGenSelector::VisitCompareOperation(CompareOperation* expr) {
BAILOUT("CompareOperation");
}
void CodeGenSelector::VisitThisFunction(ThisFunction* expr) {
BAILOUT("ThisFunction");
}
#undef BAILOUT
#undef CHECK_BAILOUT
} } // namespace v8::internal } } // namespace v8::internal

2
deps/v8/src/d8-posix.cc

@ -311,7 +311,7 @@ static Handle<Value> GetStdout(int child_fd,
int read_timeout, int read_timeout,
int total_timeout) { int total_timeout) {
Handle<String> accumulator = String::Empty(); Handle<String> accumulator = String::Empty();
const char* source = "function(a, b) { return a + b; }"; const char* source = "(function(a, b) { return a + b; })";
Handle<Value> cons_as_obj(Script::Compile(String::New(source))->Run()); Handle<Value> cons_as_obj(Script::Compile(String::New(source))->Run());
Handle<Function> cons_function(Function::Cast(*cons_as_obj)); Handle<Function> cons_function(Function::Cast(*cons_as_obj));
Handle<Value> cons_args[2]; Handle<Value> cons_args[2];

2
deps/v8/src/d8.js

@ -130,7 +130,7 @@ function DebugMessageDetails(message) {
} }
function DebugEventDetails(response) { function DebugEventDetails(response) {
details = {text:'', running:false}; details = {text:'', running:false}
// Get the running state. // Get the running state.
details.running = response.running(); details.running = response.running();

2
deps/v8/src/dateparser-inl.h

@ -28,6 +28,8 @@
#ifndef V8_DATEPARSER_INL_H_ #ifndef V8_DATEPARSER_INL_H_
#define V8_DATEPARSER_INL_H_ #define V8_DATEPARSER_INL_H_
#include "dateparser.h"
namespace v8 { namespace v8 {
namespace internal { namespace internal {

31
deps/v8/src/debug-delay.js

@ -795,8 +795,8 @@ ExecutionState.prototype.selectedFrame = function() {
return this.selected_frame; return this.selected_frame;
}; };
ExecutionState.prototype.debugCommandProcessor = function(protocol) { ExecutionState.prototype.debugCommandProcessor = function(opt_is_running) {
return new DebugCommandProcessor(this, protocol); return new DebugCommandProcessor(this, opt_is_running);
}; };
@ -1081,9 +1081,9 @@ function MakeScriptObject_(script, include_source) {
}; };
function DebugCommandProcessor(exec_state) { function DebugCommandProcessor(exec_state, opt_is_running) {
this.exec_state_ = exec_state; this.exec_state_ = exec_state;
this.running_ = false; this.running_ = opt_is_running || false;
}; };
@ -1107,7 +1107,8 @@ function ProtocolMessage(request) {
this.type = 'event'; this.type = 'event';
} }
this.success = true; this.success = true;
this.running = false; // Handler may set this field to control debugger state.
this.running = undefined;
} }
@ -1168,11 +1169,7 @@ ProtocolMessage.prototype.toJSONProtocol = function() {
if (this.message) { if (this.message) {
json.message = this.message; json.message = this.message;
} }
if (this.running) { json.running = this.running;
json.running = true;
} else {
json.running = false;
}
return JSON.stringify(json); return JSON.stringify(json);
} }
@ -1244,6 +1241,8 @@ DebugCommandProcessor.prototype.processDebugJSONRequest = function(json_request)
this.scriptsRequest_(request, response); this.scriptsRequest_(request, response);
} else if (request.command == 'threads') { } else if (request.command == 'threads') {
this.threadsRequest_(request, response); this.threadsRequest_(request, response);
} else if (request.command == 'suspend') {
this.suspendRequest_(request, response);
} else { } else {
throw new Error('Unknown command "' + request.command + '" in request'); throw new Error('Unknown command "' + request.command + '" in request');
} }
@ -1258,7 +1257,11 @@ DebugCommandProcessor.prototype.processDebugJSONRequest = function(json_request)
// Return the response as a JSON encoded string. // Return the response as a JSON encoded string.
try { try {
this.running_ = response.running; // Store the running state. if (!IS_UNDEFINED(response.running)) {
// Response controls running state.
this.running_ = response.running;
}
response.running = this.running_;
return response.toJSONProtocol(); return response.toJSONProtocol();
} catch (e) { } catch (e) {
// Failed to generate response - return generic error. // Failed to generate response - return generic error.
@ -1907,6 +1910,12 @@ DebugCommandProcessor.prototype.threadsRequest_ = function(request, response) {
}; };
DebugCommandProcessor.prototype.suspendRequest_ = function(request, response) {
// TODO(peter.rybin): probably we need some body field here.
response.running = false;
};
// Check whether the previously processed command caused the VM to become // Check whether the previously processed command caused the VM to become
// running. // running.
DebugCommandProcessor.prototype.isRunning = function() { DebugCommandProcessor.prototype.isRunning = function() {

39
deps/v8/src/debug.cc

@ -1614,7 +1614,7 @@ void Debug::SetAfterBreakTarget(JavaScriptFrame* frame) {
if (RelocInfo::IsJSReturn(it.rinfo()->rmode())) { if (RelocInfo::IsJSReturn(it.rinfo()->rmode())) {
at_js_return = (it.rinfo()->pc() == at_js_return = (it.rinfo()->pc() ==
addr - Assembler::kPatchReturnSequenceAddressOffset); addr - Assembler::kPatchReturnSequenceAddressOffset);
break_at_js_return_active = it.rinfo()->IsCallInstruction(); break_at_js_return_active = it.rinfo()->IsPatchedReturnSequence();
} }
it.next(); it.next();
} }
@ -2214,21 +2214,31 @@ void Debugger::NotifyMessageHandler(v8::DebugEvent event,
return; return;
} }
// Get the DebugCommandProcessor.
v8::Local<v8::Object> api_exec_state =
v8::Utils::ToLocal(Handle<JSObject>::cast(exec_state));
v8::Local<v8::String> fun_name =
v8::String::New("debugCommandProcessor");
v8::Local<v8::Function> fun =
v8::Function::Cast(*api_exec_state->Get(fun_name));
v8::TryCatch try_catch; v8::TryCatch try_catch;
v8::Local<v8::Object> cmd_processor =
v8::Object::Cast(*fun->Call(api_exec_state, 0, NULL)); // DebugCommandProcessor goes here.
if (try_catch.HasCaught()) { v8::Local<v8::Object> cmd_processor;
PrintLn(try_catch.Exception()); {
return; v8::Local<v8::Object> api_exec_state =
v8::Utils::ToLocal(Handle<JSObject>::cast(exec_state));
v8::Local<v8::String> fun_name =
v8::String::New("debugCommandProcessor");
v8::Local<v8::Function> fun =
v8::Function::Cast(*api_exec_state->Get(fun_name));
v8::Handle<v8::Boolean> running =
auto_continue ? v8::True() : v8::False();
static const int kArgc = 1;
v8::Handle<Value> argv[kArgc] = { running };
cmd_processor = v8::Object::Cast(*fun->Call(api_exec_state, kArgc, argv));
if (try_catch.HasCaught()) {
PrintLn(try_catch.Exception());
return;
}
} }
bool running = auto_continue;
// Process requests from the debugger. // Process requests from the debugger.
while (true) { while (true) {
// Wait for new command in the queue. // Wait for new command in the queue.
@ -2269,7 +2279,6 @@ void Debugger::NotifyMessageHandler(v8::DebugEvent event,
// Get the response. // Get the response.
v8::Local<v8::String> response; v8::Local<v8::String> response;
bool running = false;
if (!try_catch.HasCaught()) { if (!try_catch.HasCaught()) {
// Get response string. // Get response string.
if (!response_val->IsUndefined()) { if (!response_val->IsUndefined()) {
@ -2312,7 +2321,7 @@ void Debugger::NotifyMessageHandler(v8::DebugEvent event,
// Return from debug event processing if either the VM is put into the // Return from debug event processing if either the VM is put into the
// runnning state (through a continue command) or auto continue is active // runnning state (through a continue command) or auto continue is active
// and there are no more commands queued. // and there are no more commands queued.
if (running || (auto_continue && !HasCommands())) { if (running && !HasCommands()) {
return; return;
} }
} }

10
deps/v8/src/execution.cc

@ -96,8 +96,11 @@ static Handle<Object> Invoke(bool construct,
JSEntryFunction entry = FUNCTION_CAST<JSEntryFunction>(code->entry()); JSEntryFunction entry = FUNCTION_CAST<JSEntryFunction>(code->entry());
// Call the function through the right JS entry stub. // Call the function through the right JS entry stub.
value = CALL_GENERATED_CODE(entry, func->code()->entry(), *func, byte* entry_address= func->code()->entry();
*receiver, argc, args); JSFunction* function = *func;
Object* receiver_pointer = *receiver;
value = CALL_GENERATED_CODE(entry, entry_address, function,
receiver_pointer, argc, args);
} }
#ifdef DEBUG #ifdef DEBUG
@ -383,7 +386,8 @@ void StackGuard::ThreadLocal::Initialize() {
if (initial_climit_ == kIllegalLimit) { if (initial_climit_ == kIllegalLimit) {
// Takes the address of the limit variable in order to find out where // Takes the address of the limit variable in order to find out where
// the top of stack is right now. // the top of stack is right now.
intptr_t limit = reinterpret_cast<intptr_t>(&limit) - kLimitSize; uintptr_t limit = reinterpret_cast<uintptr_t>(&limit) - kLimitSize;
ASSERT(reinterpret_cast<uintptr_t>(&limit) > kLimitSize);
initial_jslimit_ = SimulatorStack::JsLimitFromCLimit(limit); initial_jslimit_ = SimulatorStack::JsLimitFromCLimit(limit);
jslimit_ = SimulatorStack::JsLimitFromCLimit(limit); jslimit_ = SimulatorStack::JsLimitFromCLimit(limit);
initial_climit_ = limit; initial_climit_ = limit;

1
deps/v8/src/execution.h

@ -216,6 +216,7 @@ class StackGuard : public AllStatic {
static void DisableInterrupts(); static void DisableInterrupts();
static const uintptr_t kLimitSize = kPointerSize * 128 * KB; static const uintptr_t kLimitSize = kPointerSize * 128 * KB;
#ifdef V8_TARGET_ARCH_X64 #ifdef V8_TARGET_ARCH_X64
static const uintptr_t kInterruptLimit = V8_UINT64_C(0xfffffffffffffffe); static const uintptr_t kInterruptLimit = V8_UINT64_C(0xfffffffffffffffe);
static const uintptr_t kIllegalLimit = V8_UINT64_C(0xfffffffffffffff8); static const uintptr_t kIllegalLimit = V8_UINT64_C(0xfffffffffffffff8);

3
deps/v8/src/factory.cc

@ -477,7 +477,6 @@ Handle<JSFunction> Factory::NewFunction(Handle<String> name,
Handle<JSFunction> Factory::NewFunctionBoilerplate(Handle<String> name, Handle<JSFunction> Factory::NewFunctionBoilerplate(Handle<String> name,
int number_of_literals, int number_of_literals,
bool contains_array_literal,
Handle<Code> code) { Handle<Code> code) {
Handle<JSFunction> function = NewFunctionBoilerplate(name); Handle<JSFunction> function = NewFunctionBoilerplate(name);
function->set_code(*code); function->set_code(*code);
@ -485,7 +484,7 @@ Handle<JSFunction> Factory::NewFunctionBoilerplate(Handle<String> name,
// If the function contains object, regexp or array literals, // If the function contains object, regexp or array literals,
// allocate extra space for a literals array prefix containing the // allocate extra space for a literals array prefix containing the
// object, regexp and array constructor functions. // object, regexp and array constructor functions.
if (number_of_literals > 0 || contains_array_literal) { if (number_of_literals > 0) {
literals_array_size += JSFunction::kLiteralsPrefixSize; literals_array_size += JSFunction::kLiteralsPrefixSize;
} }
Handle<FixedArray> literals = Handle<FixedArray> literals =

1
deps/v8/src/factory.h

@ -264,7 +264,6 @@ class Factory : public AllStatic {
static Handle<JSFunction> NewFunctionBoilerplate(Handle<String> name, static Handle<JSFunction> NewFunctionBoilerplate(Handle<String> name,
int number_of_literals, int number_of_literals,
bool contains_array_literal,
Handle<Code> code); Handle<Code> code);
static Handle<JSFunction> NewFunctionBoilerplate(Handle<String> name); static Handle<JSFunction> NewFunctionBoilerplate(Handle<String> name);

269
deps/v8/src/fast-codegen.cc

@ -0,0 +1,269 @@
// Copyright 2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include "v8.h"
#include "codegen-inl.h"
#include "fast-codegen.h"
namespace v8 {
namespace internal {
Handle<Code> FastCodeGenerator::MakeCode(FunctionLiteral* fun,
Handle<Script> script) {
CodeGenerator::MakeCodePrologue(fun);
const int kInitialBufferSize = 4 * KB;
MacroAssembler masm(NULL, kInitialBufferSize);
FastCodeGenerator cgen(&masm);
cgen.Generate(fun);
if (cgen.HasStackOverflow()) {
ASSERT(!Top::has_pending_exception());
return Handle<Code>::null();
}
Code::Flags flags = Code::ComputeFlags(Code::FUNCTION, NOT_IN_LOOP);
return CodeGenerator::MakeCodeEpilogue(fun, &masm, flags, script);
}
int FastCodeGenerator::SlotOffset(Slot* slot) {
  // Slot indexes count downward in memory, hence the negation.
  int offset = -slot->index() * kPointerSize;
  // Rebase on the parameter area or the local area of the frame.
  if (slot->type() == Slot::PARAMETER) {
    // Parameters sit above the frame, past the receiver slot (+1).
    offset += (function_->scope()->num_parameters() + 1) * kPointerSize;
  } else if (slot->type() == Slot::LOCAL) {
    offset += JavaScriptFrameConstants::kLocal0Offset;
  } else {
    // Only parameter and local slots have a frame offset.
    UNREACHABLE();
  }
  return offset;
}
// Record the function's start position for the debugger, unless debug
// info generation is disabled by flag.
void FastCodeGenerator::SetFunctionPosition(FunctionLiteral* fun) {
  if (!FLAG_debug_info) return;
  CodeGenerator::RecordPositions(masm_, fun->start_position());
}
// Record the function's end position (the return site) for the
// debugger, unless debug info generation is disabled by flag.
void FastCodeGenerator::SetReturnPosition(FunctionLiteral* fun) {
  if (!FLAG_debug_info) return;
  CodeGenerator::RecordPositions(masm_, fun->end_position());
}
// Record a statement's source position for the debugger, unless debug
// info generation is disabled by flag.
void FastCodeGenerator::SetStatementPosition(Statement* stmt) {
  if (!FLAG_debug_info) return;
  CodeGenerator::RecordPositions(masm_, stmt->statement_pos());
}
// Record an arbitrary source position with the assembler.  Skipped when
// debug info is disabled or when there is no position to record.
void FastCodeGenerator::SetSourcePosition(int pos) {
  if (!FLAG_debug_info) return;
  if (pos == RelocInfo::kNoPosition) return;
  masm_->RecordPosition(pos);
}
// ---------------------------------------------------------------------------
// Unsupported AST node types.  Every visitor below asserts UNREACHABLE():
// the fast code generator never expects to see these constructs.
// NOTE(review): presumably functions containing them are rejected earlier,
// before this backend is selected (cf. the fast_compiler flag, "use the
// fast-mode compiler for some top-level code") -- confirm against
// compiler.cc.  Reaching any of these visitors indicates a bug.
void FastCodeGenerator::VisitDeclaration(Declaration* decl) {
UNREACHABLE();
}
void FastCodeGenerator::VisitBlock(Block* stmt) {
UNREACHABLE();
}
void FastCodeGenerator::VisitEmptyStatement(EmptyStatement* stmt) {
UNREACHABLE();
}
void FastCodeGenerator::VisitIfStatement(IfStatement* stmt) {
UNREACHABLE();
}
void FastCodeGenerator::VisitContinueStatement(ContinueStatement* stmt) {
UNREACHABLE();
}
void FastCodeGenerator::VisitBreakStatement(BreakStatement* stmt) {
UNREACHABLE();
}
void FastCodeGenerator::VisitWithEnterStatement(WithEnterStatement* stmt) {
UNREACHABLE();
}
void FastCodeGenerator::VisitWithExitStatement(WithExitStatement* stmt) {
UNREACHABLE();
}
void FastCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
UNREACHABLE();
}
void FastCodeGenerator::VisitDoWhileStatement(DoWhileStatement* stmt) {
UNREACHABLE();
}
void FastCodeGenerator::VisitWhileStatement(WhileStatement* stmt) {
UNREACHABLE();
}
void FastCodeGenerator::VisitForStatement(ForStatement* stmt) {
UNREACHABLE();
}
void FastCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
UNREACHABLE();
}
void FastCodeGenerator::VisitTryCatchStatement(TryCatchStatement* stmt) {
UNREACHABLE();
}
void FastCodeGenerator::VisitTryFinallyStatement(TryFinallyStatement* stmt) {
UNREACHABLE();
}
void FastCodeGenerator::VisitDebuggerStatement(DebuggerStatement* stmt) {
UNREACHABLE();
}
void FastCodeGenerator::VisitFunctionLiteral(FunctionLiteral* expr) {
UNREACHABLE();
}
void FastCodeGenerator::VisitFunctionBoilerplateLiteral(
FunctionBoilerplateLiteral* expr) {
UNREACHABLE();
}
void FastCodeGenerator::VisitConditional(Conditional* expr) {
UNREACHABLE();
}
void FastCodeGenerator::VisitSlot(Slot* expr) {
// Slots do not appear directly in the AST.
UNREACHABLE();
}
void FastCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
UNREACHABLE();
}
void FastCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
UNREACHABLE();
}
void FastCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
UNREACHABLE();
}
void FastCodeGenerator::VisitCatchExtensionObject(CatchExtensionObject* expr) {
UNREACHABLE();
}
void FastCodeGenerator::VisitThrow(Throw* expr) {
UNREACHABLE();
}
void FastCodeGenerator::VisitProperty(Property* expr) {
UNREACHABLE();
}
void FastCodeGenerator::VisitCall(Call* expr) {
UNREACHABLE();
}
void FastCodeGenerator::VisitCallNew(CallNew* expr) {
UNREACHABLE();
}
void FastCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
UNREACHABLE();
}
void FastCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
UNREACHABLE();
}
void FastCodeGenerator::VisitCountOperation(CountOperation* expr) {
UNREACHABLE();
}
void FastCodeGenerator::VisitBinaryOperation(BinaryOperation* expr) {
UNREACHABLE();
}
void FastCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
UNREACHABLE();
}
void FastCodeGenerator::VisitThisFunction(ThisFunction* expr) {
UNREACHABLE();
}
} } // namespace v8::internal

71
deps/v8/src/fast-codegen.h

@ -0,0 +1,71 @@
// Copyright 2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#ifndef V8_FAST_CODEGEN_H_
#define V8_FAST_CODEGEN_H_
#include "v8.h"
#include "ast.h"
namespace v8 {
namespace internal {
// Code generator that emits machine code directly while visiting the
// function's AST (see MakeCode below for the entry point).  Only a
// subset of AST node types is handled; the remaining Visit* overrides
// are stubbed out with UNREACHABLE() in fast-codegen.cc.
class FastCodeGenerator: public AstVisitor {
public:
// Takes a non-owning pointer to the assembler that collects the
// generated instructions; function_ is set later, during code
// generation.
explicit FastCodeGenerator(MacroAssembler* masm)
: masm_(masm), function_(NULL) {
}
// Compile 'fun' and return the resulting code object.  Returns a
// null handle if code generation bailed out (stack overflow during
// the AST walk).
static Handle<Code> MakeCode(FunctionLiteral* fun, Handle<Script> script);
// Emit the code for 'fun'.  Implemented per target architecture
// (e.g. fast-codegen-ia32.cc).
void Generate(FunctionLiteral* fun);
private:
// Frame offset in bytes of a parameter or local stack slot
// (higher slot indexes live at lower addresses).
int SlotOffset(Slot* slot);
// Debug-info helpers: record source positions with the assembler
// when FLAG_debug_info is enabled.
void SetFunctionPosition(FunctionLiteral* fun);
void SetReturnPosition(FunctionLiteral* fun);
void SetStatementPosition(Statement* stmt);
void SetSourcePosition(int pos);
// AST node visit functions.
#define DECLARE_VISIT(type) virtual void Visit##type(type* node);
AST_NODE_LIST(DECLARE_VISIT)
#undef DECLARE_VISIT
MacroAssembler* masm_;  // Not owned.
FunctionLiteral* function_;  // Function currently being compiled.
DISALLOW_COPY_AND_ASSIGN(FastCodeGenerator);
};
} } // namespace v8::internal
#endif // V8_FAST_CODEGEN_H_

22
deps/v8/src/flag-definitions.h

@ -96,7 +96,7 @@ private:
// //
#define FLAG FLAG_FULL #define FLAG FLAG_FULL
// assembler-ia32.cc / assembler-arm.cc // assembler-ia32.cc / assembler-arm.cc / assembler-x64.cc
DEFINE_bool(debug_code, false, DEFINE_bool(debug_code, false,
"generate extra code (comments, assertions) for debugging") "generate extra code (comments, assertions) for debugging")
DEFINE_bool(emit_branch_hints, false, "emit branch hints") DEFINE_bool(emit_branch_hints, false, "emit branch hints")
@ -104,6 +104,16 @@ DEFINE_bool(push_pop_elimination, true,
"eliminate redundant push/pops in assembly code") "eliminate redundant push/pops in assembly code")
DEFINE_bool(print_push_pop_elimination, false, DEFINE_bool(print_push_pop_elimination, false,
"print elimination of redundant push/pops in assembly code") "print elimination of redundant push/pops in assembly code")
DEFINE_bool(enable_sse2, true,
"enable use of SSE2 instructions if available")
DEFINE_bool(enable_sse3, true,
"enable use of SSE3 instructions if available")
DEFINE_bool(enable_cmov, true,
"enable use of CMOV instruction if available")
DEFINE_bool(enable_rdtsc, true,
"enable use of RDTSC instruction if available")
DEFINE_bool(enable_sahf, true,
"enable use of SAHF instruction if available (X64 only)")
// bootstrapper.cc // bootstrapper.cc
DEFINE_string(expose_natives_as, NULL, "expose natives in global object") DEFINE_string(expose_natives_as, NULL, "expose natives in global object")
@ -132,7 +142,11 @@ DEFINE_bool(debug_info, true, "add debug information to compiled functions")
// compiler.cc // compiler.cc
DEFINE_bool(strict, false, "strict error checking") DEFINE_bool(strict, false, "strict error checking")
DEFINE_int(min_preparse_length, 1024, DEFINE_int(min_preparse_length, 1024,
"Minimum length for automatic enable preparsing") "minimum length for automatic enable preparsing")
DEFINE_bool(fast_compiler, true,
"use the fast-mode compiler for some top-level code")
DEFINE_bool(trace_bailout, false,
"print reasons for failing to use fast compilation")
// compilation-cache.cc // compilation-cache.cc
DEFINE_bool(compilation_cache, true, "enable compilation cache") DEFINE_bool(compilation_cache, true, "enable compilation cache")
@ -263,6 +277,9 @@ DEFINE_bool(print_builtin_source, false,
"pretty print source code for builtins") "pretty print source code for builtins")
DEFINE_bool(print_ast, false, "print source AST") DEFINE_bool(print_ast, false, "print source AST")
DEFINE_bool(print_builtin_ast, false, "print source AST for builtins") DEFINE_bool(print_builtin_ast, false, "print source AST for builtins")
DEFINE_bool(print_json_ast, false, "print source AST as JSON")
DEFINE_bool(print_builtin_json_ast, false,
"print source AST for builtins as JSON")
DEFINE_bool(trace_calls, false, "trace calls") DEFINE_bool(trace_calls, false, "trace calls")
DEFINE_bool(trace_builtin_calls, false, "trace builtins calls") DEFINE_bool(trace_builtin_calls, false, "trace builtins calls")
DEFINE_string(stop_at, "", "function name where to insert a breakpoint") DEFINE_string(stop_at, "", "function name where to insert a breakpoint")
@ -333,6 +350,7 @@ DEFINE_bool(log_gc, false,
DEFINE_bool(log_handles, false, "Log global handle events.") DEFINE_bool(log_handles, false, "Log global handle events.")
DEFINE_bool(log_state_changes, false, "Log state changes.") DEFINE_bool(log_state_changes, false, "Log state changes.")
DEFINE_bool(log_suspect, false, "Log suspect operations.") DEFINE_bool(log_suspect, false, "Log suspect operations.")
DEFINE_bool(log_producers, false, "Log stack traces of JS objects allocations.")
DEFINE_bool(compress_log, false, DEFINE_bool(compress_log, false,
"Compress log to save space (makes log less human-readable).") "Compress log to save space (makes log less human-readable).")
DEFINE_bool(prof, false, DEFINE_bool(prof, false,

10
deps/v8/src/global-handles.cc

@ -264,6 +264,16 @@ void GlobalHandles::IterateWeakRoots(ObjectVisitor* v) {
} }
void GlobalHandles::IterateWeakRoots(WeakReferenceGuest f,
WeakReferenceCallback callback) {
for (Node* current = head_; current != NULL; current = current->next()) {
if (current->IsWeak() && current->callback() == callback) {
f(current->object_, current->parameter());
}
}
}
void GlobalHandles::IdentifyWeakHandles(WeakSlotCallback f) { void GlobalHandles::IdentifyWeakHandles(WeakSlotCallback f) {
for (Node* current = head_; current != NULL; current = current->next()) { for (Node* current = head_; current != NULL; current = current->next()) {
if (current->state_ == Node::WEAK) { if (current->state_ == Node::WEAK) {

6
deps/v8/src/global-handles.h

@ -54,6 +54,8 @@ class ObjectGroup : public Malloced {
}; };
typedef void (*WeakReferenceGuest)(Object* object, void* parameter);
class GlobalHandles : public AllStatic { class GlobalHandles : public AllStatic {
public: public:
// Creates a new global handle that is alive until Destroy is called. // Creates a new global handle that is alive until Destroy is called.
@ -99,6 +101,10 @@ class GlobalHandles : public AllStatic {
// Iterates over all weak roots in heap. // Iterates over all weak roots in heap.
static void IterateWeakRoots(ObjectVisitor* v); static void IterateWeakRoots(ObjectVisitor* v);
// Iterates over weak roots that are bound to a given callback.
static void IterateWeakRoots(WeakReferenceGuest f,
WeakReferenceCallback callback);
// Find all weak handles satisfying the callback predicate, mark // Find all weak handles satisfying the callback predicate, mark
// them as pending. // them as pending.
static void IdentifyWeakHandles(WeakSlotCallback f); static void IdentifyWeakHandles(WeakSlotCallback f);

56
deps/v8/src/heap-profiler.cc

@ -28,6 +28,8 @@
#include "v8.h" #include "v8.h"
#include "heap-profiler.h" #include "heap-profiler.h"
#include "frames-inl.h"
#include "global-handles.h"
#include "string-stream.h" #include "string-stream.h"
namespace v8 { namespace v8 {
@ -327,6 +329,11 @@ void ConstructorHeapProfile::PrintStats() {
} }
static const char* GetConstructorName(const char* name) {
return name[0] != '\0' ? name : "(anonymous)";
}
void JSObjectsCluster::Print(StringStream* accumulator) const { void JSObjectsCluster::Print(StringStream* accumulator) const {
ASSERT(!is_null()); ASSERT(!is_null());
if (constructor_ == FromSpecialCase(ROOTS)) { if (constructor_ == FromSpecialCase(ROOTS)) {
@ -338,7 +345,7 @@ void JSObjectsCluster::Print(StringStream* accumulator) const {
} else { } else {
SmartPointer<char> s_name( SmartPointer<char> s_name(
constructor_->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL)); constructor_->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL));
accumulator->Add("%s", (*s_name)[0] != '\0' ? *s_name : "(anonymous)"); accumulator->Add("%s", GetConstructorName(*s_name));
if (instance_ != NULL) { if (instance_ != NULL) {
accumulator->Add(":%p", static_cast<void*>(instance_)); accumulator->Add(":%p", static_cast<void*>(instance_));
} }
@ -574,6 +581,23 @@ void HeapProfiler::CollectStats(HeapObject* obj, HistogramInfo* info) {
} }
static void StackWeakReferenceCallback(Persistent<Value> object,
void* trace) {
DeleteArray(static_cast<Address*>(trace));
object.Dispose();
}
static void PrintProducerStackTrace(Object* obj, void* trace) {
if (!obj->IsJSObject()) return;
String* constructor = JSObject::cast(obj)->constructor_name();
SmartPointer<char> s_name(
constructor->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL));
LOG(HeapSampleJSProducerEvent(GetConstructorName(*s_name),
reinterpret_cast<Address*>(trace)));
}
void HeapProfiler::WriteSample() { void HeapProfiler::WriteSample() {
LOG(HeapSampleBeginEvent("Heap", "allocated")); LOG(HeapSampleBeginEvent("Heap", "allocated"));
LOG(HeapSampleStats( LOG(HeapSampleStats(
@ -616,10 +640,40 @@ void HeapProfiler::WriteSample() {
js_cons_profile.PrintStats(); js_cons_profile.PrintStats();
js_retainer_profile.PrintStats(); js_retainer_profile.PrintStats();
GlobalHandles::IterateWeakRoots(PrintProducerStackTrace,
StackWeakReferenceCallback);
LOG(HeapSampleEndEvent("Heap", "allocated")); LOG(HeapSampleEndEvent("Heap", "allocated"));
} }
bool ProducerHeapProfile::can_log_ = false;
void ProducerHeapProfile::Setup() {
can_log_ = true;
}
void ProducerHeapProfile::RecordJSObjectAllocation(Object* obj) {
if (!can_log_ || !FLAG_log_producers) return;
int framesCount = 0;
for (JavaScriptFrameIterator it; !it.done(); it.Advance()) {
++framesCount;
}
if (framesCount == 0) return;
++framesCount; // Reserve place for the terminator item.
Vector<Address> stack(NewArray<Address>(framesCount), framesCount);
int i = 0;
for (JavaScriptFrameIterator it; !it.done(); it.Advance()) {
stack[i++] = it.frame()->pc();
}
stack[i] = NULL;
Handle<Object> handle = GlobalHandles::Create(obj);
GlobalHandles::MakeWeak(handle.location(),
static_cast<void*>(stack.start()),
StackWeakReferenceCallback);
}
#endif // ENABLE_LOGGING_AND_PROFILING #endif // ENABLE_LOGGING_AND_PROFILING

8
deps/v8/src/heap-profiler.h

@ -256,6 +256,14 @@ class RetainerHeapProfile BASE_EMBEDDED {
}; };
class ProducerHeapProfile : public AllStatic {
public:
static void Setup();
static void RecordJSObjectAllocation(Object* obj);
private:
static bool can_log_;
};
#endif // ENABLE_LOGGING_AND_PROFILING #endif // ENABLE_LOGGING_AND_PROFILING
} } // namespace v8::internal } } // namespace v8::internal

9
deps/v8/src/heap.cc

@ -1679,8 +1679,8 @@ Object* Heap::AllocateConsString(String* first, String* second) {
&& second->IsAsciiRepresentation(); && second->IsAsciiRepresentation();
// Make sure that an out of memory exception is thrown if the length // Make sure that an out of memory exception is thrown if the length
// of the new cons string is too large to fit in a Smi. // of the new cons string is too large.
if (length > Smi::kMaxValue || length < -0) { if (length > String::kMaxLength || length < 0) {
Top::context()->mark_out_of_memory(); Top::context()->mark_out_of_memory();
return Failure::OutOfMemoryException(); return Failure::OutOfMemoryException();
} }
@ -2021,6 +2021,7 @@ Object* Heap::Allocate(Map* map, AllocationSpace space) {
TargetSpaceId(map->instance_type())); TargetSpaceId(map->instance_type()));
if (result->IsFailure()) return result; if (result->IsFailure()) return result;
HeapObject::cast(result)->set_map(map); HeapObject::cast(result)->set_map(map);
ProducerHeapProfile::RecordJSObjectAllocation(result);
return result; return result;
} }
@ -2342,6 +2343,7 @@ Object* Heap::CopyJSObject(JSObject* source) {
JSObject::cast(clone)->set_properties(FixedArray::cast(prop)); JSObject::cast(clone)->set_properties(FixedArray::cast(prop));
} }
// Return the new clone. // Return the new clone.
ProducerHeapProfile::RecordJSObjectAllocation(clone);
return clone; return clone;
} }
@ -3308,6 +3310,9 @@ bool Heap::Setup(bool create_heap_objects) {
LOG(IntEvent("heap-capacity", Capacity())); LOG(IntEvent("heap-capacity", Capacity()));
LOG(IntEvent("heap-available", Available())); LOG(IntEvent("heap-available", Available()));
// This should be called only after initial objects have been created.
ProducerHeapProfile::Setup();
return true; return true;
} }

14
deps/v8/src/ia32/assembler-ia32-inl.h

@ -52,7 +52,7 @@ void RelocInfo::apply(intptr_t delta) {
if (rmode_ == RUNTIME_ENTRY || IsCodeTarget(rmode_)) { if (rmode_ == RUNTIME_ENTRY || IsCodeTarget(rmode_)) {
int32_t* p = reinterpret_cast<int32_t*>(pc_); int32_t* p = reinterpret_cast<int32_t*>(pc_);
*p -= delta; // relocate entry *p -= delta; // relocate entry
} else if (rmode_ == JS_RETURN && IsCallInstruction()) { } else if (rmode_ == JS_RETURN && IsPatchedReturnSequence()) {
// Special handling of js_return when a break point is set (call // Special handling of js_return when a break point is set (call
// instruction has been inserted). // instruction has been inserted).
int32_t* p = reinterpret_cast<int32_t*>(pc_ + 1); int32_t* p = reinterpret_cast<int32_t*>(pc_ + 1);
@ -114,36 +114,36 @@ Address* RelocInfo::target_reference_address() {
Address RelocInfo::call_address() { Address RelocInfo::call_address() {
ASSERT(IsCallInstruction()); ASSERT(IsPatchedReturnSequence());
return Assembler::target_address_at(pc_ + 1); return Assembler::target_address_at(pc_ + 1);
} }
void RelocInfo::set_call_address(Address target) { void RelocInfo::set_call_address(Address target) {
ASSERT(IsCallInstruction()); ASSERT(IsPatchedReturnSequence());
Assembler::set_target_address_at(pc_ + 1, target); Assembler::set_target_address_at(pc_ + 1, target);
} }
Object* RelocInfo::call_object() { Object* RelocInfo::call_object() {
ASSERT(IsCallInstruction()); ASSERT(IsPatchedReturnSequence());
return *call_object_address(); return *call_object_address();
} }
Object** RelocInfo::call_object_address() { Object** RelocInfo::call_object_address() {
ASSERT(IsCallInstruction()); ASSERT(IsPatchedReturnSequence());
return reinterpret_cast<Object**>(pc_ + 1); return reinterpret_cast<Object**>(pc_ + 1);
} }
void RelocInfo::set_call_object(Object* target) { void RelocInfo::set_call_object(Object* target) {
ASSERT(IsCallInstruction()); ASSERT(IsPatchedReturnSequence());
*call_object_address() = target; *call_object_address() = target;
} }
bool RelocInfo::IsCallInstruction() { bool RelocInfo::IsPatchedReturnSequence() {
return *pc_ == 0xE8; return *pc_ == 0xE8;
} }

13
deps/v8/src/ia32/assembler-ia32.cc

@ -1166,6 +1166,19 @@ void Assembler::shr_cl(Register dst) {
} }
void Assembler::subb(const Operand& op, int8_t imm8) {
EnsureSpace ensure_space(this);
last_pc_ = pc_;
if (op.is_reg(eax)) {
EMIT(0x2c);
} else {
EMIT(0x80);
emit_operand(ebp, op); // ebp == 5
}
EMIT(imm8);
}
void Assembler::sub(const Operand& dst, const Immediate& x) { void Assembler::sub(const Operand& dst, const Immediate& x) {
EnsureSpace ensure_space(this); EnsureSpace ensure_space(this);
last_pc_ = pc_; last_pc_ = pc_;

5
deps/v8/src/ia32/assembler-ia32.h

@ -367,6 +367,10 @@ class CpuFeatures : public AllStatic {
static void Probe(); static void Probe();
// Check whether a feature is supported by the target CPU. // Check whether a feature is supported by the target CPU.
static bool IsSupported(Feature f) { static bool IsSupported(Feature f) {
if (f == SSE2 && !FLAG_enable_sse2) return false;
if (f == SSE3 && !FLAG_enable_sse3) return false;
if (f == CMOV && !FLAG_enable_cmov) return false;
if (f == RDTSC && !FLAG_enable_rdtsc) return false;
return (supported_ & (static_cast<uint64_t>(1) << f)) != 0; return (supported_ & (static_cast<uint64_t>(1) << f)) != 0;
} }
// Check whether a feature is currently enabled. // Check whether a feature is currently enabled.
@ -590,6 +594,7 @@ class Assembler : public Malloced {
void shr(Register dst); void shr(Register dst);
void shr_cl(Register dst); void shr_cl(Register dst);
void subb(const Operand& dst, int8_t imm8);
void sub(const Operand& dst, const Immediate& x); void sub(const Operand& dst, const Immediate& x);
void sub(Register dst, const Operand& src); void sub(Register dst, const Operand& src);
void sub(const Operand& dst, Register src); void sub(const Operand& dst, Register src);

800
deps/v8/src/ia32/codegen-ia32.cc

File diff suppressed because it is too large

81
deps/v8/src/ia32/codegen-ia32.h

@ -294,6 +294,15 @@ class CodeGenerator: public AstVisitor {
Handle<Script> script, Handle<Script> script,
bool is_eval); bool is_eval);
// Printing of AST, etc. as requested by flags.
static void MakeCodePrologue(FunctionLiteral* fun);
// Allocate and install the code.
static Handle<Code> MakeCodeEpilogue(FunctionLiteral* fun,
MacroAssembler* masm,
Code::Flags flags,
Handle<Script> script);
#ifdef ENABLE_LOGGING_AND_PROFILING #ifdef ENABLE_LOGGING_AND_PROFILING
static bool ShouldGenerateLog(Expression* type); static bool ShouldGenerateLog(Expression* type);
#endif #endif
@ -303,6 +312,8 @@ class CodeGenerator: public AstVisitor {
bool is_toplevel, bool is_toplevel,
Handle<Script> script); Handle<Script> script);
static void RecordPositions(MacroAssembler* masm, int pos);
// Accessors // Accessors
MacroAssembler* masm() { return masm_; } MacroAssembler* masm() { return masm_; }
@ -548,6 +559,14 @@ class CodeGenerator: public AstVisitor {
inline void GenerateMathSin(ZoneList<Expression*>* args); inline void GenerateMathSin(ZoneList<Expression*>* args);
inline void GenerateMathCos(ZoneList<Expression*>* args); inline void GenerateMathCos(ZoneList<Expression*>* args);
// Simple condition analysis.
enum ConditionAnalysis {
ALWAYS_TRUE,
ALWAYS_FALSE,
DONT_KNOW
};
ConditionAnalysis AnalyzeCondition(Expression* cond);
// Methods used to indicate which source code is generated for. Source // Methods used to indicate which source code is generated for. Source
// positions are collected by the assembler and emitted with the relocation // positions are collected by the assembler and emitted with the relocation
// information. // information.
@ -604,47 +623,62 @@ class CodeGenerator: public AstVisitor {
}; };
// Flag that indicates whether or not the code that handles smi arguments // Flag that indicates whether how to generate code for the stub.
// should be placed in the stub, inlined, or omitted entirely.
enum GenericBinaryFlags { enum GenericBinaryFlags {
SMI_CODE_IN_STUB, NO_GENERIC_BINARY_FLAGS = 0,
SMI_CODE_INLINED NO_SMI_CODE_IN_STUB = 1 << 0 // Omit smi code in stub.
}; };
class GenericBinaryOpStub: public CodeStub { class GenericBinaryOpStub: public CodeStub {
public: public:
GenericBinaryOpStub(Token::Value op, GenericBinaryOpStub(Token::Value operation,
OverwriteMode mode, OverwriteMode mode,
GenericBinaryFlags flags) GenericBinaryFlags flags)
: op_(op), mode_(mode), flags_(flags) { : op_(operation),
mode_(mode),
flags_(flags),
args_in_registers_(false),
args_reversed_(false) {
use_sse3_ = CpuFeatures::IsSupported(CpuFeatures::SSE3); use_sse3_ = CpuFeatures::IsSupported(CpuFeatures::SSE3);
ASSERT(OpBits::is_valid(Token::NUM_TOKENS)); ASSERT(OpBits::is_valid(Token::NUM_TOKENS));
} }
void GenerateSmiCode(MacroAssembler* masm, Label* slow); // Generate code to call the stub with the supplied arguments. This will add
// code at the call site to prepare arguments either in registers or on the
// stack together with the actual call.
void GenerateCall(MacroAssembler* masm, Register left, Register right);
void GenerateCall(MacroAssembler* masm, Register left, Smi* right);
void GenerateCall(MacroAssembler* masm, Smi* left, Register right);
private: private:
Token::Value op_; Token::Value op_;
OverwriteMode mode_; OverwriteMode mode_;
GenericBinaryFlags flags_; GenericBinaryFlags flags_;
bool args_in_registers_; // Arguments passed in registers not on the stack.
bool args_reversed_; // Left and right argument are swapped.
bool use_sse3_; bool use_sse3_;
const char* GetName(); const char* GetName();
#ifdef DEBUG #ifdef DEBUG
void Print() { void Print() {
PrintF("GenericBinaryOpStub (op %s), (mode %d, flags %d)\n", PrintF("GenericBinaryOpStub (op %s), "
"(mode %d, flags %d, registers %d, reversed %d)\n",
Token::String(op_), Token::String(op_),
static_cast<int>(mode_), static_cast<int>(mode_),
static_cast<int>(flags_)); static_cast<int>(flags_),
static_cast<int>(args_in_registers_),
static_cast<int>(args_reversed_));
} }
#endif #endif
// Minor key encoding in 16 bits FSOOOOOOOOOOOOMM. // Minor key encoding in 16 bits FRASOOOOOOOOOOMM.
class ModeBits: public BitField<OverwriteMode, 0, 2> {}; class ModeBits: public BitField<OverwriteMode, 0, 2> {};
class OpBits: public BitField<Token::Value, 2, 12> {}; class OpBits: public BitField<Token::Value, 2, 10> {};
class SSE3Bits: public BitField<bool, 14, 1> {}; class SSE3Bits: public BitField<bool, 12, 1> {};
class ArgsInRegistersBits: public BitField<bool, 13, 1> {};
class ArgsReversedBits: public BitField<bool, 14, 1> {};
class FlagBits: public BitField<GenericBinaryFlags, 15, 1> {}; class FlagBits: public BitField<GenericBinaryFlags, 15, 1> {};
Major MajorKey() { return GenericBinaryOp; } Major MajorKey() { return GenericBinaryOp; }
@ -653,9 +687,30 @@ class GenericBinaryOpStub: public CodeStub {
return OpBits::encode(op_) return OpBits::encode(op_)
| ModeBits::encode(mode_) | ModeBits::encode(mode_)
| FlagBits::encode(flags_) | FlagBits::encode(flags_)
| SSE3Bits::encode(use_sse3_); | SSE3Bits::encode(use_sse3_)
| ArgsInRegistersBits::encode(args_in_registers_)
| ArgsReversedBits::encode(args_reversed_);
} }
void Generate(MacroAssembler* masm); void Generate(MacroAssembler* masm);
void GenerateSmiCode(MacroAssembler* masm, Label* slow);
void GenerateLoadArguments(MacroAssembler* masm);
void GenerateReturn(MacroAssembler* masm);
bool ArgsInRegistersSupported() {
return ((op_ == Token::ADD) || (op_ == Token::SUB)
|| (op_ == Token::MUL) || (op_ == Token::DIV))
&& flags_ != NO_SMI_CODE_IN_STUB;
}
bool IsOperationCommutative() {
return (op_ == Token::ADD) || (op_ == Token::MUL);
}
void SetArgsInRegisters() { args_in_registers_ = true; }
void SetArgsReversed() { args_reversed_ = true; }
bool HasSmiCodeInStub() { return (flags_ & NO_SMI_CODE_IN_STUB) == 0; }
bool HasArgumentsInRegisters() { return args_in_registers_; }
bool HasArgumentsReversed() { return args_reversed_; }
}; };

2
deps/v8/src/ia32/debug-ia32.cc

@ -63,7 +63,7 @@ void BreakLocationIterator::ClearDebugBreakAtReturn() {
// having been patched with a call instruction. // having been patched with a call instruction.
bool Debug::IsDebugBreakAtReturn(RelocInfo* rinfo) { bool Debug::IsDebugBreakAtReturn(RelocInfo* rinfo) {
ASSERT(RelocInfo::IsJSReturn(rinfo->rmode())); ASSERT(RelocInfo::IsJSReturn(rinfo->rmode()));
return rinfo->IsCallInstruction(); return rinfo->IsPatchedReturnSequence();
} }

34
deps/v8/src/ia32/disasm-ia32.cc

@ -124,6 +124,14 @@ static const char* set_conditional_mnem[] = {
}; };
static const char* conditional_move_mnem[] = {
/*0*/ "cmovo", "cmovno", "cmovc", "cmovnc",
/*4*/ "cmovz", "cmovnz", "cmovna", "cmova",
/*8*/ "cmovs", "cmovns", "cmovpe", "cmovpo",
/*12*/ "cmovl", "cmovnl", "cmovng", "cmovg"
};
enum InstructionType { enum InstructionType {
NO_INSTR, NO_INSTR,
ZERO_OPERANDS_INSTR, ZERO_OPERANDS_INSTR,
@ -311,6 +319,7 @@ class DisassemblerIA32 {
int JumpConditional(byte* data, const char* comment); int JumpConditional(byte* data, const char* comment);
int JumpConditionalShort(byte* data, const char* comment); int JumpConditionalShort(byte* data, const char* comment);
int SetCC(byte* data); int SetCC(byte* data);
int CMov(byte* data);
int FPUInstruction(byte* data); int FPUInstruction(byte* data);
void AppendToBuffer(const char* format, ...); void AppendToBuffer(const char* format, ...);
@ -614,6 +623,16 @@ int DisassemblerIA32::SetCC(byte* data) {
} }
// Returns number of bytes used, including *data.
int DisassemblerIA32::CMov(byte* data) {
assert(*data == 0x0F);
byte cond = *(data + 1) & 0x0F;
const char* mnem = conditional_move_mnem[cond];
int op_size = PrintOperands(mnem, REG_OPER_OP_ORDER, data + 2);
return 2 + op_size; // includes 0x0F
}
// Returns number of bytes used, including *data. // Returns number of bytes used, including *data.
int DisassemblerIA32::FPUInstruction(byte* data) { int DisassemblerIA32::FPUInstruction(byte* data) {
byte b1 = *data; byte b1 = *data;
@ -861,6 +880,8 @@ int DisassemblerIA32::InstructionDecode(v8::internal::Vector<char> out_buffer,
data += PrintOperands(f0mnem, REG_OPER_OP_ORDER, data); data += PrintOperands(f0mnem, REG_OPER_OP_ORDER, data);
} else if ((f0byte & 0xF0) == 0x90) { } else if ((f0byte & 0xF0) == 0x90) {
data += SetCC(data); data += SetCC(data);
} else if ((f0byte & 0xF0) == 0x40) {
data += CMov(data);
} else { } else {
data += 2; data += 2;
if (f0byte == 0xAB || f0byte == 0xA5 || f0byte == 0xAD) { if (f0byte == 0xAB || f0byte == 0xA5 || f0byte == 0xAD) {
@ -956,6 +977,19 @@ int DisassemblerIA32::InstructionDecode(v8::internal::Vector<char> out_buffer,
AppendToBuffer("mov_w "); AppendToBuffer("mov_w ");
data += PrintRightOperand(data); data += PrintRightOperand(data);
AppendToBuffer(",%s", NameOfCPURegister(regop)); AppendToBuffer(",%s", NameOfCPURegister(regop));
} else if (*data == 0x0F) {
data++;
if (*data == 0x2F) {
data++;
int mod, regop, rm;
get_modrm(*data, &mod, &regop, &rm);
AppendToBuffer("comisd %s,%s",
NameOfXMMRegister(regop),
NameOfXMMRegister(rm));
data++;
} else {
UnimplementedInstruction();
}
} else { } else {
UnimplementedInstruction(); UnimplementedInstruction();
} }

163
deps/v8/src/ia32/fast-codegen-ia32.cc

@ -0,0 +1,163 @@
// Copyright 2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include "v8.h"
#include "codegen-inl.h"
#include "fast-codegen.h"
namespace v8 {
namespace internal {
#define __ ACCESS_MASM(masm_)
// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right, with the
// return address on top of them. The actual argument count matches the
// formal parameter count expected by the function.
//
// The live registers are:
// o edi: the JS function object being called (ie, ourselves)
// o esi: our context
// o ebp: our caller's frame pointer
// o esp: stack pointer (pointing to return address)
//
// The function builds a JS frame. Please see JavaScriptFrameConstants in
// frames-ia32.h for its layout.
void FastCodeGenerator::Generate(FunctionLiteral* fun) {
  // Remember the function literal; the return sequence emitted elsewhere
  // reads the formal parameter count from it.
  function_ = fun;
  SetFunctionPosition(fun);

  // Standard JS frame prologue: save the caller's frame pointer, then push
  // the callee's context and function object (see JavaScriptFrameConstants
  // in frames-ia32.h for the frame layout).
  __ push(ebp);  // Caller's frame pointer.
  __ mov(ebp, esp);
  __ push(esi);  // Callee's context.
  __ push(edi);  // Callee's JS Function.

  { Comment cmnt(masm_, "[ Allocate locals");
    // Every stack-allocated local starts out holding undefined.
    int locals_count = fun->scope()->num_stack_slots();
    for (int i = 0; i < locals_count; i++) {
      __ push(Immediate(Factory::undefined_value()));
    }
  }

  { Comment cmnt(masm_, "[ Stack check");
    // Call the stack-guard stub when esp is at or below the guard limit
    // (stack overflow or a pending interrupt request).
    Label ok;
    ExternalReference stack_guard_limit =
        ExternalReference::address_of_stack_guard_limit();
    __ cmp(esp, Operand::StaticVariable(stack_guard_limit));
    __ j(above_equal, &ok, taken);
    StackCheckStub stub;
    __ CallStub(&stub);
    __ bind(&ok);
  }

  { Comment cmnt(masm_, "[ Body");
    VisitStatements(fun->body());
  }

  { Comment cmnt(masm_, "[ return <undefined>;");
    // Emit a 'return undefined' in case control fell off the end of the
    // body.
    __ mov(eax, Factory::undefined_value());
    SetReturnPosition(fun);
    __ RecordJSReturn();
    // Do not use the leave instruction here because it is too short to
    // patch with the code required by the debugger.
    __ mov(esp, ebp);
    __ pop(ebp);
    // Pop the arguments and the receiver in addition to the return address.
    __ ret((fun->scope()->num_parameters() + 1) * kPointerSize);
  }
}
void FastCodeGenerator::VisitExpressionStatement(ExpressionStatement* stmt) {
  Comment cmnt(masm_, "[ ExpressionStatement");
  // Record the statement position for the debugger, then generate code for
  // the expression; the expression's location decides whether a value is
  // left on the stack.
  SetStatementPosition(stmt);
  Visit(stmt->expression());
}
void FastCodeGenerator::VisitReturnStatement(ReturnStatement* stmt) {
  Comment cmnt(masm_, "[ ReturnStatement");
  SetStatementPosition(stmt);
  // Evaluate the return value onto the stack, then move it into eax, the
  // JS return-value register.
  Visit(stmt->expression());
  __ pop(eax);
  __ RecordJSReturn();
  // Do not use the leave instruction here because it is too short to
  // patch with the code required by the debugger.
  __ mov(esp, ebp);
  __ pop(ebp);
  // Pop the arguments and the receiver in addition to the return address.
  __ ret((function_->scope()->num_parameters() + 1) * kPointerSize);
}
void FastCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
  Comment cmnt(masm_, "[ VariableProxy");
  // The fast code generator only supports variables rewritten to
  // stack-allocated slots.
  Expression* rewrite = expr->var()->rewrite();
  ASSERT(rewrite != NULL);
  Slot* slot = rewrite->AsSlot();
  ASSERT(slot != NULL);
  { Comment cmnt(masm_, "[ Slot");
    if (expr->location().is_temporary()) {
      // The value is needed: push it from its frame slot.
      __ push(Operand(ebp, SlotOffset(slot)));
    } else {
      // The value is not needed; emit nothing.
      ASSERT(expr->location().is_nowhere());
    }
  }
}
void FastCodeGenerator::VisitLiteral(Literal* expr) {
  Comment cmnt(masm_, "[ Literal");
  if (expr->location().is_temporary()) {
    // The value is needed: push the literal's handle as an immediate.
    __ push(Immediate(expr->handle()));
  } else {
    // The value is not needed; emit nothing.
    ASSERT(expr->location().is_nowhere());
  }
}
void FastCodeGenerator::VisitAssignment(Assignment* expr) {
  Comment cmnt(masm_, "[ Assignment");
  // Only plain assignment and variable initialization are supported, and
  // only for variables with stack-allocated slots.
  ASSERT(expr->op() == Token::ASSIGN || expr->op() == Token::INIT_VAR);

  // Leave the right-hand-side value on top of the stack.
  Visit(expr->value());

  Variable* var = expr->target()->AsVariableProxy()->AsVariable();
  ASSERT(var != NULL && var->slot() != NULL);

  if (expr->location().is_temporary()) {
    // The assignment's own value is needed: copy it into the slot while
    // keeping it on the stack.
    __ mov(eax, Operand(esp, 0));
    __ mov(Operand(ebp, SlotOffset(var->slot())), eax);
  } else {
    // The assignment's value is not needed: pop it straight into the slot.
    ASSERT(expr->location().is_nowhere());
    __ pop(Operand(ebp, SlotOffset(var->slot())));
  }
}
} } // namespace v8::internal

11
deps/v8/src/ia32/ic-ia32.cc

@ -298,7 +298,6 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
__ shl(eax, kSmiTagSize); __ shl(eax, kSmiTagSize);
__ ret(0); __ ret(0);
// Slow case: Load name and receiver from stack and jump to runtime. // Slow case: Load name and receiver from stack and jump to runtime.
__ bind(&slow); __ bind(&slow);
__ IncrementCounter(&Counters::keyed_load_generic_slow, 1); __ IncrementCounter(&Counters::keyed_load_generic_slow, 1);
@ -424,14 +423,11 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm) {
__ mov(edx, eax); // Save the value. __ mov(edx, eax); // Save the value.
__ sar(eax, kSmiTagSize); // Untag the value. __ sar(eax, kSmiTagSize); // Untag the value.
{ // Clamp the value to [0..255]. { // Clamp the value to [0..255].
Label done, is_negative; Label done;
__ test(eax, Immediate(0xFFFFFF00)); __ test(eax, Immediate(0xFFFFFF00));
__ j(zero, &done); __ j(zero, &done);
__ j(negative, &is_negative); __ setcc(negative, eax); // 1 if negative, 0 if positive.
__ mov(eax, Immediate(255)); __ dec_b(eax); // 0 if negative, 255 if positive.
__ jmp(&done);
__ bind(&is_negative);
__ xor_(eax, Operand(eax)); // Clear eax.
__ bind(&done); __ bind(&done);
} }
__ mov(ecx, FieldOperand(ecx, PixelArray::kExternalPointerOffset)); __ mov(ecx, FieldOperand(ecx, PixelArray::kExternalPointerOffset));
@ -458,7 +454,6 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm) {
__ sub(Operand(ebx), Immediate(1 << kSmiTagSize)); // decrement ebx again __ sub(Operand(ebx), Immediate(1 << kSmiTagSize)); // decrement ebx again
__ jmp(&fast); __ jmp(&fast);
// Array case: Get the length and the elements array from the JS // Array case: Get the length and the elements array from the JS
// array. Check that the array is in fast mode; if it is the // array. Check that the array is in fast mode; if it is the
// length is always a smi. // length is always a smi.

35
deps/v8/src/ia32/virtual-frame-ia32.cc

@ -161,15 +161,16 @@ void VirtualFrame::SyncRange(int begin, int end) {
// on the stack. // on the stack.
int start = Min(begin, stack_pointer_ + 1); int start = Min(begin, stack_pointer_ + 1);
// If positive we have to adjust the stack pointer. // Emit normal 'push' instructions for elements above stack pointer
int delta = end - stack_pointer_; // and use mov instructions if we are below stack pointer.
if (delta > 0) {
stack_pointer_ = end;
__ sub(Operand(esp), Immediate(delta * kPointerSize));
}
for (int i = start; i <= end; i++) { for (int i = start; i <= end; i++) {
if (!elements_[i].is_synced()) SyncElementBelowStackPointer(i); if (!elements_[i].is_synced()) {
if (i <= stack_pointer_) {
SyncElementBelowStackPointer(i);
} else {
SyncElementByPushing(i);
}
}
} }
} }
@ -454,14 +455,16 @@ void VirtualFrame::Enter() {
Comment cmnt(masm(), "[ Enter JS frame"); Comment cmnt(masm(), "[ Enter JS frame");
#ifdef DEBUG #ifdef DEBUG
// Verify that edi contains a JS function. The following code if (FLAG_debug_code) {
// relies on eax being available for use. // Verify that edi contains a JS function. The following code
__ test(edi, Immediate(kSmiTagMask)); // relies on eax being available for use.
__ Check(not_zero, __ test(edi, Immediate(kSmiTagMask));
"VirtualFrame::Enter - edi is not a function (smi check)."); __ Check(not_zero,
__ CmpObjectType(edi, JS_FUNCTION_TYPE, eax); "VirtualFrame::Enter - edi is not a function (smi check).");
__ Check(equal, __ CmpObjectType(edi, JS_FUNCTION_TYPE, eax);
"VirtualFrame::Enter - edi is not a function (map check)."); __ Check(equal,
"VirtualFrame::Enter - edi is not a function (map check).");
}
#endif #endif
EmitPush(ebp); EmitPush(ebp);

3
deps/v8/src/jsregexp.cc

@ -45,13 +45,10 @@
#ifdef V8_NATIVE_REGEXP #ifdef V8_NATIVE_REGEXP
#if V8_TARGET_ARCH_IA32 #if V8_TARGET_ARCH_IA32
#include "ia32/macro-assembler-ia32.h"
#include "ia32/regexp-macro-assembler-ia32.h" #include "ia32/regexp-macro-assembler-ia32.h"
#elif V8_TARGET_ARCH_X64 #elif V8_TARGET_ARCH_X64
#include "x64/macro-assembler-x64.h"
#include "x64/regexp-macro-assembler-x64.h" #include "x64/regexp-macro-assembler-x64.h"
#elif V8_TARGET_ARCH_ARM #elif V8_TARGET_ARCH_ARM
#include "arm/macro-assembler-arm.h"
#include "arm/regexp-macro-assembler-arm.h" #include "arm/regexp-macro-assembler-arm.h"
#else #else
#error Unsupported target architecture. #error Unsupported target architecture.

2
deps/v8/src/jsregexp.h

@ -28,6 +28,8 @@
#ifndef V8_JSREGEXP_H_ #ifndef V8_JSREGEXP_H_
#define V8_JSREGEXP_H_ #define V8_JSREGEXP_H_
#include "macro-assembler.h"
namespace v8 { namespace v8 {
namespace internal { namespace internal {

2
deps/v8/src/jump-target.h

@ -28,6 +28,8 @@
#ifndef V8_JUMP_TARGET_H_ #ifndef V8_JUMP_TARGET_H_
#define V8_JUMP_TARGET_H_ #define V8_JUMP_TARGET_H_
#include "macro-assembler.h"
namespace v8 { namespace v8 {
namespace internal { namespace internal {

56
deps/v8/src/location.h

@ -0,0 +1,56 @@
// Copyright 2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#ifndef V8_LOCATION_H_
#define V8_LOCATION_H_
#include "utils.h"
namespace v8 {
namespace internal {
class Location BASE_EMBEDDED {
public:
static Location Temporary() { return Location(TEMP); }
static Location Nowhere() { return Location(NOWHERE); }
static Location Constant() { return Location(CONSTANT); }
bool is_temporary() { return type_ == TEMP; }
bool is_nowhere() { return type_ == NOWHERE; }
private:
enum Type { TEMP, NOWHERE, CONSTANT };
explicit Location(Type type) : type_(type) {}
Type type_;
};
} } // namespace v8::internal
#endif // V8_LOCATION_H_

15
deps/v8/src/log.cc

@ -934,6 +934,21 @@ void Logger::HeapSampleJSRetainersEvent(
} }
// Logs one heap-profile "producer" record: the constructor name followed by
// the allocation stack, as a "heap-js-prod-item" log line.
void Logger::HeapSampleJSProducerEvent(const char* constructor,
                                       Address* stack) {
#ifdef ENABLE_LOGGING_AND_PROFILING
  // Only emitted when logging is enabled and --log-gc is set.
  if (!Log::IsEnabled() || !FLAG_log_gc) return;
  LogMessageBuilder msg;
  msg.Append("heap-js-prod-item,%s", constructor);
  // 'stack' is treated as a NULL-terminated array of addresses; each entry
  // is appended in hex.
  while (*stack != NULL) {
    msg.Append(",0x%" V8PRIxPTR, *stack++);
  }
  msg.Append("\n");
  msg.WriteToLogFile();
#endif
}
void Logger::DebugTag(const char* call_site_tag) { void Logger::DebugTag(const char* call_site_tag) {
#ifdef ENABLE_LOGGING_AND_PROFILING #ifdef ENABLE_LOGGING_AND_PROFILING
if (!Log::IsEnabled() || !FLAG_log) return; if (!Log::IsEnabled() || !FLAG_log) return;

2
deps/v8/src/log.h

@ -223,6 +223,8 @@ class Logger {
int number, int bytes); int number, int bytes);
static void HeapSampleJSRetainersEvent(const char* constructor, static void HeapSampleJSRetainersEvent(const char* constructor,
const char* event); const char* event);
static void HeapSampleJSProducerEvent(const char* constructor,
Address* stack);
static void HeapSampleStats(const char* space, const char* kind, static void HeapSampleStats(const char* space, const char* kind,
int capacity, int used); int capacity, int used);

4
deps/v8/src/macros.py

@ -118,9 +118,7 @@ macro NUMBER_OF_CAPTURES(array) = ((array)[0]);
# a type error is thrown. # a type error is thrown.
macro DATE_VALUE(arg) = (%_ClassOf(arg) === 'Date' ? %_ValueOf(arg) : ThrowDateTypeError()); macro DATE_VALUE(arg) = (%_ClassOf(arg) === 'Date' ? %_ValueOf(arg) : ThrowDateTypeError());
# Last input and last subject are after the captures so we can omit them on # Last input and last subject of regexp matches.
# results returned from global searches. Beware - these evaluate their
# arguments twice.
macro LAST_SUBJECT(array) = ((array)[1]); macro LAST_SUBJECT(array) = ((array)[1]);
macro LAST_INPUT(array) = ((array)[2]); macro LAST_INPUT(array) = ((array)[2]);

5
deps/v8/src/mark-compact.cc

@ -279,7 +279,7 @@ class MarkingVisitor : public ObjectVisitor {
void VisitDebugTarget(RelocInfo* rinfo) { void VisitDebugTarget(RelocInfo* rinfo) {
ASSERT(RelocInfo::IsJSReturn(rinfo->rmode()) && ASSERT(RelocInfo::IsJSReturn(rinfo->rmode()) &&
rinfo->IsCallInstruction()); rinfo->IsPatchedReturnSequence());
HeapObject* code = Code::GetCodeFromTargetAddress(rinfo->call_address()); HeapObject* code = Code::GetCodeFromTargetAddress(rinfo->call_address());
MarkCompactCollector::MarkObject(code); MarkCompactCollector::MarkObject(code);
} }
@ -1382,7 +1382,8 @@ class UpdatingVisitor: public ObjectVisitor {
} }
void VisitDebugTarget(RelocInfo* rinfo) { void VisitDebugTarget(RelocInfo* rinfo) {
ASSERT(RelocInfo::IsJSReturn(rinfo->rmode()) && rinfo->IsCallInstruction()); ASSERT(RelocInfo::IsJSReturn(rinfo->rmode()) &&
rinfo->IsPatchedReturnSequence());
Object* target = Code::GetCodeFromTargetAddress(rinfo->call_address()); Object* target = Code::GetCodeFromTargetAddress(rinfo->call_address());
VisitPointer(&target); VisitPointer(&target);
rinfo->set_call_address( rinfo->set_call_address(

2
deps/v8/src/mirror-delay.js

@ -764,7 +764,7 @@ ObjectMirror.prototype.referencedBy = function(opt_max_objects) {
ObjectMirror.prototype.toText = function() { ObjectMirror.prototype.toText = function() {
var name; var name;
var ctor = this.constructorFunction(); var ctor = this.constructorFunction();
if (ctor.isUndefined()) { if (!ctor.isFunction()) {
name = this.className(); name = this.className();
} else { } else {
name = ctor.name(); name = ctor.name();

1
deps/v8/src/objects-debug.cc

@ -29,7 +29,6 @@
#include "disassembler.h" #include "disassembler.h"
#include "disasm.h" #include "disasm.h"
#include "macro-assembler.h"
#include "jsregexp.h" #include "jsregexp.h"
namespace v8 { namespace v8 {

51
deps/v8/src/objects-inl.h

@ -744,15 +744,17 @@ int Smi::value() {
Smi* Smi::FromInt(int value) { Smi* Smi::FromInt(int value) {
ASSERT(Smi::IsValid(value)); ASSERT(Smi::IsValid(value));
int smi_shift_bits = kSmiTagSize + kSmiShiftSize;
intptr_t tagged_value = intptr_t tagged_value =
(static_cast<intptr_t>(value) << kSmiTagSize) | kSmiTag; (static_cast<intptr_t>(value) << smi_shift_bits) | kSmiTag;
return reinterpret_cast<Smi*>(tagged_value); return reinterpret_cast<Smi*>(tagged_value);
} }
Smi* Smi::FromIntptr(intptr_t value) { Smi* Smi::FromIntptr(intptr_t value) {
ASSERT(Smi::IsValid(value)); ASSERT(Smi::IsValid(value));
return reinterpret_cast<Smi*>((value << kSmiTagSize) | kSmiTag); int smi_shift_bits = kSmiTagSize + kSmiShiftSize;
return reinterpret_cast<Smi*>((value << smi_shift_bits) | kSmiTag);
} }
@ -776,7 +778,7 @@ int Failure::requested() const {
kFailureTypeTagSize + kSpaceTagSize - kObjectAlignmentBits; kFailureTypeTagSize + kSpaceTagSize - kObjectAlignmentBits;
STATIC_ASSERT(kShiftBits >= 0); STATIC_ASSERT(kShiftBits >= 0);
ASSERT(type() == RETRY_AFTER_GC); ASSERT(type() == RETRY_AFTER_GC);
return value() >> kShiftBits; return static_cast<int>(value() >> kShiftBits);
} }
@ -802,29 +804,31 @@ Failure* Failure::OutOfMemoryException() {
} }
int Failure::value() const { intptr_t Failure::value() const {
return static_cast<int>(reinterpret_cast<intptr_t>(this) >> kFailureTagSize); return reinterpret_cast<intptr_t>(this) >> kFailureTagSize;
} }
Failure* Failure::RetryAfterGC(int requested_bytes) { Failure* Failure::RetryAfterGC(int requested_bytes) {
// Assert that the space encoding fits in the three bytes allotted for it. // Assert that the space encoding fits in the three bytes allotted for it.
ASSERT((LAST_SPACE & ~kSpaceTagMask) == 0); ASSERT((LAST_SPACE & ~kSpaceTagMask) == 0);
int requested = requested_bytes >> kObjectAlignmentBits; intptr_t requested = requested_bytes >> kObjectAlignmentBits;
int tag_bits = kSpaceTagSize + kFailureTypeTagSize;
if (((requested << tag_bits) >> tag_bits) != requested) {
// No room for entire requested size in the bits. Round down to
// maximally representable size.
requested = static_cast<intptr_t>(
(~static_cast<uintptr_t>(0)) >> (tag_bits + 1));
}
int value = (requested << kSpaceTagSize) | NEW_SPACE; int value = (requested << kSpaceTagSize) | NEW_SPACE;
ASSERT(value >> kSpaceTagSize == requested);
ASSERT(Smi::IsValid(value));
ASSERT(value == ((value << kFailureTypeTagSize) >> kFailureTypeTagSize));
ASSERT(Smi::IsValid(value << kFailureTypeTagSize));
return Construct(RETRY_AFTER_GC, value); return Construct(RETRY_AFTER_GC, value);
} }
Failure* Failure::Construct(Type type, int value) { Failure* Failure::Construct(Type type, intptr_t value) {
int info = (value << kFailureTypeTagSize) | type; intptr_t info = (static_cast<intptr_t>(value) << kFailureTypeTagSize) | type;
ASSERT(((info << kFailureTagSize) >> kFailureTagSize) == info); ASSERT(((info << kFailureTagSize) >> kFailureTagSize) == info);
return reinterpret_cast<Failure*>( return reinterpret_cast<Failure*>((info << kFailureTagSize) | kFailureTag);
(static_cast<intptr_t>(info) << kFailureTagSize) | kFailureTag);
} }
@ -832,6 +836,11 @@ bool Smi::IsValid(intptr_t value) {
#ifdef DEBUG #ifdef DEBUG
bool in_range = (value >= kMinValue) && (value <= kMaxValue); bool in_range = (value >= kMinValue) && (value <= kMaxValue);
#endif #endif
#ifdef V8_TARGET_ARCH_X64
// To be representable as a long smi, the value must be a 32-bit integer.
bool result = (value == static_cast<int32_t>(value));
#else
// To be representable as an tagged small integer, the two // To be representable as an tagged small integer, the two
// most-significant bits of 'value' must be either 00 or 11 due to // most-significant bits of 'value' must be either 00 or 11 due to
// sign-extension. To check this we add 01 to the two // sign-extension. To check this we add 01 to the two
@ -843,20 +852,8 @@ bool Smi::IsValid(intptr_t value) {
// in fact doesn't work correctly with gcc4.1.1 in some cases: The // in fact doesn't work correctly with gcc4.1.1 in some cases: The
// compiler may produce undefined results in case of signed integer // compiler may produce undefined results in case of signed integer
// overflow. The computation must be done w/ unsigned ints. // overflow. The computation must be done w/ unsigned ints.
bool result = bool result = (static_cast<uintptr_t>(value + 0x40000000U) < 0x80000000U);
((static_cast<unsigned int>(value) + 0x40000000U) & 0x80000000U) == 0;
ASSERT(result == in_range);
return result;
}
bool Smi::IsIntptrValid(intptr_t value) {
#ifdef DEBUG
bool in_range = (value >= kMinValue) && (value <= kMaxValue);
#endif #endif
// See Smi::IsValid(int) for description.
bool result =
((static_cast<uintptr_t>(value) + 0x40000000U) < 0x80000000U);
ASSERT(result == in_range); ASSERT(result == in_range);
return result; return result;
} }

17
deps/v8/src/objects.cc

@ -618,12 +618,12 @@ void Smi::SmiPrint(StringStream* accumulator) {
void Failure::FailurePrint(StringStream* accumulator) { void Failure::FailurePrint(StringStream* accumulator) {
accumulator->Add("Failure(%d)", value()); accumulator->Add("Failure(%p)", reinterpret_cast<void*>(value()));
} }
void Failure::FailurePrint() { void Failure::FailurePrint() {
PrintF("Failure(%d)", value()); PrintF("Failure(%p)", reinterpret_cast<void*>(value()));
} }
@ -4983,7 +4983,8 @@ void ObjectVisitor::VisitCodeTarget(RelocInfo* rinfo) {
void ObjectVisitor::VisitDebugTarget(RelocInfo* rinfo) { void ObjectVisitor::VisitDebugTarget(RelocInfo* rinfo) {
ASSERT(RelocInfo::IsJSReturn(rinfo->rmode()) && rinfo->IsCallInstruction()); ASSERT(RelocInfo::IsJSReturn(rinfo->rmode()) &&
rinfo->IsPatchedReturnSequence());
Object* target = Code::GetCodeFromTargetAddress(rinfo->call_address()); Object* target = Code::GetCodeFromTargetAddress(rinfo->call_address());
Object* old_target = target; Object* old_target = target;
VisitPointer(&target); VisitPointer(&target);
@ -5009,7 +5010,7 @@ void Code::CodeIterateBody(ObjectVisitor* v) {
#ifdef ENABLE_DEBUGGER_SUPPORT #ifdef ENABLE_DEBUGGER_SUPPORT
} else if (Debug::has_break_points() && } else if (Debug::has_break_points() &&
RelocInfo::IsJSReturn(rmode) && RelocInfo::IsJSReturn(rmode) &&
it.rinfo()->IsCallInstruction()) { it.rinfo()->IsPatchedReturnSequence()) {
v->VisitDebugTarget(it.rinfo()); v->VisitDebugTarget(it.rinfo());
#endif #endif
} else if (rmode == RelocInfo::RUNTIME_ENTRY) { } else if (rmode == RelocInfo::RUNTIME_ENTRY) {
@ -5047,7 +5048,7 @@ void Code::CopyFrom(const CodeDesc& desc) {
desc.reloc_size); desc.reloc_size);
// unbox handles and relocate // unbox handles and relocate
int delta = instruction_start() - desc.buffer; intptr_t delta = instruction_start() - desc.buffer;
int mode_mask = RelocInfo::kCodeTargetMask | int mode_mask = RelocInfo::kCodeTargetMask |
RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) | RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
RelocInfo::kApplyMask; RelocInfo::kApplyMask;
@ -6562,6 +6563,10 @@ class RegExpKey : public HashTableKey {
: string_(string), : string_(string),
flags_(Smi::FromInt(flags.value())) { } flags_(Smi::FromInt(flags.value())) { }
// Rather than storing the key in the hash table, a pointer to the
// stored value is stored where the key should be. IsMatch then
// compares the search key to the found object, rather than comparing
// a key to a key.
bool IsMatch(Object* obj) { bool IsMatch(Object* obj) {
FixedArray* val = FixedArray::cast(obj); FixedArray* val = FixedArray::cast(obj);
return string_->Equals(String::cast(val->get(JSRegExp::kSourceIndex))) return string_->Equals(String::cast(val->get(JSRegExp::kSourceIndex)))
@ -7221,6 +7226,8 @@ Object* CompilationCacheTable::PutRegExp(String* src,
CompilationCacheTable* cache = CompilationCacheTable* cache =
reinterpret_cast<CompilationCacheTable*>(obj); reinterpret_cast<CompilationCacheTable*>(obj);
int entry = cache->FindInsertionEntry(key.Hash()); int entry = cache->FindInsertionEntry(key.Hash());
// We store the value in the key slot, and compare the search key
// to the stored value with a custom IsMatch function during lookups.
cache->set(EntryToIndex(entry), value); cache->set(EntryToIndex(entry), value);
cache->set(EntryToIndex(entry) + 1, value); cache->set(EntryToIndex(entry) + 1, value);
cache->ElementAdded(); cache->ElementAdded();

51
deps/v8/src/objects.h

@ -32,6 +32,9 @@
#include "code-stubs.h" #include "code-stubs.h"
#include "smart-pointer.h" #include "smart-pointer.h"
#include "unicode-inl.h" #include "unicode-inl.h"
#if V8_TARGET_ARCH_ARM
#include "arm/constants-arm.h"
#endif
// //
// All object types in the V8 JavaScript are described in this file. // All object types in the V8 JavaScript are described in this file.
@ -904,10 +907,10 @@ class Object BASE_EMBEDDED {
// Smi represents integer Numbers that can be stored in 31 bits. // Smi represents integer Numbers that can be stored in 31 bits.
// Smis are immediate which means they are NOT allocated in the heap. // Smis are immediate which means they are NOT allocated in the heap.
// Smi stands for small integer.
// The this pointer has the following format: [31 bit signed int] 0 // The this pointer has the following format: [31 bit signed int] 0
// On 64-bit, the top 32 bits of the pointer is allowed to have any // For long smis it has the following format:
// value. // [32 bit signed int] [31 bits zero padding] 0
// Smi stands for small integer.
class Smi: public Object { class Smi: public Object {
public: public:
// Returns the integer value. // Returns the integer value.
@ -921,8 +924,6 @@ class Smi: public Object {
// Returns whether value can be represented in a Smi. // Returns whether value can be represented in a Smi.
static inline bool IsValid(intptr_t value); static inline bool IsValid(intptr_t value);
static inline bool IsIntptrValid(intptr_t);
// Casting. // Casting.
static inline Smi* cast(Object* object); static inline Smi* cast(Object* object);
@ -933,10 +934,8 @@ class Smi: public Object {
void SmiVerify(); void SmiVerify();
#endif #endif
static const int kSmiNumBits = 31; static const int kMinValue = (-1 << (kSmiValueSize - 1));
// Min and max limits for Smi values. static const int kMaxValue = -(kMinValue + 1);
static const int kMinValue = -(1 << (kSmiNumBits - 1));
static const int kMaxValue = (1 << (kSmiNumBits - 1)) - 1;
private: private:
DISALLOW_IMPLICIT_CONSTRUCTORS(Smi); DISALLOW_IMPLICIT_CONSTRUCTORS(Smi);
@ -949,10 +948,10 @@ class Smi: public Object {
// //
// Failures are a single word, encoded as follows: // Failures are a single word, encoded as follows:
// +-------------------------+---+--+--+ // +-------------------------+---+--+--+
// |rrrrrrrrrrrrrrrrrrrrrrrrr|sss|tt|11| // |...rrrrrrrrrrrrrrrrrrrrrr|sss|tt|11|
// +-------------------------+---+--+--+ // +-------------------------+---+--+--+
// 3 7 6 4 32 10 // 7 6 4 32 10
// 1 //
// //
// The low two bits, 0-1, are the failure tag, 11. The next two bits, // The low two bits, 0-1, are the failure tag, 11. The next two bits,
// 2-3, are a failure type tag 'tt' with possible values: // 2-3, are a failure type tag 'tt' with possible values:
@ -1014,8 +1013,8 @@ class Failure: public Object {
#endif #endif
private: private:
inline int value() const; inline intptr_t value() const;
static inline Failure* Construct(Type type, int value = 0); static inline Failure* Construct(Type type, intptr_t value = 0);
DISALLOW_IMPLICIT_CONSTRUCTORS(Failure); DISALLOW_IMPLICIT_CONSTRUCTORS(Failure);
}; };
@ -1291,7 +1290,7 @@ class HeapNumber: public HeapObject {
// is a mixture of sign, exponent and mantissa. Our current platforms are all // is a mixture of sign, exponent and mantissa. Our current platforms are all
// little endian apart from non-EABI arm which is little endian with big // little endian apart from non-EABI arm which is little endian with big
// endian floating point word ordering! // endian floating point word ordering!
#if !defined(V8_HOST_ARCH_ARM) || __ARM_EABI__ #if !defined(V8_HOST_ARCH_ARM) || defined(USE_ARM_EABI)
static const int kMantissaOffset = kValueOffset; static const int kMantissaOffset = kValueOffset;
static const int kExponentOffset = kValueOffset + 4; static const int kExponentOffset = kValueOffset + 4;
#else #else
@ -2036,33 +2035,33 @@ class DescriptorArray: public FixedArray {
// // The Element size indicates number of elements per entry. // // The Element size indicates number of elements per entry.
// static const int kEntrySize = ..; // static const int kEntrySize = ..;
// }; // };
// table. The prefix size indicates an amount of memory in the // The prefix size indicates an amount of memory in the
// beginning of the backing storage that can be used for non-element // beginning of the backing storage that can be used for non-element
// information by subclasses. // information by subclasses.
template<typename Shape, typename Key> template<typename Shape, typename Key>
class HashTable: public FixedArray { class HashTable: public FixedArray {
public: public:
// Returns the number of elements in the dictionary. // Returns the number of elements in the hash table.
int NumberOfElements() { int NumberOfElements() {
return Smi::cast(get(kNumberOfElementsIndex))->value(); return Smi::cast(get(kNumberOfElementsIndex))->value();
} }
// Returns the capacity of the dictionary. // Returns the capacity of the hash table.
int Capacity() { int Capacity() {
return Smi::cast(get(kCapacityIndex))->value(); return Smi::cast(get(kCapacityIndex))->value();
} }
// ElementAdded should be called whenever an element is added to a // ElementAdded should be called whenever an element is added to a
// dictionary. // hash table.
void ElementAdded() { SetNumberOfElements(NumberOfElements() + 1); } void ElementAdded() { SetNumberOfElements(NumberOfElements() + 1); }
// ElementRemoved should be called whenever an element is removed from // ElementRemoved should be called whenever an element is removed from
// a dictionary. // a hash table.
void ElementRemoved() { SetNumberOfElements(NumberOfElements() - 1); } void ElementRemoved() { SetNumberOfElements(NumberOfElements() - 1); }
void ElementsRemoved(int n) { SetNumberOfElements(NumberOfElements() - n); } void ElementsRemoved(int n) { SetNumberOfElements(NumberOfElements() - n); }
// Returns a new array for dictionary usage. Might return Failure. // Returns a new HashTable object. Might return Failure.
static Object* Allocate(int at_least_space_for); static Object* Allocate(int at_least_space_for);
// Returns the key at entry. // Returns the key at entry.
@ -2112,7 +2111,7 @@ class HashTable: public FixedArray {
return (entry * kEntrySize) + kElementsStartIndex; return (entry * kEntrySize) + kElementsStartIndex;
} }
// Update the number of elements in the dictionary. // Update the number of elements in the hash table.
void SetNumberOfElements(int nof) { void SetNumberOfElements(int nof) {
fast_set(this, kNumberOfElementsIndex, Smi::FromInt(nof)); fast_set(this, kNumberOfElementsIndex, Smi::FromInt(nof));
} }
@ -2148,7 +2147,7 @@ class HashTableKey {
virtual uint32_t Hash() = 0; virtual uint32_t Hash() = 0;
// Returns the hash value for object. // Returns the hash value for object.
virtual uint32_t HashForObject(Object* key) = 0; virtual uint32_t HashForObject(Object* key) = 0;
// Returns the key object for storing into the dictionary. // Returns the key object for storing into the hash table.
// If allocation fails a failure object is returned. // If allocation fails a failure object is returned.
virtual Object* AsObject() = 0; virtual Object* AsObject() = 0;
// Required. // Required.
@ -2495,6 +2494,9 @@ class PixelArray: public Array {
void PixelArrayVerify(); void PixelArrayVerify();
#endif // DEBUG #endif // DEBUG
// Maximal acceptable length for a pixel array.
static const int kMaxLength = 0x3fffffff;
// PixelArray headers are not quadword aligned. // PixelArray headers are not quadword aligned.
static const int kExternalPointerOffset = Array::kAlignedSize; static const int kExternalPointerOffset = Array::kAlignedSize;
static const int kHeaderSize = kExternalPointerOffset + kPointerSize; static const int kHeaderSize = kExternalPointerOffset + kPointerSize;
@ -3576,6 +3578,7 @@ class CompilationCacheShape {
static const int kEntrySize = 2; static const int kEntrySize = 2;
}; };
class CompilationCacheTable: public HashTable<CompilationCacheShape, class CompilationCacheTable: public HashTable<CompilationCacheShape,
HashTableKey*> { HashTableKey*> {
public: public:
@ -3849,6 +3852,8 @@ class String: public HeapObject {
static const int kShortLengthShift = kHashShift + kShortStringTag; static const int kShortLengthShift = kHashShift + kShortStringTag;
static const int kMediumLengthShift = kHashShift + kMediumStringTag; static const int kMediumLengthShift = kHashShift + kMediumStringTag;
static const int kLongLengthShift = kHashShift + kLongStringTag; static const int kLongLengthShift = kHashShift + kLongStringTag;
// Maximal string length that can be stored in the hash/length field.
static const int kMaxLength = (1 << (32 - kLongLengthShift)) - 1;
// Limit for truncation in short printing. // Limit for truncation in short printing.
static const int kMaxShortPrintLength = 1024; static const int kMaxShortPrintLength = 1024;

55
deps/v8/src/parser.cc

@ -177,8 +177,8 @@ class Parser {
Statement* ParseWithStatement(ZoneStringList* labels, bool* ok); Statement* ParseWithStatement(ZoneStringList* labels, bool* ok);
CaseClause* ParseCaseClause(bool* default_seen_ptr, bool* ok); CaseClause* ParseCaseClause(bool* default_seen_ptr, bool* ok);
SwitchStatement* ParseSwitchStatement(ZoneStringList* labels, bool* ok); SwitchStatement* ParseSwitchStatement(ZoneStringList* labels, bool* ok);
LoopStatement* ParseDoStatement(ZoneStringList* labels, bool* ok); DoWhileStatement* ParseDoWhileStatement(ZoneStringList* labels, bool* ok);
LoopStatement* ParseWhileStatement(ZoneStringList* labels, bool* ok); WhileStatement* ParseWhileStatement(ZoneStringList* labels, bool* ok);
Statement* ParseForStatement(ZoneStringList* labels, bool* ok); Statement* ParseForStatement(ZoneStringList* labels, bool* ok);
Statement* ParseThrowStatement(bool* ok); Statement* ParseThrowStatement(bool* ok);
Expression* MakeCatchContext(Handle<String> id, VariableProxy* value); Expression* MakeCatchContext(Handle<String> id, VariableProxy* value);
@ -675,9 +675,6 @@ class TemporaryScope BASE_EMBEDDED {
} }
int materialized_literal_count() { return materialized_literal_count_; } int materialized_literal_count() { return materialized_literal_count_; }
void set_contains_array_literal() { contains_array_literal_ = true; }
bool contains_array_literal() { return contains_array_literal_; }
void SetThisPropertyAssignmentInfo( void SetThisPropertyAssignmentInfo(
bool only_this_property_assignments, bool only_this_property_assignments,
bool only_simple_this_property_assignments, bool only_simple_this_property_assignments,
@ -700,17 +697,11 @@ class TemporaryScope BASE_EMBEDDED {
void AddProperty() { expected_property_count_++; } void AddProperty() { expected_property_count_++; }
int expected_property_count() { return expected_property_count_; } int expected_property_count() { return expected_property_count_; }
private: private:
// Captures the number of nodes that need materialization in the // Captures the number of literals that need materialization in the
// function. regexp literals, and boilerplate for object literals. // function. Includes regexp literals, and boilerplate for object
// and array literals.
int materialized_literal_count_; int materialized_literal_count_;
// Captures whether or not the function contains array literals. If
// the function contains array literals, we have to allocate space
// for the array constructor in the literals array of the function.
// This array constructor is used when creating the actual array
// literals.
bool contains_array_literal_;
// Properties count estimation. // Properties count estimation.
int expected_property_count_; int expected_property_count_;
@ -728,7 +719,6 @@ class TemporaryScope BASE_EMBEDDED {
TemporaryScope::TemporaryScope(Parser* parser) TemporaryScope::TemporaryScope(Parser* parser)
: materialized_literal_count_(0), : materialized_literal_count_(0),
contains_array_literal_(false),
expected_property_count_(0), expected_property_count_(0),
only_this_property_assignments_(false), only_this_property_assignments_(false),
only_simple_this_property_assignments_(false), only_simple_this_property_assignments_(false),
@ -1236,7 +1226,6 @@ FunctionLiteral* Parser::ParseProgram(Handle<String> source,
top_scope_, top_scope_,
body.elements(), body.elements(),
temp_scope.materialized_literal_count(), temp_scope.materialized_literal_count(),
temp_scope.contains_array_literal(),
temp_scope.expected_property_count(), temp_scope.expected_property_count(),
temp_scope.only_this_property_assignments(), temp_scope.only_this_property_assignments(),
temp_scope.only_simple_this_property_assignments(), temp_scope.only_simple_this_property_assignments(),
@ -1692,7 +1681,7 @@ Statement* Parser::ParseStatement(ZoneStringList* labels, bool* ok) {
break; break;
case Token::DO: case Token::DO:
stmt = ParseDoStatement(labels, ok); stmt = ParseDoWhileStatement(labels, ok);
break; break;
case Token::WHILE: case Token::WHILE:
@ -1903,7 +1892,7 @@ Statement* Parser::ParseNativeDeclaration(bool* ok) {
const int literals = fun->NumberOfLiterals(); const int literals = fun->NumberOfLiterals();
Handle<Code> code = Handle<Code>(fun->shared()->code()); Handle<Code> code = Handle<Code>(fun->shared()->code());
Handle<JSFunction> boilerplate = Handle<JSFunction> boilerplate =
Factory::NewFunctionBoilerplate(name, literals, false, code); Factory::NewFunctionBoilerplate(name, literals, code);
// Copy the function data to the boilerplate. Used by // Copy the function data to the boilerplate. Used by
// builtins.cc:HandleApiCall to perform argument type checks and to // builtins.cc:HandleApiCall to perform argument type checks and to
@ -2361,7 +2350,7 @@ Block* Parser::WithHelper(Expression* obj,
exit->AddStatement(NEW(WithExitStatement())); exit->AddStatement(NEW(WithExitStatement()));
// Return a try-finally statement. // Return a try-finally statement.
TryFinally* wrapper = NEW(TryFinally(body, exit)); TryFinallyStatement* wrapper = NEW(TryFinallyStatement(body, exit));
wrapper->set_escaping_targets(collector.targets()); wrapper->set_escaping_targets(collector.targets());
result->AddStatement(wrapper); result->AddStatement(wrapper);
} }
@ -2537,7 +2526,8 @@ TryStatement* Parser::ParseTryStatement(bool* ok) {
// 'try { try { } catch { } } finally { }' // 'try { try { } catch { } } finally { }'
if (!is_pre_parsing_ && catch_block != NULL && finally_block != NULL) { if (!is_pre_parsing_ && catch_block != NULL && finally_block != NULL) {
TryCatch* statement = NEW(TryCatch(try_block, catch_var, catch_block)); TryCatchStatement* statement =
NEW(TryCatchStatement(try_block, catch_var, catch_block));
statement->set_escaping_targets(collector.targets()); statement->set_escaping_targets(collector.targets());
try_block = NEW(Block(NULL, 1, false)); try_block = NEW(Block(NULL, 1, false));
try_block->AddStatement(statement); try_block->AddStatement(statement);
@ -2548,11 +2538,11 @@ TryStatement* Parser::ParseTryStatement(bool* ok) {
if (!is_pre_parsing_) { if (!is_pre_parsing_) {
if (catch_block != NULL) { if (catch_block != NULL) {
ASSERT(finally_block == NULL); ASSERT(finally_block == NULL);
result = NEW(TryCatch(try_block, catch_var, catch_block)); result = NEW(TryCatchStatement(try_block, catch_var, catch_block));
result->set_escaping_targets(collector.targets()); result->set_escaping_targets(collector.targets());
} else { } else {
ASSERT(finally_block != NULL); ASSERT(finally_block != NULL);
result = NEW(TryFinally(try_block, finally_block)); result = NEW(TryFinallyStatement(try_block, finally_block));
// Add the jump targets of the try block and the catch block. // Add the jump targets of the try block and the catch block.
for (int i = 0; i < collector.targets()->length(); i++) { for (int i = 0; i < collector.targets()->length(); i++) {
catch_collector.AddTarget(collector.targets()->at(i)); catch_collector.AddTarget(collector.targets()->at(i));
@ -2565,11 +2555,12 @@ TryStatement* Parser::ParseTryStatement(bool* ok) {
} }
LoopStatement* Parser::ParseDoStatement(ZoneStringList* labels, bool* ok) { DoWhileStatement* Parser::ParseDoWhileStatement(ZoneStringList* labels,
bool* ok) {
// DoStatement :: // DoStatement ::
// 'do' Statement 'while' '(' Expression ')' ';' // 'do' Statement 'while' '(' Expression ')' ';'
LoopStatement* loop = NEW(LoopStatement(labels, LoopStatement::DO_LOOP)); DoWhileStatement* loop = NEW(DoWhileStatement(labels));
Target target(this, loop); Target target(this, loop);
Expect(Token::DO, CHECK_OK); Expect(Token::DO, CHECK_OK);
@ -2585,16 +2576,16 @@ LoopStatement* Parser::ParseDoStatement(ZoneStringList* labels, bool* ok) {
// ExpectSemicolon() functionality here. // ExpectSemicolon() functionality here.
if (peek() == Token::SEMICOLON) Consume(Token::SEMICOLON); if (peek() == Token::SEMICOLON) Consume(Token::SEMICOLON);
if (loop) loop->Initialize(NULL, cond, NULL, body); if (loop != NULL) loop->Initialize(cond, body);
return loop; return loop;
} }
LoopStatement* Parser::ParseWhileStatement(ZoneStringList* labels, bool* ok) { WhileStatement* Parser::ParseWhileStatement(ZoneStringList* labels, bool* ok) {
// WhileStatement :: // WhileStatement ::
// 'while' '(' Expression ')' Statement // 'while' '(' Expression ')' Statement
LoopStatement* loop = NEW(LoopStatement(labels, LoopStatement::WHILE_LOOP)); WhileStatement* loop = NEW(WhileStatement(labels));
Target target(this, loop); Target target(this, loop);
Expect(Token::WHILE, CHECK_OK); Expect(Token::WHILE, CHECK_OK);
@ -2603,7 +2594,7 @@ LoopStatement* Parser::ParseWhileStatement(ZoneStringList* labels, bool* ok) {
Expect(Token::RPAREN, CHECK_OK); Expect(Token::RPAREN, CHECK_OK);
Statement* body = ParseStatement(NULL, CHECK_OK); Statement* body = ParseStatement(NULL, CHECK_OK);
if (loop) loop->Initialize(NULL, cond, NULL, body); if (loop != NULL) loop->Initialize(cond, body);
return loop; return loop;
} }
@ -2676,7 +2667,7 @@ Statement* Parser::ParseForStatement(ZoneStringList* labels, bool* ok) {
} }
// Standard 'for' loop // Standard 'for' loop
LoopStatement* loop = NEW(LoopStatement(labels, LoopStatement::FOR_LOOP)); ForStatement* loop = NEW(ForStatement(labels));
Target target(this, loop); Target target(this, loop);
// Parsed initializer at this point. // Parsed initializer at this point.
@ -3304,7 +3295,6 @@ Expression* Parser::ParseArrayLiteral(bool* ok) {
Expect(Token::RBRACK, CHECK_OK); Expect(Token::RBRACK, CHECK_OK);
// Update the scope information before the pre-parsing bailout. // Update the scope information before the pre-parsing bailout.
temp_scope_->set_contains_array_literal();
int literal_index = temp_scope_->NextMaterializedLiteralIndex(); int literal_index = temp_scope_->NextMaterializedLiteralIndex();
if (is_pre_parsing_) return NULL; if (is_pre_parsing_) return NULL;
@ -3634,7 +3624,6 @@ FunctionLiteral* Parser::ParseFunctionLiteral(Handle<String> var_name,
int materialized_literal_count; int materialized_literal_count;
int expected_property_count; int expected_property_count;
bool contains_array_literal;
bool only_this_property_assignments; bool only_this_property_assignments;
bool only_simple_this_property_assignments; bool only_simple_this_property_assignments;
Handle<FixedArray> this_property_assignments; Handle<FixedArray> this_property_assignments;
@ -3648,12 +3637,10 @@ FunctionLiteral* Parser::ParseFunctionLiteral(Handle<String> var_name,
only_this_property_assignments = false; only_this_property_assignments = false;
only_simple_this_property_assignments = false; only_simple_this_property_assignments = false;
this_property_assignments = Factory::empty_fixed_array(); this_property_assignments = Factory::empty_fixed_array();
contains_array_literal = entry.contains_array_literal();
} else { } else {
ParseSourceElements(&body, Token::RBRACE, CHECK_OK); ParseSourceElements(&body, Token::RBRACE, CHECK_OK);
materialized_literal_count = temp_scope.materialized_literal_count(); materialized_literal_count = temp_scope.materialized_literal_count();
expected_property_count = temp_scope.expected_property_count(); expected_property_count = temp_scope.expected_property_count();
contains_array_literal = temp_scope.contains_array_literal();
only_this_property_assignments = only_this_property_assignments =
temp_scope.only_this_property_assignments(); temp_scope.only_this_property_assignments();
only_simple_this_property_assignments = only_simple_this_property_assignments =
@ -3669,7 +3656,6 @@ FunctionLiteral* Parser::ParseFunctionLiteral(Handle<String> var_name,
entry.set_end_pos(end_pos); entry.set_end_pos(end_pos);
entry.set_literal_count(materialized_literal_count); entry.set_literal_count(materialized_literal_count);
entry.set_property_count(expected_property_count); entry.set_property_count(expected_property_count);
entry.set_contains_array_literal(contains_array_literal);
} }
FunctionLiteral* function_literal = FunctionLiteral* function_literal =
@ -3677,7 +3663,6 @@ FunctionLiteral* Parser::ParseFunctionLiteral(Handle<String> var_name,
top_scope_, top_scope_,
body.elements(), body.elements(),
materialized_literal_count, materialized_literal_count,
contains_array_literal,
expected_property_count, expected_property_count,
only_this_property_assignments, only_this_property_assignments,
only_simple_this_property_assignments, only_simple_this_property_assignments,

10
deps/v8/src/parser.h

@ -70,16 +70,9 @@ class FunctionEntry BASE_EMBEDDED {
int property_count() { return backing_[kPropertyCountOffset]; } int property_count() { return backing_[kPropertyCountOffset]; }
void set_property_count(int value) { backing_[kPropertyCountOffset] = value; } void set_property_count(int value) { backing_[kPropertyCountOffset] = value; }
bool contains_array_literal() {
return backing_[kContainsArrayLiteralOffset] != 0;
}
void set_contains_array_literal(bool value) {
backing_[kContainsArrayLiteralOffset] = value ? 1 : 0;
}
bool is_valid() { return backing_.length() > 0; } bool is_valid() { return backing_.length() > 0; }
static const int kSize = 5; static const int kSize = 4;
private: private:
Vector<unsigned> backing_; Vector<unsigned> backing_;
@ -87,7 +80,6 @@ class FunctionEntry BASE_EMBEDDED {
static const int kEndPosOffset = 1; static const int kEndPosOffset = 1;
static const int kLiteralCountOffset = 2; static const int kLiteralCountOffset = 2;
static const int kPropertyCountOffset = 3; static const int kPropertyCountOffset = 3;
static const int kContainsArrayLiteralOffset = 4;
}; };

1
deps/v8/src/platform-win32.cc

@ -1794,7 +1794,6 @@ class Sampler::PlatformData : public Malloced {
context.ContextFlags = CONTEXT_FULL; context.ContextFlags = CONTEXT_FULL;
if (GetThreadContext(profiled_thread_, &context) != 0) { if (GetThreadContext(profiled_thread_, &context) != 0) {
#if V8_HOST_ARCH_X64 #if V8_HOST_ARCH_X64
UNIMPLEMENTED();
sample.pc = context.Rip; sample.pc = context.Rip;
sample.sp = context.Rsp; sample.sp = context.Rsp;
sample.fp = context.Rbp; sample.fp = context.Rbp;

506
deps/v8/src/prettyprinter.cc

@ -147,46 +147,42 @@ void PrettyPrinter::VisitSwitchStatement(SwitchStatement* node) {
} }
void PrettyPrinter::VisitLoopStatement(LoopStatement* node) { void PrettyPrinter::VisitDoWhileStatement(DoWhileStatement* node) {
PrintLabels(node->labels()); PrintLabels(node->labels());
switch (node->type()) { Print("do ");
case LoopStatement::DO_LOOP: Visit(node->body());
ASSERT(node->init() == NULL); Print(" while (");
ASSERT(node->next() == NULL); Visit(node->cond());
Print("do "); Print(");");
Visit(node->body()); }
Print(" while (");
Visit(node->cond());
Print(");");
break;
case LoopStatement::FOR_LOOP:
Print("for (");
if (node->init() != NULL) {
Visit(node->init());
Print(" ");
} else {
Print("; ");
}
if (node->cond() != NULL)
Visit(node->cond());
Print("; ");
if (node->next() != NULL)
Visit(node->next()); // prints extra ';', unfortunately
// to fix: should use Expression for next
Print(") ");
Visit(node->body());
break;
case LoopStatement::WHILE_LOOP: void PrettyPrinter::VisitWhileStatement(WhileStatement* node) {
ASSERT(node->init() == NULL); PrintLabels(node->labels());
ASSERT(node->next() == NULL); Print("while (");
Print("while ("); Visit(node->cond());
Visit(node->cond()); Print(") ");
Print(") "); Visit(node->body());
Visit(node->body()); }
break;
void PrettyPrinter::VisitForStatement(ForStatement* node) {
PrintLabels(node->labels());
Print("for (");
if (node->init() != NULL) {
Visit(node->init());
Print(" ");
} else {
Print("; ");
}
if (node->cond() != NULL) Visit(node->cond());
Print("; ");
if (node->next() != NULL) {
Visit(node->next()); // prints extra ';', unfortunately
// to fix: should use Expression for next
} }
Print(") ");
Visit(node->body());
} }
@ -201,7 +197,7 @@ void PrettyPrinter::VisitForInStatement(ForInStatement* node) {
} }
void PrettyPrinter::VisitTryCatch(TryCatch* node) { void PrettyPrinter::VisitTryCatchStatement(TryCatchStatement* node) {
Print("try "); Print("try ");
Visit(node->try_block()); Visit(node->try_block());
Print(" catch ("); Print(" catch (");
@ -211,7 +207,7 @@ void PrettyPrinter::VisitTryCatch(TryCatch* node) {
} }
void PrettyPrinter::VisitTryFinally(TryFinally* node) { void PrettyPrinter::VisitTryFinallyStatement(TryFinallyStatement* node) {
Print("try "); Print("try ");
Visit(node->try_block()); Visit(node->try_block());
Print(" finally "); Print(" finally ");
@ -841,12 +837,28 @@ void AstPrinter::VisitSwitchStatement(SwitchStatement* node) {
} }
void AstPrinter::VisitLoopStatement(LoopStatement* node) { void AstPrinter::VisitDoWhileStatement(DoWhileStatement* node) {
IndentedScope indent(node->OperatorString()); IndentedScope indent("DO");
PrintLabelsIndented(NULL, node->labels());
PrintIndentedVisit("BODY", node->body());
PrintIndentedVisit("COND", node->cond());
}
void AstPrinter::VisitWhileStatement(WhileStatement* node) {
IndentedScope indent("WHILE");
PrintLabelsIndented(NULL, node->labels());
PrintIndentedVisit("COND", node->cond());
PrintIndentedVisit("BODY", node->body());
}
void AstPrinter::VisitForStatement(ForStatement* node) {
IndentedScope indent("FOR");
PrintLabelsIndented(NULL, node->labels()); PrintLabelsIndented(NULL, node->labels());
if (node->init()) PrintIndentedVisit("INIT", node->init()); if (node->init()) PrintIndentedVisit("INIT", node->init());
if (node->cond()) PrintIndentedVisit("COND", node->cond()); if (node->cond()) PrintIndentedVisit("COND", node->cond());
if (node->body()) PrintIndentedVisit("BODY", node->body()); PrintIndentedVisit("BODY", node->body());
if (node->next()) PrintIndentedVisit("NEXT", node->next()); if (node->next()) PrintIndentedVisit("NEXT", node->next());
} }
@ -859,7 +871,7 @@ void AstPrinter::VisitForInStatement(ForInStatement* node) {
} }
void AstPrinter::VisitTryCatch(TryCatch* node) { void AstPrinter::VisitTryCatchStatement(TryCatchStatement* node) {
IndentedScope indent("TRY CATCH"); IndentedScope indent("TRY CATCH");
PrintIndentedVisit("TRY", node->try_block()); PrintIndentedVisit("TRY", node->try_block());
PrintIndentedVisit("CATCHVAR", node->catch_var()); PrintIndentedVisit("CATCHVAR", node->catch_var());
@ -867,7 +879,7 @@ void AstPrinter::VisitTryCatch(TryCatch* node) {
} }
void AstPrinter::VisitTryFinally(TryFinally* node) { void AstPrinter::VisitTryFinallyStatement(TryFinallyStatement* node) {
IndentedScope indent("TRY FINALLY"); IndentedScope indent("TRY FINALLY");
PrintIndentedVisit("TRY", node->try_block()); PrintIndentedVisit("TRY", node->try_block());
PrintIndentedVisit("FINALLY", node->finally_block()); PrintIndentedVisit("FINALLY", node->finally_block());
@ -1088,6 +1100,414 @@ void AstPrinter::VisitThisFunction(ThisFunction* node) {
} }
TagScope::TagScope(JsonAstBuilder* builder, const char* name)
: builder_(builder), next_(builder->tag()), has_body_(false) {
if (next_ != NULL) {
next_->use();
builder->Print(",\n");
}
builder->set_tag(this);
builder->PrintIndented("[");
builder->Print("\"%s\"", name);
builder->increase_indent(JsonAstBuilder::kTagIndentSize);
}
TagScope::~TagScope() {
builder_->decrease_indent(JsonAstBuilder::kTagIndentSize);
if (has_body_) {
builder_->Print("\n");
builder_->PrintIndented("]");
} else {
builder_->Print("]");
}
builder_->set_tag(next_);
}
AttributesScope::AttributesScope(JsonAstBuilder* builder)
: builder_(builder), attribute_count_(0) {
builder->set_attributes(this);
builder->tag()->use();
builder->Print(",\n");
builder->PrintIndented("{");
builder->increase_indent(JsonAstBuilder::kAttributesIndentSize);
}
AttributesScope::~AttributesScope() {
builder_->decrease_indent(JsonAstBuilder::kAttributesIndentSize);
if (attribute_count_ > 1) {
builder_->Print("\n");
builder_->PrintIndented("}");
} else {
builder_->Print("}");
}
builder_->set_attributes(NULL);
}
const char* JsonAstBuilder::BuildProgram(FunctionLiteral* program) {
Init();
Visit(program);
Print("\n");
return Output();
}
void JsonAstBuilder::AddAttributePrefix(const char* name) {
if (attributes()->is_used()) {
Print(",\n");
PrintIndented("\"");
} else {
Print("\"");
}
Print("%s\":", name);
attributes()->use();
}
void JsonAstBuilder::AddAttribute(const char* name, Handle<String> value) {
SmartPointer<char> value_string = value->ToCString();
AddAttributePrefix(name);
Print("\"%s\"", *value_string);
}
void JsonAstBuilder::AddAttribute(const char* name, const char* value) {
AddAttributePrefix(name);
Print("\"%s\"", value);
}
void JsonAstBuilder::AddAttribute(const char* name, int value) {
AddAttributePrefix(name);
Print("%d", value);
}
void JsonAstBuilder::AddAttribute(const char* name, bool value) {
AddAttributePrefix(name);
Print(value ? "true" : "false");
}
void JsonAstBuilder::VisitBlock(Block* stmt) {
TagScope tag(this, "Block");
VisitStatements(stmt->statements());
}
void JsonAstBuilder::VisitExpressionStatement(ExpressionStatement* stmt) {
TagScope tag(this, "ExpressionStatement");
Visit(stmt->expression());
}
void JsonAstBuilder::VisitEmptyStatement(EmptyStatement* stmt) {
TagScope tag(this, "EmptyStatement");
}
void JsonAstBuilder::VisitIfStatement(IfStatement* stmt) {
TagScope tag(this, "IfStatement");
Visit(stmt->condition());
Visit(stmt->then_statement());
Visit(stmt->else_statement());
}
void JsonAstBuilder::VisitContinueStatement(ContinueStatement* stmt) {
TagScope tag(this, "ContinueStatement");
}
void JsonAstBuilder::VisitBreakStatement(BreakStatement* stmt) {
TagScope tag(this, "BreakStatement");
}
void JsonAstBuilder::VisitReturnStatement(ReturnStatement* stmt) {
TagScope tag(this, "ReturnStatement");
Visit(stmt->expression());
}
void JsonAstBuilder::VisitWithEnterStatement(WithEnterStatement* stmt) {
TagScope tag(this, "WithEnterStatement");
Visit(stmt->expression());
}
void JsonAstBuilder::VisitWithExitStatement(WithExitStatement* stmt) {
TagScope tag(this, "WithExitStatement");
}
void JsonAstBuilder::VisitSwitchStatement(SwitchStatement* stmt) {
TagScope tag(this, "SwitchStatement");
}
void JsonAstBuilder::VisitDoWhileStatement(DoWhileStatement* stmt) {
TagScope tag(this, "DoWhileStatement");
Visit(stmt->body());
Visit(stmt->cond());
}
void JsonAstBuilder::VisitWhileStatement(WhileStatement* stmt) {
TagScope tag(this, "WhileStatement");
Visit(stmt->cond());
Visit(stmt->body());
}
void JsonAstBuilder::VisitForStatement(ForStatement* stmt) {
TagScope tag(this, "ForStatement");
if (stmt->init() != NULL) Visit(stmt->init());
if (stmt->cond() != NULL) Visit(stmt->cond());
Visit(stmt->body());
if (stmt->next() != NULL) Visit(stmt->next());
}
void JsonAstBuilder::VisitForInStatement(ForInStatement* stmt) {
TagScope tag(this, "ForInStatement");
Visit(stmt->each());
Visit(stmt->enumerable());
Visit(stmt->body());
}
void JsonAstBuilder::VisitTryCatchStatement(TryCatchStatement* stmt) {
TagScope tag(this, "TryCatchStatement");
Visit(stmt->try_block());
Visit(stmt->catch_var());
Visit(stmt->catch_block());
}
void JsonAstBuilder::VisitTryFinallyStatement(TryFinallyStatement* stmt) {
TagScope tag(this, "TryFinallyStatement");
Visit(stmt->try_block());
Visit(stmt->finally_block());
}
void JsonAstBuilder::VisitDebuggerStatement(DebuggerStatement* stmt) {
TagScope tag(this, "DebuggerStatement");
}
void JsonAstBuilder::VisitFunctionLiteral(FunctionLiteral* expr) {
TagScope tag(this, "FunctionLiteral");
{
AttributesScope attributes(this);
AddAttribute("name", expr->name());
}
VisitDeclarations(expr->scope()->declarations());
VisitStatements(expr->body());
}
void JsonAstBuilder::VisitFunctionBoilerplateLiteral(
FunctionBoilerplateLiteral* expr) {
TagScope tag(this, "FunctionBoilerplateLiteral");
}
void JsonAstBuilder::VisitConditional(Conditional* expr) {
TagScope tag(this, "Conditional");
}
void JsonAstBuilder::VisitSlot(Slot* expr) {
TagScope tag(this, "Slot");
{
AttributesScope attributes(this);
switch (expr->type()) {
case Slot::PARAMETER:
AddAttribute("type", "PARAMETER");
break;
case Slot::LOCAL:
AddAttribute("type", "LOCAL");
break;
case Slot::CONTEXT:
AddAttribute("type", "CONTEXT");
break;
case Slot::LOOKUP:
AddAttribute("type", "LOOKUP");
break;
case Slot::GLOBAL:
AddAttribute("type", "GLOBAL");
break;
}
AddAttribute("index", expr->index());
}
}
void JsonAstBuilder::VisitVariableProxy(VariableProxy* expr) {
if (expr->var()->rewrite() == NULL) {
TagScope tag(this, "VariableProxy");
{
AttributesScope attributes(this);
AddAttribute("name", expr->name());
AddAttribute("mode", Variable::Mode2String(expr->var()->mode()));
}
} else {
Visit(expr->var()->rewrite());
}
}
void JsonAstBuilder::VisitLiteral(Literal* expr) {
TagScope tag(this, "Literal");
{
AttributesScope attributes(this);
Handle<Object> handle = expr->handle();
if (handle->IsString()) {
AddAttribute("handle", Handle<String>(String::cast(*handle)));
} else if (handle->IsSmi()) {
AddAttribute("handle", Smi::cast(*handle)->value());
}
}
}
void JsonAstBuilder::VisitRegExpLiteral(RegExpLiteral* expr) {
TagScope tag(this, "RegExpLiteral");
}
void JsonAstBuilder::VisitObjectLiteral(ObjectLiteral* expr) {
TagScope tag(this, "ObjectLiteral");
}
void JsonAstBuilder::VisitArrayLiteral(ArrayLiteral* expr) {
TagScope tag(this, "ArrayLiteral");
}
void JsonAstBuilder::VisitCatchExtensionObject(CatchExtensionObject* expr) {
TagScope tag(this, "CatchExtensionObject");
Visit(expr->key());
Visit(expr->value());
}
void JsonAstBuilder::VisitAssignment(Assignment* expr) {
TagScope tag(this, "Assignment");
{
AttributesScope attributes(this);
AddAttribute("op", Token::Name(expr->op()));
}
Visit(expr->target());
Visit(expr->value());
}
void JsonAstBuilder::VisitThrow(Throw* expr) {
TagScope tag(this, "Throw");
Visit(expr->exception());
}
void JsonAstBuilder::VisitProperty(Property* expr) {
TagScope tag(this, "Property");
{
AttributesScope attributes(this);
AddAttribute("type", expr->is_synthetic() ? "SYNTHETIC" : "NORMAL");
}
Visit(expr->obj());
Visit(expr->key());
}
void JsonAstBuilder::VisitCall(Call* expr) {
TagScope tag(this, "Call");
Visit(expr->expression());
VisitExpressions(expr->arguments());
}
void JsonAstBuilder::VisitCallNew(CallNew* expr) {
TagScope tag(this, "CallNew");
Visit(expr->expression());
VisitExpressions(expr->arguments());
}
void JsonAstBuilder::VisitCallRuntime(CallRuntime* expr) {
TagScope tag(this, "CallRuntime");
{
AttributesScope attributes(this);
AddAttribute("name", expr->name());
}
VisitExpressions(expr->arguments());
}
void JsonAstBuilder::VisitUnaryOperation(UnaryOperation* expr) {
TagScope tag(this, "UnaryOperation");
{
AttributesScope attributes(this);
AddAttribute("op", Token::Name(expr->op()));
}
Visit(expr->expression());
}
void JsonAstBuilder::VisitCountOperation(CountOperation* expr) {
TagScope tag(this, "CountOperation");
{
AttributesScope attributes(this);
AddAttribute("is_prefix", expr->is_prefix());
AddAttribute("op", Token::Name(expr->op()));
}
Visit(expr->expression());
}
void JsonAstBuilder::VisitBinaryOperation(BinaryOperation* expr) {
TagScope tag(this, "BinaryOperation");
{
AttributesScope attributes(this);
AddAttribute("op", Token::Name(expr->op()));
}
Visit(expr->left());
Visit(expr->right());
}
void JsonAstBuilder::VisitCompareOperation(CompareOperation* expr) {
TagScope tag(this, "CompareOperation");
{
AttributesScope attributes(this);
AddAttribute("op", Token::Name(expr->op()));
}
Visit(expr->left());
Visit(expr->right());
}
void JsonAstBuilder::VisitThisFunction(ThisFunction* expr) {
TagScope tag(this, "ThisFunction");
}
void JsonAstBuilder::VisitDeclaration(Declaration* decl) {
TagScope tag(this, "Declaration");
{
AttributesScope attributes(this);
AddAttribute("mode", Variable::Mode2String(decl->mode()));
}
Visit(decl->proxy());
if (decl->fun() != NULL) Visit(decl->fun());
}
#endif // DEBUG #endif // DEBUG

118
deps/v8/src/prettyprinter.h

@ -46,14 +46,15 @@ class PrettyPrinter: public AstVisitor {
const char* PrintExpression(FunctionLiteral* program); const char* PrintExpression(FunctionLiteral* program);
const char* PrintProgram(FunctionLiteral* program); const char* PrintProgram(FunctionLiteral* program);
void Print(const char* format, ...);
// Print a node to stdout. // Print a node to stdout.
static void PrintOut(AstNode* node); static void PrintOut(AstNode* node);
// Individual nodes // Individual nodes
#define DEF_VISIT(type) \ #define DECLARE_VISIT(type) virtual void Visit##type(type* node);
virtual void Visit##type(type* node); AST_NODE_LIST(DECLARE_VISIT)
AST_NODE_LIST(DEF_VISIT) #undef DECLARE_VISIT
#undef DEF_VISIT
private: private:
char* output_; // output string buffer char* output_; // output string buffer
@ -62,7 +63,6 @@ class PrettyPrinter: public AstVisitor {
protected: protected:
void Init(); void Init();
void Print(const char* format, ...);
const char* Output() const { return output_; } const char* Output() const { return output_; }
virtual void PrintStatements(ZoneList<Statement*>* statements); virtual void PrintStatements(ZoneList<Statement*>* statements);
@ -85,10 +85,9 @@ class AstPrinter: public PrettyPrinter {
const char* PrintProgram(FunctionLiteral* program); const char* PrintProgram(FunctionLiteral* program);
// Individual nodes // Individual nodes
#define DEF_VISIT(type) \ #define DECLARE_VISIT(type) virtual void Visit##type(type* node);
virtual void Visit##type(type* node); AST_NODE_LIST(DECLARE_VISIT)
AST_NODE_LIST(DEF_VISIT) #undef DECLARE_VISIT
#undef DEF_VISIT
private: private:
friend class IndentedScope; friend class IndentedScope;
void PrintIndented(const char* txt); void PrintIndented(const char* txt);
@ -112,6 +111,107 @@ class AstPrinter: public PrettyPrinter {
static int indent_; static int indent_;
}; };
// Forward declaration of helper classes.
class TagScope;
class AttributesScope;
// Build a C string containing a JSON representation of a function's
// AST. The representation is based on JsonML (www.jsonml.org).
class JsonAstBuilder: public PrettyPrinter {
public:
JsonAstBuilder()
: indent_(0), top_tag_scope_(NULL), attributes_scope_(NULL) {
}
virtual ~JsonAstBuilder() {}
// Controls the indentation of subsequent lines of a tag body after
// the first line.
static const int kTagIndentSize = 2;
// Controls the indentation of subsequent lines of an attributes
// blocks's body after the first line.
static const int kAttributesIndentSize = 1;
// Construct a JSON representation of a function literal.
const char* BuildProgram(FunctionLiteral* program);
// Print text indented by the current indentation level.
void PrintIndented(const char* text) { Print("%*s%s", indent_, "", text); }
// Change the indentation level.
void increase_indent(int amount) { indent_ += amount; }
void decrease_indent(int amount) { indent_ -= amount; }
// The builder maintains a stack of opened AST node constructors.
// Each node constructor corresponds to a JsonML tag.
TagScope* tag() { return top_tag_scope_; }
void set_tag(TagScope* scope) { top_tag_scope_ = scope; }
// The builder maintains a pointer to the currently opened attributes
// of current AST node or NULL if the attributes are not opened.
AttributesScope* attributes() { return attributes_scope_; }
void set_attributes(AttributesScope* scope) { attributes_scope_ = scope; }
// Add an attribute to the currently opened attributes.
void AddAttribute(const char* name, Handle<String> value);
void AddAttribute(const char* name, const char* value);
void AddAttribute(const char* name, int value);
void AddAttribute(const char* name, bool value);
// AST node visit functions.
#define DECLARE_VISIT(type) virtual void Visit##type(type* node);
AST_NODE_LIST(DECLARE_VISIT)
#undef DECLARE_VISIT
private:
int indent_;
TagScope* top_tag_scope_;
AttributesScope* attributes_scope_;
// Utility function used by AddAttribute implementations.
void AddAttributePrefix(const char* name);
};
// The JSON AST builder keeps a stack of open element tags (AST node
// constructors from the current iteration point to the root of the
// AST). TagScope is a helper class to manage the opening and closing
// of tags, the indentation of their bodies, and comma separating their
// contents.
class TagScope BASE_EMBEDDED {
public:
TagScope(JsonAstBuilder* builder, const char* name);
~TagScope();
void use() { has_body_ = true; }
private:
JsonAstBuilder* builder_;
TagScope* next_;
bool has_body_;
};
// AttributesScope is a helper class to manage the opening and closing
// of attribute blocks, the indentation of their bodies, and comma
// separating their contents. JsonAstBuilder::AddAttribute adds an
// attribute to the currently open AttributesScope. They cannot be
// nested so the builder keeps an optional single scope rather than a
// stack.
class AttributesScope BASE_EMBEDDED {
public:
explicit AttributesScope(JsonAstBuilder* builder);
~AttributesScope();
bool is_used() { return attribute_count_ > 0; }
void use() { ++attribute_count_; }
private:
JsonAstBuilder* builder_;
int attribute_count_;
};
#endif // DEBUG #endif // DEBUG
} } // namespace v8::internal } } // namespace v8::internal

56
deps/v8/src/rewriter.cc

@ -100,7 +100,21 @@ void AstOptimizer::VisitIfStatement(IfStatement* node) {
} }
void AstOptimizer::VisitLoopStatement(LoopStatement* node) { void AstOptimizer::VisitDoWhileStatement(DoWhileStatement* node) {
Visit(node->cond());
Visit(node->body());
}
void AstOptimizer::VisitWhileStatement(WhileStatement* node) {
has_function_literal_ = false;
Visit(node->cond());
node->may_have_function_literal_ = has_function_literal_;
Visit(node->body());
}
void AstOptimizer::VisitForStatement(ForStatement* node) {
if (node->init() != NULL) { if (node->init() != NULL) {
Visit(node->init()); Visit(node->init());
} }
@ -109,9 +123,7 @@ void AstOptimizer::VisitLoopStatement(LoopStatement* node) {
Visit(node->cond()); Visit(node->cond());
node->may_have_function_literal_ = has_function_literal_; node->may_have_function_literal_ = has_function_literal_;
} }
if (node->body() != NULL) { Visit(node->body());
Visit(node->body());
}
if (node->next() != NULL) { if (node->next() != NULL) {
Visit(node->next()); Visit(node->next());
} }
@ -125,14 +137,14 @@ void AstOptimizer::VisitForInStatement(ForInStatement* node) {
} }
void AstOptimizer::VisitTryCatch(TryCatch* node) { void AstOptimizer::VisitTryCatchStatement(TryCatchStatement* node) {
Visit(node->try_block()); Visit(node->try_block());
Visit(node->catch_var()); Visit(node->catch_var());
Visit(node->catch_block()); Visit(node->catch_block());
} }
void AstOptimizer::VisitTryFinally(TryFinally* node) { void AstOptimizer::VisitTryFinallyStatement(TryFinallyStatement* node) {
Visit(node->try_block()); Visit(node->try_block());
Visit(node->finally_block()); Visit(node->finally_block());
} }
@ -553,6 +565,8 @@ class Processor: public AstVisitor {
virtual void Visit##type(type* node); virtual void Visit##type(type* node);
AST_NODE_LIST(DEF_VISIT) AST_NODE_LIST(DEF_VISIT)
#undef DEF_VISIT #undef DEF_VISIT
void VisitIterationStatement(IterationStatement* stmt);
}; };
@ -596,25 +610,35 @@ void Processor::VisitIfStatement(IfStatement* node) {
} }
void Processor::VisitIterationStatement(IterationStatement* node) {
// Rewrite the body.
void Processor::VisitLoopStatement(LoopStatement* node) {
// Rewrite loop body statement.
bool set_after_loop = is_set_; bool set_after_loop = is_set_;
Visit(node->body()); Visit(node->body());
is_set_ = is_set_ && set_after_loop; is_set_ = is_set_ && set_after_loop;
} }
void Processor::VisitDoWhileStatement(DoWhileStatement* node) {
VisitIterationStatement(node);
}
void Processor::VisitWhileStatement(WhileStatement* node) {
VisitIterationStatement(node);
}
void Processor::VisitForStatement(ForStatement* node) {
VisitIterationStatement(node);
}
void Processor::VisitForInStatement(ForInStatement* node) { void Processor::VisitForInStatement(ForInStatement* node) {
// Rewrite for-in body statement. VisitIterationStatement(node);
bool set_after_for = is_set_;
Visit(node->body());
is_set_ = is_set_ && set_after_for;
} }
void Processor::VisitTryCatch(TryCatch* node) { void Processor::VisitTryCatchStatement(TryCatchStatement* node) {
// Rewrite both try and catch blocks (reversed order). // Rewrite both try and catch blocks (reversed order).
bool set_after_catch = is_set_; bool set_after_catch = is_set_;
Visit(node->catch_block()); Visit(node->catch_block());
@ -626,7 +650,7 @@ void Processor::VisitTryCatch(TryCatch* node) {
} }
void Processor::VisitTryFinally(TryFinally* node) { void Processor::VisitTryFinallyStatement(TryFinallyStatement* node) {
// Rewrite both try and finally block (reversed order). // Rewrite both try and finally block (reversed order).
Visit(node->finally_block()); Visit(node->finally_block());
bool save = in_try_; bool save = in_try_;

49
deps/v8/src/runtime.cc

@ -34,18 +34,17 @@
#include "arguments.h" #include "arguments.h"
#include "compiler.h" #include "compiler.h"
#include "cpu.h" #include "cpu.h"
#include "dateparser.h"
#include "dateparser-inl.h" #include "dateparser-inl.h"
#include "debug.h" #include "debug.h"
#include "execution.h" #include "execution.h"
#include "jsregexp.h" #include "jsregexp.h"
#include "parser.h"
#include "platform.h" #include "platform.h"
#include "runtime.h" #include "runtime.h"
#include "scopeinfo.h" #include "scopeinfo.h"
#include "v8threads.h"
#include "smart-pointer.h" #include "smart-pointer.h"
#include "parser.h"
#include "stub-cache.h" #include "stub-cache.h"
#include "v8threads.h"
namespace v8 { namespace v8 {
namespace internal { namespace internal {
@ -522,7 +521,7 @@ static Object* Runtime_GetTemplateField(Arguments args) {
RUNTIME_ASSERT(type == FUNCTION_TEMPLATE_INFO_TYPE || RUNTIME_ASSERT(type == FUNCTION_TEMPLATE_INFO_TYPE ||
type == OBJECT_TEMPLATE_INFO_TYPE); type == OBJECT_TEMPLATE_INFO_TYPE);
RUNTIME_ASSERT(offset > 0); RUNTIME_ASSERT(offset > 0);
if (type == FUNCTION_TEMPLATE_INFO_TYPE) { if (type == FUNCTION_TEMPLATE_INFO_TYPE) {
RUNTIME_ASSERT(offset < FunctionTemplateInfo::kSize); RUNTIME_ASSERT(offset < FunctionTemplateInfo::kSize);
} else { } else {
RUNTIME_ASSERT(offset < ObjectTemplateInfo::kSize); RUNTIME_ASSERT(offset < ObjectTemplateInfo::kSize);
@ -3252,8 +3251,8 @@ static Object* Runtime_URIEscape(Arguments args) {
} else { } else {
escaped_length += 3; escaped_length += 3;
} }
// We don't allow strings that are longer than Smi range. // We don't allow strings that are longer than a maximal length.
if (!Smi::IsValid(escaped_length)) { if (escaped_length > String::kMaxLength) {
Top::context()->mark_out_of_memory(); Top::context()->mark_out_of_memory();
return Failure::OutOfMemoryException(); return Failure::OutOfMemoryException();
} }
@ -3584,6 +3583,36 @@ static Object* Runtime_StringToUpperCase(Arguments args) {
return ConvertCase<unibrow::ToUppercase>(args, &to_upper_mapping); return ConvertCase<unibrow::ToUppercase>(args, &to_upper_mapping);
} }
static inline bool IsTrimWhiteSpace(unibrow::uchar c) {
return unibrow::WhiteSpace::Is(c) || c == 0x200b;
}
static Object* Runtime_StringTrim(Arguments args) {
NoHandleAllocation ha;
ASSERT(args.length() == 3);
CONVERT_CHECKED(String, s, args[0]);
CONVERT_BOOLEAN_CHECKED(trimLeft, args[1]);
CONVERT_BOOLEAN_CHECKED(trimRight, args[2]);
s->TryFlattenIfNotFlat();
int length = s->length();
int left = 0;
if (trimLeft) {
while (left < length && IsTrimWhiteSpace(s->Get(left))) {
left++;
}
}
int right = length;
if (trimRight) {
while (right > left && IsTrimWhiteSpace(s->Get(right - 1))) {
right--;
}
}
return s->Slice(left, right);
}
bool Runtime::IsUpperCaseChar(uint16_t ch) { bool Runtime::IsUpperCaseChar(uint16_t ch) {
unibrow::uchar chars[unibrow::ToUppercase::kMaxWidth]; unibrow::uchar chars[unibrow::ToUppercase::kMaxWidth];
@ -3804,10 +3833,6 @@ static Object* Runtime_StringBuilderConcat(Arguments args) {
} else if (elt->IsString()) { } else if (elt->IsString()) {
String* element = String::cast(elt); String* element = String::cast(elt);
int element_length = element->length(); int element_length = element->length();
if (!Smi::IsValid(element_length + position)) {
Top::context()->mark_out_of_memory();
return Failure::OutOfMemoryException();
}
position += element_length; position += element_length;
if (ascii && !element->IsAsciiRepresentation()) { if (ascii && !element->IsAsciiRepresentation()) {
ascii = false; ascii = false;
@ -3815,6 +3840,10 @@ static Object* Runtime_StringBuilderConcat(Arguments args) {
} else { } else {
return Top::Throw(Heap::illegal_argument_symbol()); return Top::Throw(Heap::illegal_argument_symbol());
} }
if (position > String::kMaxLength) {
Top::context()->mark_out_of_memory();
return Failure::OutOfMemoryException();
}
} }
int length = position; int length = position;

1
deps/v8/src/runtime.h

@ -152,6 +152,7 @@ namespace internal {
F(StringSlice, 3, 1) \ F(StringSlice, 3, 1) \
F(StringReplaceRegExpWithString, 4, 1) \ F(StringReplaceRegExpWithString, 4, 1) \
F(StringMatch, 3, 1) \ F(StringMatch, 3, 1) \
F(StringTrim, 3, 1) \
\ \
/* Numbers */ \ /* Numbers */ \
F(NumberToRadixString, 2, 1) \ F(NumberToRadixString, 2, 1) \

20
deps/v8/src/serialize.cc

@ -38,6 +38,7 @@
#include "serialize.h" #include "serialize.h"
#include "stub-cache.h" #include "stub-cache.h"
#include "v8threads.h" #include "v8threads.h"
#include "top.h"
namespace v8 { namespace v8 {
namespace internal { namespace internal {
@ -612,12 +613,23 @@ void ExternalReferenceTable::PopulateTable() {
} }
// Top addresses // Top addresses
const char* top_address_format = "Top::get_address_from_id(%i)"; const char* top_address_format = "Top::%s";
size_t top_format_length = strlen(top_address_format);
const char* AddressNames[] = {
#define C(name) #name,
TOP_ADDRESS_LIST(C)
TOP_ADDRESS_LIST_PROF(C)
NULL
#undef C
};
size_t top_format_length = strlen(top_address_format) - 2;
for (uint16_t i = 0; i < Top::k_top_address_count; ++i) { for (uint16_t i = 0; i < Top::k_top_address_count; ++i) {
Vector<char> name = Vector<char>::New(top_format_length + 1); const char* address_name = AddressNames[i];
Vector<char> name =
Vector<char>::New(top_format_length + strlen(address_name) + 1);
const char* chars = name.start(); const char* chars = name.start();
OS::SNPrintF(name, top_address_format, i); OS::SNPrintF(name, top_address_format, address_name);
Add(Top::get_address_from_id((Top::AddressId)i), TOP_ADDRESS, i, chars); Add(Top::get_address_from_id((Top::AddressId)i), TOP_ADDRESS, i, chars);
} }

15
deps/v8/src/string.js

@ -680,6 +680,18 @@ function StringToLocaleUpperCase() {
return %StringToUpperCase(ToString(this)); return %StringToUpperCase(ToString(this));
} }
// ES5, 15.5.4.20
function StringTrim() {
return %StringTrim(ToString(this), true, true);
}
function StringTrimLeft() {
return %StringTrim(ToString(this), true, false);
}
function StringTrimRight() {
return %StringTrim(ToString(this), false, true);
}
// ECMA-262, section 15.5.3.2 // ECMA-262, section 15.5.3.2
function StringFromCharCode(code) { function StringFromCharCode(code) {
@ -855,6 +867,9 @@ function SetupString() {
"toLocaleLowerCase", StringToLocaleLowerCase, "toLocaleLowerCase", StringToLocaleLowerCase,
"toUpperCase", StringToUpperCase, "toUpperCase", StringToUpperCase,
"toLocaleUpperCase", StringToLocaleUpperCase, "toLocaleUpperCase", StringToLocaleUpperCase,
"trim", StringTrim,
"trimLeft", StringTrimLeft,
"trimRight", StringTrimRight,
"link", StringLink, "link", StringLink,
"anchor", StringAnchor, "anchor", StringAnchor,
"fontcolor", StringFontcolor, "fontcolor", StringFontcolor,

11
deps/v8/src/top.cc

@ -54,6 +54,7 @@ Address Top::get_address_from_id(Top::AddressId id) {
return top_addresses[id]; return top_addresses[id];
} }
char* Top::Iterate(ObjectVisitor* v, char* thread_storage) { char* Top::Iterate(ObjectVisitor* v, char* thread_storage) {
ThreadLocalTop* thread = reinterpret_cast<ThreadLocalTop*>(thread_storage); ThreadLocalTop* thread = reinterpret_cast<ThreadLocalTop*>(thread_storage);
Iterate(v, thread); Iterate(v, thread);
@ -493,11 +494,17 @@ static MayAccessDecision MayAccessPreCheck(JSObject* receiver,
bool Top::MayNamedAccess(JSObject* receiver, Object* key, v8::AccessType type) { bool Top::MayNamedAccess(JSObject* receiver, Object* key, v8::AccessType type) {
ASSERT(receiver->IsAccessCheckNeeded()); ASSERT(receiver->IsAccessCheckNeeded());
// The callers of this method are not expecting a GC.
AssertNoAllocation no_gc;
// Skip checks for hidden properties access. Note, we do not
// require existence of a context in this case.
if (key == Heap::hidden_symbol()) return true;
// Check for compatibility between the security tokens in the // Check for compatibility between the security tokens in the
// current lexical context and the accessed object. // current lexical context and the accessed object.
ASSERT(Top::context()); ASSERT(Top::context());
// The callers of this method are not expecting a GC.
AssertNoAllocation no_gc;
MayAccessDecision decision = MayAccessPreCheck(receiver, type); MayAccessDecision decision = MayAccessPreCheck(receiver, type);
if (decision != UNKNOWN) return decision == YES; if (decision != UNKNOWN) return decision == YES;

29
deps/v8/src/usage-analyzer.cc

@ -159,14 +159,25 @@ void UsageComputer::VisitSwitchStatement(SwitchStatement* node) {
} }
void UsageComputer::VisitLoopStatement(LoopStatement* node) { void UsageComputer::VisitDoWhileStatement(DoWhileStatement* node) {
if (node->init() != NULL) WeightScaler ws(this, 10.0);
Visit(node->init()); Read(node->cond());
Visit(node->body());
}
void UsageComputer::VisitWhileStatement(WhileStatement* node) {
WeightScaler ws(this, 10.0);
Read(node->cond());
Visit(node->body());
}
void UsageComputer::VisitForStatement(ForStatement* node) {
if (node->init() != NULL) Visit(node->init());
{ WeightScaler ws(this, 10.0); // executed in each iteration { WeightScaler ws(this, 10.0); // executed in each iteration
if (node->cond() != NULL) if (node->cond() != NULL) Read(node->cond());
Read(node->cond()); if (node->next() != NULL) Visit(node->next());
if (node->next() != NULL)
Visit(node->next());
Visit(node->body()); Visit(node->body());
} }
} }
@ -180,7 +191,7 @@ void UsageComputer::VisitForInStatement(ForInStatement* node) {
} }
void UsageComputer::VisitTryCatch(TryCatch* node) { void UsageComputer::VisitTryCatchStatement(TryCatchStatement* node) {
Visit(node->try_block()); Visit(node->try_block());
{ WeightScaler ws(this, 0.25); { WeightScaler ws(this, 0.25);
Write(node->catch_var()); Write(node->catch_var());
@ -189,7 +200,7 @@ void UsageComputer::VisitTryCatch(TryCatch* node) {
} }
void UsageComputer::VisitTryFinally(TryFinally* node) { void UsageComputer::VisitTryFinallyStatement(TryFinallyStatement* node) {
Visit(node->try_block()); Visit(node->try_block());
Visit(node->finally_block()); Visit(node->finally_block());
} }

3
deps/v8/src/utils.h

@ -36,7 +36,8 @@ namespace internal {
// ---------------------------------------------------------------------------- // ----------------------------------------------------------------------------
// General helper functions // General helper functions
// Returns true iff x is a power of 2. Does not work for zero. // Returns true iff x is a power of 2 (or zero). Cannot be used with the
// maximally negative value of the type T (the -1 overflows).
template <typename T> template <typename T>
static inline bool IsPowerOf2(T x) { static inline bool IsPowerOf2(T x) {
return (x & (x - 1)) == 0; return (x & (x - 1)) == 0;

8
deps/v8/src/v8-counters.h

@ -52,8 +52,8 @@ namespace internal {
HT(variable_allocation, V8.VariableAllocation) \ HT(variable_allocation, V8.VariableAllocation) \
HT(ast_optimization, V8.ASTOptimization) \ HT(ast_optimization, V8.ASTOptimization) \
HT(code_generation, V8.CodeGeneration) \ HT(code_generation, V8.CodeGeneration) \
HT(deferred_code_generation, V8.DeferredCodeGeneration) \ HT(deferred_code_generation, V8.DeferredCodeGeneration)
HT(code_creation, V8.CodeCreation)
// WARNING: STATS_COUNTER_LIST_* is a very large macro that is causing MSVC // WARNING: STATS_COUNTER_LIST_* is a very large macro that is causing MSVC
// Intellisense to crash. It was broken into two macros (each of length 40 // Intellisense to crash. It was broken into two macros (each of length 40
@ -150,7 +150,9 @@ namespace internal {
SC(reloc_info_count, V8.RelocInfoCount) \ SC(reloc_info_count, V8.RelocInfoCount) \
SC(reloc_info_size, V8.RelocInfoSize) \ SC(reloc_info_size, V8.RelocInfoSize) \
SC(zone_segment_bytes, V8.ZoneSegmentBytes) \ SC(zone_segment_bytes, V8.ZoneSegmentBytes) \
SC(compute_entry_frame, V8.ComputeEntryFrame) SC(compute_entry_frame, V8.ComputeEntryFrame) \
SC(generic_binary_stub_calls, V8.GenericBinaryStubCalls) \
SC(generic_binary_stub_calls_regs, V8.GenericBinaryStubCallsRegs)
// This file contains all the v8 counters that are in use. // This file contains all the v8 counters that are in use.

7
deps/v8/src/v8.cc

@ -178,11 +178,14 @@ bool V8::IdleNotification() {
return Heap::IdleNotification(); return Heap::IdleNotification();
} }
static const uint32_t kRandomPositiveSmiMax = 0x3fffffff;
Smi* V8::RandomPositiveSmi() { Smi* V8::RandomPositiveSmi() {
uint32_t random = Random(); uint32_t random = Random();
ASSERT(IsPowerOf2(Smi::kMaxValue + 1)); ASSERT(static_cast<uint32_t>(Smi::kMaxValue) >= kRandomPositiveSmiMax);
return Smi::FromInt(random & Smi::kMaxValue); // kRandomPositiveSmiMax must match the value being divided
// by in math.js.
return Smi::FromInt(random & kRandomPositiveSmiMax);
} }
} } // namespace v8::internal } } // namespace v8::internal

2
deps/v8/src/version.cc

@ -34,7 +34,7 @@
// cannot be changed without changing the SCons build script. // cannot be changed without changing the SCons build script.
#define MAJOR_VERSION 1 #define MAJOR_VERSION 1
#define MINOR_VERSION 3 #define MINOR_VERSION 3
#define BUILD_NUMBER 15 #define BUILD_NUMBER 16
#define PATCH_LEVEL 0 #define PATCH_LEVEL 0
#define CANDIDATE_VERSION false #define CANDIDATE_VERSION false

23
deps/v8/src/x64/assembler-x64-inl.h

@ -38,11 +38,6 @@ Condition NegateCondition(Condition cc) {
return static_cast<Condition>(cc ^ 1); return static_cast<Condition>(cc ^ 1);
} }
// -----------------------------------------------------------------------------
Immediate::Immediate(Smi* value) {
value_ = static_cast<int32_t>(reinterpret_cast<intptr_t>(value));
}
// ----------------------------------------------------------------------------- // -----------------------------------------------------------------------------
// Implementation of Assembler // Implementation of Assembler
@ -199,7 +194,7 @@ void RelocInfo::apply(intptr_t delta) {
Memory::Address_at(pc_) += delta; Memory::Address_at(pc_) += delta;
} else if (IsCodeTarget(rmode_)) { } else if (IsCodeTarget(rmode_)) {
Memory::int32_at(pc_) -= delta; Memory::int32_at(pc_) -= delta;
} else if (rmode_ == JS_RETURN && IsCallInstruction()) { } else if (rmode_ == JS_RETURN && IsPatchedReturnSequence()) {
// Special handling of js_return when a break point is set (call // Special handling of js_return when a break point is set (call
// instruction has been inserted). // instruction has been inserted).
Memory::int32_at(pc_ + 1) -= delta; // relocate entry Memory::int32_at(pc_ + 1) -= delta; // relocate entry
@ -267,45 +262,49 @@ void RelocInfo::set_target_object(Object* target) {
} }
bool RelocInfo::IsCallInstruction() { bool RelocInfo::IsPatchedReturnSequence() {
// The recognized call sequence is: // The recognized call sequence is:
// movq(kScratchRegister, immediate64); call(kScratchRegister); // movq(kScratchRegister, immediate64); call(kScratchRegister);
// It only needs to be distinguished from a return sequence // It only needs to be distinguished from a return sequence
// movq(rsp, rbp); pop(rbp); ret(n); int3 *6 // movq(rsp, rbp); pop(rbp); ret(n); int3 *6
// The 11th byte is int3 (0xCC) in the return sequence and // The 11th byte is int3 (0xCC) in the return sequence and
// REX.WB (0x48+register bit) for the call sequence. // REX.WB (0x48+register bit) for the call sequence.
#ifdef ENABLE_DEBUGGER_SUPPORT
return pc_[10] != 0xCC; return pc_[10] != 0xCC;
#else
return false;
#endif
} }
Address RelocInfo::call_address() { Address RelocInfo::call_address() {
ASSERT(IsCallInstruction()); ASSERT(IsPatchedReturnSequence());
return Memory::Address_at( return Memory::Address_at(
pc_ + Assembler::kRealPatchReturnSequenceAddressOffset); pc_ + Assembler::kRealPatchReturnSequenceAddressOffset);
} }
void RelocInfo::set_call_address(Address target) { void RelocInfo::set_call_address(Address target) {
ASSERT(IsCallInstruction()); ASSERT(IsPatchedReturnSequence());
Memory::Address_at(pc_ + Assembler::kRealPatchReturnSequenceAddressOffset) = Memory::Address_at(pc_ + Assembler::kRealPatchReturnSequenceAddressOffset) =
target; target;
} }
Object* RelocInfo::call_object() { Object* RelocInfo::call_object() {
ASSERT(IsCallInstruction()); ASSERT(IsPatchedReturnSequence());
return *call_object_address(); return *call_object_address();
} }
void RelocInfo::set_call_object(Object* target) { void RelocInfo::set_call_object(Object* target) {
ASSERT(IsCallInstruction()); ASSERT(IsPatchedReturnSequence());
*call_object_address() = target; *call_object_address() = target;
} }
Object** RelocInfo::call_object_address() { Object** RelocInfo::call_object_address() {
ASSERT(IsCallInstruction()); ASSERT(IsPatchedReturnSequence());
return reinterpret_cast<Object**>( return reinterpret_cast<Object**>(
pc_ + Assembler::kPatchReturnSequenceAddressOffset); pc_ + Assembler::kPatchReturnSequenceAddressOffset);
} }

92
deps/v8/src/x64/assembler-x64.cc

@ -708,7 +708,7 @@ void Assembler::shift_32(Register dst, int subcode) {
void Assembler::shift_32(Register dst, Immediate shift_amount, int subcode) { void Assembler::shift_32(Register dst, Immediate shift_amount, int subcode) {
EnsureSpace ensure_space(this); EnsureSpace ensure_space(this);
last_pc_ = pc_; last_pc_ = pc_;
ASSERT(is_uint6(shift_amount.value_)); // illegal shift count ASSERT(is_uint5(shift_amount.value_)); // illegal shift count
if (shift_amount.value_ == 1) { if (shift_amount.value_ == 1) {
emit_optional_rex_32(dst); emit_optional_rex_32(dst);
emit(0xD1); emit(0xD1);
@ -794,6 +794,12 @@ void Assembler::call(const Operand& op) {
} }
void Assembler::clc() {
EnsureSpace ensure_space(this);
last_pc_ = pc_;
emit(0xF8);
}
void Assembler::cdq() { void Assembler::cdq() {
EnsureSpace ensure_space(this); EnsureSpace ensure_space(this);
last_pc_ = pc_; last_pc_ = pc_;
@ -802,6 +808,11 @@ void Assembler::cdq() {
void Assembler::cmovq(Condition cc, Register dst, Register src) { void Assembler::cmovq(Condition cc, Register dst, Register src) {
if (cc == always) {
movq(dst, src);
} else if (cc == never) {
return;
}
// No need to check CpuInfo for CMOV support, it's a required part of the // No need to check CpuInfo for CMOV support, it's a required part of the
// 64-bit architecture. // 64-bit architecture.
ASSERT(cc >= 0); // Use mov for unconditional moves. ASSERT(cc >= 0); // Use mov for unconditional moves.
@ -816,6 +827,11 @@ void Assembler::cmovq(Condition cc, Register dst, Register src) {
void Assembler::cmovq(Condition cc, Register dst, const Operand& src) { void Assembler::cmovq(Condition cc, Register dst, const Operand& src) {
if (cc == always) {
movq(dst, src);
} else if (cc == never) {
return;
}
ASSERT(cc >= 0); ASSERT(cc >= 0);
EnsureSpace ensure_space(this); EnsureSpace ensure_space(this);
last_pc_ = pc_; last_pc_ = pc_;
@ -828,6 +844,11 @@ void Assembler::cmovq(Condition cc, Register dst, const Operand& src) {
void Assembler::cmovl(Condition cc, Register dst, Register src) { void Assembler::cmovl(Condition cc, Register dst, Register src) {
if (cc == always) {
movl(dst, src);
} else if (cc == never) {
return;
}
ASSERT(cc >= 0); ASSERT(cc >= 0);
EnsureSpace ensure_space(this); EnsureSpace ensure_space(this);
last_pc_ = pc_; last_pc_ = pc_;
@ -840,6 +861,11 @@ void Assembler::cmovl(Condition cc, Register dst, Register src) {
void Assembler::cmovl(Condition cc, Register dst, const Operand& src) { void Assembler::cmovl(Condition cc, Register dst, const Operand& src) {
if (cc == always) {
movl(dst, src);
} else if (cc == never) {
return;
}
ASSERT(cc >= 0); ASSERT(cc >= 0);
EnsureSpace ensure_space(this); EnsureSpace ensure_space(this);
last_pc_ = pc_; last_pc_ = pc_;
@ -913,6 +939,27 @@ void Assembler::decl(const Operand& dst) {
} }
void Assembler::decb(Register dst) {
EnsureSpace ensure_space(this);
last_pc_ = pc_;
if (dst.code() > 3) {
// Register is not one of al, bl, cl, dl. Its encoding needs REX.
emit_rex_32(dst);
}
emit(0xFE);
emit_modrm(0x1, dst);
}
void Assembler::decb(const Operand& dst) {
EnsureSpace ensure_space(this);
last_pc_ = pc_;
emit_optional_rex_32(dst);
emit(0xFE);
emit_operand(1, dst);
}
void Assembler::enter(Immediate size) { void Assembler::enter(Immediate size) {
EnsureSpace ensure_space(this); EnsureSpace ensure_space(this);
last_pc_ = pc_; last_pc_ = pc_;
@ -1037,6 +1084,12 @@ void Assembler::int3() {
void Assembler::j(Condition cc, Label* L) { void Assembler::j(Condition cc, Label* L) {
if (cc == always) {
jmp(L);
return;
} else if (cc == never) {
return;
}
EnsureSpace ensure_space(this); EnsureSpace ensure_space(this);
last_pc_ = pc_; last_pc_ = pc_;
ASSERT(is_uint4(cc)); ASSERT(is_uint4(cc));
@ -1373,10 +1426,7 @@ void Assembler::movq(Register dst, Handle<Object> value, RelocInfo::Mode mode) {
// There is no possible reason to store a heap pointer without relocation // There is no possible reason to store a heap pointer without relocation
// info, so it must be a smi. // info, so it must be a smi.
ASSERT(value->IsSmi()); ASSERT(value->IsSmi());
// Smis never have more than 32 significant bits, but they might movq(dst, reinterpret_cast<int64_t>(*value), RelocInfo::NONE);
// have garbage in the high bits.
movq(dst,
Immediate(static_cast<int32_t>(reinterpret_cast<intptr_t>(*value))));
} else { } else {
EnsureSpace ensure_space(this); EnsureSpace ensure_space(this);
last_pc_ = pc_; last_pc_ = pc_;
@ -1650,22 +1700,6 @@ void Assembler::pushfq() {
} }
void Assembler::rcl(Register dst, uint8_t imm8) {
EnsureSpace ensure_space(this);
last_pc_ = pc_;
ASSERT(is_uint6(imm8)); // illegal shift count
if (imm8 == 1) {
emit_rex_64(dst);
emit(0xD1);
emit_modrm(0x2, dst);
} else {
emit_rex_64(dst);
emit(0xC1);
emit_modrm(0x2, dst);
emit(imm8);
}
}
void Assembler::rdtsc() { void Assembler::rdtsc() {
EnsureSpace ensure_space(this); EnsureSpace ensure_space(this);
last_pc_ = pc_; last_pc_ = pc_;
@ -1689,6 +1723,10 @@ void Assembler::ret(int imm16) {
void Assembler::setcc(Condition cc, Register reg) { void Assembler::setcc(Condition cc, Register reg) {
if (cc > last_condition) {
movb(reg, Immediate(cc == always ? 1 : 0));
return;
}
EnsureSpace ensure_space(this); EnsureSpace ensure_space(this);
last_pc_ = pc_; last_pc_ = pc_;
ASSERT(is_uint4(cc)); ASSERT(is_uint4(cc));
@ -1750,6 +1788,18 @@ void Assembler::store_rax(ExternalReference ref) {
} }
void Assembler::testb(Register dst, Register src) {
EnsureSpace ensure_space(this);
last_pc_ = pc_;
if (dst.code() > 3 || src.code() > 3) {
// Register is not one of al, bl, cl, dl. Its encoding needs REX.
emit_rex_32(dst, src);
}
emit(0x84);
emit_modrm(dst, src);
}
void Assembler::testb(Register reg, Immediate mask) { void Assembler::testb(Register reg, Immediate mask) {
ASSERT(is_int8(mask.value_) || is_uint8(mask.value_)); ASSERT(is_int8(mask.value_) || is_uint8(mask.value_));
EnsureSpace ensure_space(this); EnsureSpace ensure_space(this);

46
deps/v8/src/x64/assembler-x64.h

@ -222,13 +222,18 @@ enum Condition {
less_equal = 14, less_equal = 14,
greater = 15, greater = 15,
// Fake conditions that are handled by the
// opcodes using them.
always = 16,
never = 17,
// aliases // aliases
carry = below, carry = below,
not_carry = above_equal, not_carry = above_equal,
zero = equal, zero = equal,
not_zero = not_equal, not_zero = not_equal,
sign = negative, sign = negative,
not_sign = positive not_sign = positive,
last_condition = greater
}; };
@ -284,7 +289,6 @@ inline Hint NegateHint(Hint hint) {
class Immediate BASE_EMBEDDED { class Immediate BASE_EMBEDDED {
public: public:
explicit Immediate(int32_t value) : value_(value) {} explicit Immediate(int32_t value) : value_(value) {}
inline explicit Immediate(Smi* value);
private: private:
int32_t value_; int32_t value_;
@ -372,6 +376,11 @@ class CpuFeatures : public AllStatic {
static void Probe(); static void Probe();
// Check whether a feature is supported by the target CPU. // Check whether a feature is supported by the target CPU.
static bool IsSupported(Feature f) { static bool IsSupported(Feature f) {
if (f == SSE2 && !FLAG_enable_sse2) return false;
if (f == SSE3 && !FLAG_enable_sse3) return false;
if (f == CMOV && !FLAG_enable_cmov) return false;
if (f == RDTSC && !FLAG_enable_rdtsc) return false;
if (f == SAHF && !FLAG_enable_sahf) return false;
return (supported_ & (V8_UINT64_C(1) << f)) != 0; return (supported_ & (V8_UINT64_C(1) << f)) != 0;
} }
// Check whether a feature is currently enabled. // Check whether a feature is currently enabled.
@ -699,10 +708,17 @@ class Assembler : public Malloced {
immediate_arithmetic_op_32(0x4, dst, src); immediate_arithmetic_op_32(0x4, dst, src);
} }
void andl(Register dst, Register src) {
arithmetic_op_32(0x23, dst, src);
}
void decq(Register dst); void decq(Register dst);
void decq(const Operand& dst); void decq(const Operand& dst);
void decl(Register dst); void decl(Register dst);
void decl(const Operand& dst); void decl(const Operand& dst);
void decb(Register dst);
void decb(const Operand& dst);
// Sign-extends rax into rdx:rax. // Sign-extends rax into rdx:rax.
void cqo(); void cqo();
@ -758,12 +774,34 @@ class Assembler : public Malloced {
immediate_arithmetic_op(0x1, dst, src); immediate_arithmetic_op(0x1, dst, src);
} }
void orl(Register dst, Immediate src) {
immediate_arithmetic_op_32(0x1, dst, src);
}
void or_(const Operand& dst, Immediate src) { void or_(const Operand& dst, Immediate src) {
immediate_arithmetic_op(0x1, dst, src); immediate_arithmetic_op(0x1, dst, src);
} }
void orl(const Operand& dst, Immediate src) {
immediate_arithmetic_op_32(0x1, dst, src);
}
void rcl(Register dst, uint8_t imm8); void rcl(Register dst, Immediate imm8) {
shift(dst, imm8, 0x2);
}
void rol(Register dst, Immediate imm8) {
shift(dst, imm8, 0x0);
}
void rcr(Register dst, Immediate imm8) {
shift(dst, imm8, 0x3);
}
void ror(Register dst, Immediate imm8) {
shift(dst, imm8, 0x1);
}
// Shifts dst:src left by cl bits, affecting only dst. // Shifts dst:src left by cl bits, affecting only dst.
void shld(Register dst, Register src); void shld(Register dst, Register src);
@ -864,6 +902,7 @@ class Assembler : public Malloced {
immediate_arithmetic_op_8(0x5, dst, src); immediate_arithmetic_op_8(0x5, dst, src);
} }
void testb(Register dst, Register src);
void testb(Register reg, Immediate mask); void testb(Register reg, Immediate mask);
void testb(const Operand& op, Immediate mask); void testb(const Operand& op, Immediate mask);
void testl(Register dst, Register src); void testl(Register dst, Register src);
@ -902,6 +941,7 @@ class Assembler : public Malloced {
void bts(const Operand& dst, Register src); void bts(const Operand& dst, Register src);
// Miscellaneous // Miscellaneous
void clc();
void cpuid(); void cpuid();
void hlt(); void hlt();
void int3(); void int3();

46
deps/v8/src/x64/builtins-x64.cc

@ -53,7 +53,7 @@ static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
__ movq(rbp, rsp); __ movq(rbp, rsp);
// Store the arguments adaptor context sentinel. // Store the arguments adaptor context sentinel.
__ push(Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR))); __ Push(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
// Push the function on the stack. // Push the function on the stack.
__ push(rdi); __ push(rdi);
@ -75,14 +75,9 @@ static void LeaveArgumentsAdaptorFrame(MacroAssembler* masm) {
__ pop(rbp); __ pop(rbp);
// Remove caller arguments from the stack. // Remove caller arguments from the stack.
// rbx holds a Smi, so we convery to dword offset by multiplying by 4.
// TODO(smi): Find a way to abstract indexing by a smi.
ASSERT_EQ(kSmiTagSize, 1 && kSmiTag == 0);
ASSERT_EQ(kPointerSize, (1 << kSmiTagSize) * 4);
// TODO(smi): Find way to abstract indexing by a smi.
__ pop(rcx); __ pop(rcx);
// 1 * kPointerSize is offset of receiver. SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
__ lea(rsp, Operand(rsp, rbx, times_half_pointer_size, 1 * kPointerSize)); __ lea(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
__ push(rcx); __ push(rcx);
} }
@ -342,7 +337,7 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
// Because runtime functions always remove the receiver from the stack, we // Because runtime functions always remove the receiver from the stack, we
// have to fake one to avoid underflowing the stack. // have to fake one to avoid underflowing the stack.
__ push(rax); __ push(rax);
__ push(Immediate(Smi::FromInt(0))); __ Push(Smi::FromInt(0));
// Do call to runtime routine. // Do call to runtime routine.
__ CallRuntime(Runtime::kStackGuard, 1); __ CallRuntime(Runtime::kStackGuard, 1);
@ -434,7 +429,7 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
// Update the index on the stack and in register rax. // Update the index on the stack and in register rax.
__ movq(rax, Operand(rbp, kIndexOffset)); __ movq(rax, Operand(rbp, kIndexOffset));
__ addq(rax, Immediate(Smi::FromInt(1))); __ SmiAddConstant(rax, rax, Smi::FromInt(1));
__ movq(Operand(rbp, kIndexOffset), rax); __ movq(Operand(rbp, kIndexOffset), rax);
__ bind(&entry); __ bind(&entry);
@ -507,7 +502,7 @@ static void AllocateEmptyJSArray(MacroAssembler* masm,
__ Move(FieldOperand(result, JSArray::kPropertiesOffset), __ Move(FieldOperand(result, JSArray::kPropertiesOffset),
Factory::empty_fixed_array()); Factory::empty_fixed_array());
// Field JSArray::kElementsOffset is initialized later. // Field JSArray::kElementsOffset is initialized later.
__ movq(FieldOperand(result, JSArray::kLengthOffset), Immediate(0)); __ Move(FieldOperand(result, JSArray::kLengthOffset), Smi::FromInt(0));
// If no storage is requested for the elements array just set the empty // If no storage is requested for the elements array just set the empty
// fixed array. // fixed array.
@ -718,14 +713,12 @@ static void ArrayNativeCode(MacroAssembler* masm,
__ cmpq(rax, Immediate(1)); __ cmpq(rax, Immediate(1));
__ j(not_equal, &argc_two_or_more); __ j(not_equal, &argc_two_or_more);
__ movq(rdx, Operand(rsp, kPointerSize)); // Get the argument from the stack. __ movq(rdx, Operand(rsp, kPointerSize)); // Get the argument from the stack.
Condition not_positive_smi = __ CheckNotPositiveSmi(rdx); __ JumpIfNotPositiveSmi(rdx, call_generic_code);
__ j(not_positive_smi, call_generic_code);
// Handle construction of an empty array of a certain size. Bail out if size // Handle construction of an empty array of a certain size. Bail out if size
// is to large to actually allocate an elements array. // is to large to actually allocate an elements array.
__ JumpIfSmiGreaterEqualsConstant(rdx, __ SmiCompare(rdx, Smi::FromInt(JSObject::kInitialMaxFastElementArray));
JSObject::kInitialMaxFastElementArray, __ j(greater_equal, call_generic_code);
call_generic_code);
// rax: argc // rax: argc
// rdx: array_size (smi) // rdx: array_size (smi)
@ -825,10 +818,10 @@ void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
__ movq(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset)); __ movq(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
// Will both indicate a NULL and a Smi. // Will both indicate a NULL and a Smi.
ASSERT(kSmiTag == 0); ASSERT(kSmiTag == 0);
Condition not_smi = __ CheckNotSmi(rbx); Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
__ Assert(not_smi, "Unexpected initial map for Array function"); __ Check(not_smi, "Unexpected initial map for Array function");
__ CmpObjectType(rbx, MAP_TYPE, rcx); __ CmpObjectType(rbx, MAP_TYPE, rcx);
__ Assert(equal, "Unexpected initial map for Array function"); __ Check(equal, "Unexpected initial map for Array function");
} }
// Run the native code for the Array function called as a normal function. // Run the native code for the Array function called as a normal function.
@ -857,15 +850,15 @@ void Builtins::Generate_ArrayConstructCode(MacroAssembler* masm) {
// does always have a map. // does always have a map.
GenerateLoadArrayFunction(masm, rbx); GenerateLoadArrayFunction(masm, rbx);
__ cmpq(rdi, rbx); __ cmpq(rdi, rbx);
__ Assert(equal, "Unexpected Array function"); __ Check(equal, "Unexpected Array function");
// Initial map for the builtin Array function should be a map. // Initial map for the builtin Array function should be a map.
__ movq(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset)); __ movq(rbx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
// Will both indicate a NULL and a Smi. // Will both indicate a NULL and a Smi.
ASSERT(kSmiTag == 0); ASSERT(kSmiTag == 0);
Condition not_smi = __ CheckNotSmi(rbx); Condition not_smi = NegateCondition(masm->CheckSmi(rbx));
__ Assert(not_smi, "Unexpected initial map for Array function"); __ Check(not_smi, "Unexpected initial map for Array function");
__ CmpObjectType(rbx, MAP_TYPE, rcx); __ CmpObjectType(rbx, MAP_TYPE, rcx);
__ Assert(equal, "Unexpected initial map for Array function"); __ Check(equal, "Unexpected initial map for Array function");
} }
// Run the native code for the Array function called as constructor. // Run the native code for the Array function called as constructor.
@ -902,7 +895,6 @@ void Builtins::Generate_JSConstructCall(MacroAssembler* masm) {
// edi: called object // edi: called object
// eax: number of arguments // eax: number of arguments
__ bind(&non_function_call); __ bind(&non_function_call);
// Set expected number of arguments to zero (not changing eax). // Set expected number of arguments to zero (not changing eax).
__ movq(rbx, Immediate(0)); __ movq(rbx, Immediate(0));
__ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR); __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
@ -1143,11 +1135,9 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
__ LeaveConstructFrame(); __ LeaveConstructFrame();
// Remove caller arguments from the stack and return. // Remove caller arguments from the stack and return.
ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
// TODO(smi): Find a way to abstract indexing by a smi.
__ pop(rcx); __ pop(rcx);
// 1 * kPointerSize is offset of receiver. SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
__ lea(rsp, Operand(rsp, rbx, times_half_pointer_size, 1 * kPointerSize)); __ lea(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
__ push(rcx); __ push(rcx);
__ IncrementCounter(&Counters::constructed_objects, 1); __ IncrementCounter(&Counters::constructed_objects, 1);
__ ret(0); __ ret(0);

779
deps/v8/src/x64/codegen-x64.cc

File diff suppressed because it is too large

19
deps/v8/src/x64/codegen-x64.h

@ -294,6 +294,15 @@ class CodeGenerator: public AstVisitor {
Handle<Script> script, Handle<Script> script,
bool is_eval); bool is_eval);
// Printing of AST, etc. as requested by flags.
static void MakeCodePrologue(FunctionLiteral* fun);
// Allocate and install the code.
static Handle<Code> MakeCodeEpilogue(FunctionLiteral* fun,
MacroAssembler* masm,
Code::Flags flags,
Handle<Script> script);
#ifdef ENABLE_LOGGING_AND_PROFILING #ifdef ENABLE_LOGGING_AND_PROFILING
static bool ShouldGenerateLog(Expression* type); static bool ShouldGenerateLog(Expression* type);
#endif #endif
@ -303,6 +312,8 @@ class CodeGenerator: public AstVisitor {
bool is_toplevel, bool is_toplevel,
Handle<Script> script); Handle<Script> script);
static void RecordPositions(MacroAssembler* masm, int pos);
// Accessors // Accessors
MacroAssembler* masm() { return masm_; } MacroAssembler* masm() { return masm_; }
@ -548,6 +559,14 @@ class CodeGenerator: public AstVisitor {
inline void GenerateMathSin(ZoneList<Expression*>* args); inline void GenerateMathSin(ZoneList<Expression*>* args);
inline void GenerateMathCos(ZoneList<Expression*>* args); inline void GenerateMathCos(ZoneList<Expression*>* args);
// Simple condition analysis.
enum ConditionAnalysis {
ALWAYS_TRUE,
ALWAYS_FALSE,
DONT_KNOW
};
ConditionAnalysis AnalyzeCondition(Expression* cond);
// Methods used to indicate which source code is generated for. Source // Methods used to indicate which source code is generated for. Source
// positions are collected by the assembler and emitted with the relocation // positions are collected by the assembler and emitted with the relocation
// information. // information.

5
deps/v8/src/x64/debug-x64.cc

@ -39,10 +39,7 @@ namespace internal {
bool Debug::IsDebugBreakAtReturn(v8::internal::RelocInfo* rinfo) { bool Debug::IsDebugBreakAtReturn(v8::internal::RelocInfo* rinfo) {
ASSERT(RelocInfo::IsJSReturn(rinfo->rmode())); ASSERT(RelocInfo::IsJSReturn(rinfo->rmode()));
// 11th byte of patch is 0x49 (REX.WB byte of computed jump/call to r10), return rinfo->IsPatchedReturnSequence();
// 11th byte of JS return is 0xCC (int3).
ASSERT(*(rinfo->pc() + 10) == 0x49 || *(rinfo->pc() + 10) == 0xCC);
return (*(rinfo->pc() + 10) != 0xCC);
} }
#define __ ACCESS_MASM(masm) #define __ ACCESS_MASM(masm)

181
deps/v8/src/x64/fast-codegen-x64.cc

@ -0,0 +1,181 @@
// Copyright 2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include "v8.h"
#include "codegen-inl.h"
#include "debug.h"
#include "fast-codegen.h"
namespace v8 {
namespace internal {
#define __ ACCESS_MASM(masm_)
// Generate code for a JS function. On entry to the function the receiver
// and arguments have been pushed on the stack left to right, with the
// return address on top of them. The actual argument count matches the
// formal parameter count expected by the function.
//
// The live registers are:
// o rdi: the JS function object being called (ie, ourselves)
// o rsi: our context
// o rbp: our caller's frame pointer
// o rsp: stack pointer (pointing to return address)
//
// The function builds a JS frame. Please see JavaScriptFrameConstants in
// frames-x64.h for its layout.
void FastCodeGenerator::Generate(FunctionLiteral* fun) {
function_ = fun;
// Record the function's source position so the prologue is attributed
// correctly (debugger / profiler).
SetFunctionPosition(fun);
// Standard JS frame prologue: save caller's rbp, then push the callee's
// context and function object.
__ push(rbp); // Caller's frame pointer.
__ movq(rbp, rsp);
__ push(rsi); // Callee's context.
__ push(rdi); // Callee's JS Function.
{ Comment cmnt(masm_, "[ Allocate locals");
int locals_count = fun->scope()->num_stack_slots();
// Initialize every stack-allocated local to undefined.
for (int i = 0; i < locals_count; i++) {
__ PushRoot(Heap::kUndefinedValueRootIndex);
}
}
{ Comment cmnt(masm_, "[ Stack check");
// Call the StackCheckStub if rsp has dropped below the stack limit
// (stack overflow, or an interrupt flagged via a lowered limit).
Label ok;
__ CompareRoot(rsp, Heap::kStackLimitRootIndex);
__ j(above_equal, &ok);
StackCheckStub stub;
__ CallStub(&stub);
__ bind(&ok);
}
{ Comment cmnt(masm_, "[ Body");
VisitStatements(fun->body());
}
{ Comment cmnt(masm_, "[ return <undefined>;");
// Emit a 'return undefined' in case control fell off the end of the
// body. The return value travels in rax.
__ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
SetReturnPosition(fun);
__ RecordJSReturn();
// Do not use the leave instruction here because it is too short to
// patch with the code required by the debugger.
__ movq(rsp, rbp);
__ pop(rbp);
// Pop the formal parameters plus the receiver.
__ ret((fun->scope()->num_parameters() + 1) * kPointerSize);
#ifdef ENABLE_DEBUGGER_SUPPORT
// Add padding that will be overwritten by a debugger breakpoint. We
// have just generated "movq rsp, rbp; pop rbp; ret k" with length 7
// (3 + 1 + 3).
const int kPadding = Debug::kX64JSReturnSequenceLength - 7;
for (int i = 0; i < kPadding; ++i) {
masm_->int3();
}
#endif
}
}
// Compile an expression statement: record its source position, then emit
// code for the expression itself (the expression's location determines
// whether its value is materialized or discarded).
void FastCodeGenerator::VisitExpressionStatement(ExpressionStatement* stmt) {
Comment cmnt(masm_, "[ ExpressionStatement");
SetStatementPosition(stmt);
Visit(stmt->expression());
}
// Compile 'return <expr>': evaluate the expression (leaving its value on
// top of the stack), pop it into rax, and tear down the frame. The emitted
// epilogue must match the sequence in Generate() so the debugger can patch
// either one identically.
void FastCodeGenerator::VisitReturnStatement(ReturnStatement* stmt) {
Comment cmnt(masm_, "[ ReturnStatement");
SetStatementPosition(stmt);
Visit(stmt->expression());
// The return value is expected in rax.
__ pop(rax);
__ RecordJSReturn();
// Do not use the leave instruction here because it is too short to
// patch with the code required by the debugger.
__ movq(rsp, rbp);
__ pop(rbp);
// Pop the formal parameters plus the receiver.
__ ret((function_->scope()->num_parameters() + 1) * kPointerSize);
#ifdef ENABLE_DEBUGGER_SUPPORT
// Add padding that will be overwritten by a debugger breakpoint. We
// have just generated "movq rsp, rbp; pop rbp; ret k" with length 7
// (3 + 1 + 3).
const int kPadding = Debug::kX64JSReturnSequenceLength - 7;
for (int i = 0; i < kPadding; ++i) {
masm_->int3();
}
#endif
}
// Compile a variable reference. The fast code generator only handles
// variables rewritten to stack slots (both ASSERTs below enforce that);
// the value is pushed only when the expression's location asks for a
// temporary, otherwise the load is elided entirely.
void FastCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
Comment cmnt(masm_, "[ VariableProxy");
Expression* rewrite = expr->var()->rewrite();
ASSERT(rewrite != NULL);
Slot* slot = rewrite->AsSlot();
ASSERT(slot != NULL);
{ Comment cmnt(masm_, "[ Slot");
if (expr->location().is_temporary()) {
// Load the slot's value from the frame and push it.
__ push(Operand(rbp, SlotOffset(slot)));
} else {
// Value is not needed; nothing to emit.
ASSERT(expr->location().is_nowhere());
}
}
}
// Compile a literal: push its handle as a constant when the value is
// needed (temporary location); emit nothing when the value is discarded.
void FastCodeGenerator::VisitLiteral(Literal* expr) {
Comment cmnt(masm_, "[ Literal");
if (expr->location().is_temporary()) {
__ Push(expr->handle());
} else {
ASSERT(expr->location().is_nowhere());
}
}
// Compile an assignment to a stack-slot variable. Only plain '=' and
// variable initialization are supported here. The RHS is evaluated first,
// leaving its value on top of the stack; then:
//  - temporary location: the value stays on the stack (it is the
//    expression's result), so store a copy into the slot via rax;
//  - nowhere location: the value is not needed afterwards, so pop it
//    directly into the slot.
void FastCodeGenerator::VisitAssignment(Assignment* expr) {
Comment cmnt(masm_, "[ Assignment");
ASSERT(expr->op() == Token::ASSIGN || expr->op() == Token::INIT_VAR);
Visit(expr->value());
Variable* var = expr->target()->AsVariableProxy()->AsVariable();
ASSERT(var != NULL && var->slot() != NULL);
if (expr->location().is_temporary()) {
__ movq(rax, Operand(rsp, 0));
__ movq(Operand(rbp, SlotOffset(var->slot())), rax);
} else {
ASSERT(expr->location().is_nowhere());
__ pop(Operand(rbp, SlotOffset(var->slot())));
}
}
} } // namespace v8::internal

3
deps/v8/src/x64/frames-x64.h

@ -31,9 +31,6 @@
namespace v8 { namespace v8 {
namespace internal { namespace internal {
// TODO(x64): This is a stub, mostly just a copy of the ia32 bit version.
// This might all need to change to be correct for x64.
static const int kNumRegs = 8; static const int kNumRegs = 8;
static const RegList kJSCallerSaved = static const RegList kJSCallerSaved =
1 << 0 | // rax 1 << 0 | // rax

51
deps/v8/src/x64/ic-x64.cc

@ -131,8 +131,8 @@ static void GenerateDictionaryLoad(MacroAssembler* masm, Label* miss_label,
// Check that the value is a normal property. // Check that the value is a normal property.
__ bind(&done); __ bind(&done);
const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize; const int kDetailsOffset = kElementsStartOffset + 2 * kPointerSize;
__ testl(Operand(r0, r1, times_pointer_size, kDetailsOffset - kHeapObjectTag), __ Test(Operand(r0, r1, times_pointer_size, kDetailsOffset - kHeapObjectTag),
Immediate(Smi::FromInt(PropertyDetails::TypeField::mask()))); Smi::FromInt(PropertyDetails::TypeField::mask()));
__ j(not_zero, miss_label); __ j(not_zero, miss_label);
// Get the value at the masked, scaled index. // Get the value at the masked, scaled index.
@ -336,13 +336,13 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
__ testb(FieldOperand(rdx, Map::kInstanceTypeOffset), __ testb(FieldOperand(rdx, Map::kInstanceTypeOffset),
Immediate(kIsSymbolMask)); Immediate(kIsSymbolMask));
__ j(zero, &slow); __ j(zero, &slow);
// Probe the dictionary leaving result in ecx. // Probe the dictionary leaving result in rcx.
GenerateDictionaryLoad(masm, &slow, rbx, rcx, rdx, rax); GenerateDictionaryLoad(masm, &slow, rbx, rcx, rdx, rax);
GenerateCheckNonObjectOrLoaded(masm, &slow, rcx); GenerateCheckNonObjectOrLoaded(masm, &slow, rcx);
__ movq(rax, rcx); __ movq(rax, rcx);
__ IncrementCounter(&Counters::keyed_load_generic_symbol, 1); __ IncrementCounter(&Counters::keyed_load_generic_symbol, 1);
__ ret(0); __ ret(0);
// Array index string: If short enough use cache in length/hash field (ebx). // Array index string: If short enough use cache in length/hash field (rbx).
// We assert that there are enough bits in an int32_t after the hash shift // We assert that there are enough bits in an int32_t after the hash shift
// bits have been subtracted to allow space for the length and the cached // bits have been subtracted to allow space for the length and the cached
// array index. // array index.
@ -434,9 +434,6 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm) {
__ movq(rbx, Operand(rsp, 1 * kPointerSize)); // 1 ~ return address __ movq(rbx, Operand(rsp, 1 * kPointerSize)); // 1 ~ return address
// Check that the key is a smi. // Check that the key is a smi.
__ JumpIfNotSmi(rbx, &slow); __ JumpIfNotSmi(rbx, &slow);
// If it is a smi, make sure it is zero-extended, so it can be
// used as an index in a memory operand.
__ movl(rbx, rbx); // Clear the high bits of rbx.
__ CmpInstanceType(rcx, JS_ARRAY_TYPE); __ CmpInstanceType(rcx, JS_ARRAY_TYPE);
__ j(equal, &array); __ j(equal, &array);
@ -447,7 +444,7 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm) {
// Object case: Check key against length in the elements array. // Object case: Check key against length in the elements array.
// rax: value // rax: value
// rdx: JSObject // rdx: JSObject
// rbx: index (as a smi), zero-extended. // rbx: index (as a smi)
__ movq(rcx, FieldOperand(rdx, JSObject::kElementsOffset)); __ movq(rcx, FieldOperand(rdx, JSObject::kElementsOffset));
// Check that the object is in fast mode (not dictionary). // Check that the object is in fast mode (not dictionary).
__ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset), __ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset),
@ -488,14 +485,11 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm) {
__ movq(rdx, rax); // Save the value. __ movq(rdx, rax); // Save the value.
__ SmiToInteger32(rax, rax); __ SmiToInteger32(rax, rax);
{ // Clamp the value to [0..255]. { // Clamp the value to [0..255].
Label done, is_negative; Label done;
__ testl(rax, Immediate(0xFFFFFF00)); __ testl(rax, Immediate(0xFFFFFF00));
__ j(zero, &done); __ j(zero, &done);
__ j(negative, &is_negative); __ setcc(negative, rax); // 1 if negative, 0 if positive.
__ movl(rax, Immediate(255)); __ decb(rax); // 0 if negative, 255 if positive.
__ jmp(&done);
__ bind(&is_negative);
__ xorl(rax, rax); // Clear rax.
__ bind(&done); __ bind(&done);
} }
__ movq(rcx, FieldOperand(rcx, PixelArray::kExternalPointerOffset)); __ movq(rcx, FieldOperand(rcx, PixelArray::kExternalPointerOffset));
@ -511,15 +505,15 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm) {
// rdx: JSArray // rdx: JSArray
// rcx: FixedArray // rcx: FixedArray
// rbx: index (as a smi) // rbx: index (as a smi)
// flags: compare (rbx, rdx.length()) // flags: smicompare (rdx.length(), rbx)
__ j(not_equal, &slow); // do not leave holes in the array __ j(not_equal, &slow); // do not leave holes in the array
__ SmiToInteger64(rbx, rbx); __ SmiToInteger64(rbx, rbx);
__ cmpl(rbx, FieldOperand(rcx, FixedArray::kLengthOffset)); __ cmpl(rbx, FieldOperand(rcx, FixedArray::kLengthOffset));
__ j(above_equal, &slow); __ j(above_equal, &slow);
// Increment and restore smi-tag. // Increment and restore smi-tag.
__ Integer64AddToSmi(rbx, rbx, 1); __ Integer64PlusConstantToSmi(rbx, rbx, 1);
__ movq(FieldOperand(rdx, JSArray::kLengthOffset), rbx); __ movq(FieldOperand(rdx, JSArray::kLengthOffset), rbx);
__ SmiSubConstant(rbx, rbx, 1, NULL); __ SmiSubConstant(rbx, rbx, Smi::FromInt(1));
__ jmp(&fast); __ jmp(&fast);
// Array case: Get the length and the elements array from the JS // Array case: Get the length and the elements array from the JS
@ -530,25 +524,36 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm) {
// rdx: JSArray // rdx: JSArray
// rbx: index (as a smi) // rbx: index (as a smi)
__ movq(rcx, FieldOperand(rdx, JSObject::kElementsOffset)); __ movq(rcx, FieldOperand(rdx, JSObject::kElementsOffset));
__ Cmp(FieldOperand(rcx, HeapObject::kMapOffset), Factory::fixed_array_map()); __ CompareRoot(FieldOperand(rcx, HeapObject::kMapOffset),
Heap::kFixedArrayMapRootIndex);
__ j(not_equal, &slow); __ j(not_equal, &slow);
// Check the key against the length in the array, compute the // Check the key against the length in the array, compute the
// address to store into and fall through to fast case. // address to store into and fall through to fast case.
__ cmpl(rbx, FieldOperand(rdx, JSArray::kLengthOffset)); __ SmiCompare(FieldOperand(rdx, JSArray::kLengthOffset), rbx);
__ j(above_equal, &extra); __ j(below_equal, &extra);
// Fast case: Do the store. // Fast case: Do the store.
__ bind(&fast); __ bind(&fast);
// rax: value // rax: value
// rcx: FixedArray // rcx: FixedArray
// rbx: index (as a smi) // rbx: index (as a smi)
__ movq(Operand(rcx, rbx, times_half_pointer_size, Label non_smi_value;
__ JumpIfNotSmi(rax, &non_smi_value);
SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
__ movq(Operand(rcx, index.reg, index.scale,
FixedArray::kHeaderSize - kHeapObjectTag), FixedArray::kHeaderSize - kHeapObjectTag),
rax); rax);
__ ret(0);
__ bind(&non_smi_value);
// Slow case that needs to retain rbx for use by RecordWrite.
// Update write barrier for the elements array address. // Update write barrier for the elements array address.
SmiIndex index2 = masm->SmiToIndex(kScratchRegister, rbx, kPointerSizeLog2);
__ movq(Operand(rcx, index2.reg, index2.scale,
FixedArray::kHeaderSize - kHeapObjectTag),
rax);
__ movq(rdx, rax); __ movq(rdx, rax);
__ RecordWrite(rcx, 0, rdx, rbx); __ RecordWriteNonSmi(rcx, 0, rdx, rbx);
__ ret(0); __ ret(0);
} }

1060
deps/v8/src/x64/macro-assembler-x64.cc

File diff suppressed because it is too large

87
deps/v8/src/x64/macro-assembler-x64.h

@ -72,6 +72,18 @@ class MacroAssembler: public Assembler {
Register value, Register value,
Register scratch); Register scratch);
// Set the remembered set bit for [object+offset].
// The value is known to not be a smi.
// object is the object being stored into, value is the object being stored.
// If offset is zero, then the scratch register contains the array index into
// the elements array represented as a Smi.
// All registers are clobbered by the operation.
void RecordWriteNonSmi(Register object,
int offset,
Register value,
Register scratch);
#ifdef ENABLE_DEBUGGER_SUPPORT #ifdef ENABLE_DEBUGGER_SUPPORT
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
// Debugger Support // Debugger Support
@ -146,11 +158,12 @@ class MacroAssembler: public Assembler {
// Tag an integer value if possible, or jump the integer value cannot be // Tag an integer value if possible, or jump the integer value cannot be
// represented as a smi. Only uses the low 32 bit of the src registers. // represented as a smi. Only uses the low 32 bit of the src registers.
// NOTICE: Destroys the dst register even if unsuccessful!
void Integer32ToSmi(Register dst, Register src, Label* on_overflow); void Integer32ToSmi(Register dst, Register src, Label* on_overflow);
// Adds constant to src and tags the result as a smi. // Adds constant to src and tags the result as a smi.
// Result must be a valid smi. // Result must be a valid smi.
void Integer64AddToSmi(Register dst, Register src, int constant); void Integer64PlusConstantToSmi(Register dst, Register src, int constant);
// Convert smi to 32-bit integer. I.e., not sign extended into // Convert smi to 32-bit integer. I.e., not sign extended into
// high 32 bits of destination. // high 32 bits of destination.
@ -165,38 +178,31 @@ class MacroAssembler: public Assembler {
Register src, Register src,
int power); int power);
// Simple comparison of smis.
void SmiCompare(Register dst, Register src);
void SmiCompare(Register dst, Smi* src);
void SmiCompare(const Operand& dst, Register src);
void SmiCompare(const Operand& dst, Smi* src);
// Sets sign and zero flags depending on value of smi in register.
void SmiTest(Register src);
// Functions performing a check on a known or potential smi. Returns // Functions performing a check on a known or potential smi. Returns
// a condition that is satisfied if the check is successful. // a condition that is satisfied if the check is successful.
// Is the value a tagged smi. // Is the value a tagged smi.
Condition CheckSmi(Register src); Condition CheckSmi(Register src);
// Is the value not a tagged smi.
Condition CheckNotSmi(Register src);
// Is the value a positive tagged smi. // Is the value a positive tagged smi.
Condition CheckPositiveSmi(Register src); Condition CheckPositiveSmi(Register src);
// Is the value not a positive tagged smi.
Condition CheckNotPositiveSmi(Register src);
// Are both values are tagged smis. // Are both values are tagged smis.
Condition CheckBothSmi(Register first, Register second); Condition CheckBothSmi(Register first, Register second);
// Is one of the values not a tagged smi.
Condition CheckNotBothSmi(Register first, Register second);
// Is the value the minimum smi value (since we are using // Is the value the minimum smi value (since we are using
// two's complement numbers, negating the value is known to yield // two's complement numbers, negating the value is known to yield
// a non-smi value). // a non-smi value).
Condition CheckIsMinSmi(Register src); Condition CheckIsMinSmi(Register src);
// Check whether a tagged smi is equal to a constant.
Condition CheckSmiEqualsConstant(Register src, int constant);
// Check whether a tagged smi is greater than or equal to a constant.
Condition CheckSmiGreaterEqualsConstant(Register src, int constant);
// Checks whether an 32-bit integer value is a valid for conversion // Checks whether an 32-bit integer value is a valid for conversion
// to a smi. // to a smi.
Condition CheckInteger32ValidSmiValue(Register src); Condition CheckInteger32ValidSmiValue(Register src);
@ -216,15 +222,9 @@ class MacroAssembler: public Assembler {
// Jump to label if the value is not a positive tagged smi. // Jump to label if the value is not a positive tagged smi.
void JumpIfNotPositiveSmi(Register src, Label* on_not_smi); void JumpIfNotPositiveSmi(Register src, Label* on_not_smi);
// Jump to label if the value is a tagged smi with value equal // Jump to label if the value, which must be a tagged smi, has value equal
// to the constant. // to the constant.
void JumpIfSmiEqualsConstant(Register src, int constant, Label* on_equals); void JumpIfSmiEqualsConstant(Register src, Smi* constant, Label* on_equals);
// Jump to label if the value is a tagged smi with value greater than or equal
// to the constant.
void JumpIfSmiGreaterEqualsConstant(Register src,
int constant,
Label* on_equals);
// Jump if either or both register are not smi values. // Jump if either or both register are not smi values.
void JumpIfNotBothSmi(Register src1, Register src2, Label* on_not_both_smi); void JumpIfNotBothSmi(Register src1, Register src2, Label* on_not_both_smi);
@ -239,29 +239,36 @@ class MacroAssembler: public Assembler {
// the label. // the label.
void SmiTryAddConstant(Register dst, void SmiTryAddConstant(Register dst,
Register src, Register src,
int32_t constant, Smi* constant,
Label* on_not_smi_result); Label* on_not_smi_result);
// Add an integer constant to a tagged smi, giving a tagged smi as result.
// No overflow testing on the result is done.
void SmiAddConstant(Register dst, Register src, Smi* constant);
// Add an integer constant to a tagged smi, giving a tagged smi as result, // Add an integer constant to a tagged smi, giving a tagged smi as result,
// or jumping to a label if the result cannot be represented by a smi. // or jumping to a label if the result cannot be represented by a smi.
// If the label is NULL, no testing on the result is done.
void SmiAddConstant(Register dst, void SmiAddConstant(Register dst,
Register src, Register src,
int32_t constant, Smi* constant,
Label* on_not_smi_result); Label* on_not_smi_result);
// Subtract an integer constant from a tagged smi, giving a tagged smi as
// result. No testing on the result is done.
void SmiSubConstant(Register dst, Register src, Smi* constant);
// Subtract an integer constant from a tagged smi, giving a tagged smi as // Subtract an integer constant from a tagged smi, giving a tagged smi as
// result, or jumping to a label if the result cannot be represented by a smi. // result, or jumping to a label if the result cannot be represented by a smi.
// If the label is NULL, no testing on the result is done.
void SmiSubConstant(Register dst, void SmiSubConstant(Register dst,
Register src, Register src,
int32_t constant, Smi* constant,
Label* on_not_smi_result); Label* on_not_smi_result);
// Negating a smi can give a negative zero or too large positive value. // Negating a smi can give a negative zero or too large positive value.
// NOTICE: This operation jumps on success, not failure!
void SmiNeg(Register dst, void SmiNeg(Register dst,
Register src, Register src,
Label* on_not_smi_result); Label* on_smi_result);
// Adds smi values and return the result as a smi. // Adds smi values and return the result as a smi.
// If dst is src1, then src1 will be destroyed, even if // If dst is src1, then src1 will be destroyed, even if
@ -307,9 +314,9 @@ class MacroAssembler: public Assembler {
void SmiAnd(Register dst, Register src1, Register src2); void SmiAnd(Register dst, Register src1, Register src2);
void SmiOr(Register dst, Register src1, Register src2); void SmiOr(Register dst, Register src1, Register src2);
void SmiXor(Register dst, Register src1, Register src2); void SmiXor(Register dst, Register src1, Register src2);
void SmiAndConstant(Register dst, Register src1, int constant); void SmiAndConstant(Register dst, Register src1, Smi* constant);
void SmiOrConstant(Register dst, Register src1, int constant); void SmiOrConstant(Register dst, Register src1, Smi* constant);
void SmiXorConstant(Register dst, Register src1, int constant); void SmiXorConstant(Register dst, Register src1, Smi* constant);
void SmiShiftLeftConstant(Register dst, void SmiShiftLeftConstant(Register dst,
Register src, Register src,
@ -367,20 +374,27 @@ class MacroAssembler: public Assembler {
// Converts a positive smi to a negative index. // Converts a positive smi to a negative index.
SmiIndex SmiToNegativeIndex(Register dst, Register src, int shift); SmiIndex SmiToNegativeIndex(Register dst, Register src, int shift);
bool IsUnsafeSmi(Smi* value);
void LoadUnsafeSmi(Register dst, Smi* source);
// Basic Smi operations.
void Move(Register dst, Smi* source);
void Move(const Operand& dst, Smi* source);
void Push(Smi* smi);
void Test(const Operand& dst, Smi* source);
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
// Macro instructions // Macro instructions
// Expression support // Load a register with a long value as efficiently as possible.
void Set(Register dst, int64_t x); void Set(Register dst, int64_t x);
void Set(const Operand& dst, int64_t x); void Set(const Operand& dst, int64_t x);
// Handle support // Handle support
bool IsUnsafeSmi(Smi* value);
bool IsUnsafeSmi(Handle<Object> value) { bool IsUnsafeSmi(Handle<Object> value) {
return IsUnsafeSmi(Smi::cast(*value)); return IsUnsafeSmi(Smi::cast(*value));
} }
void LoadUnsafeSmi(Register dst, Smi* source);
void LoadUnsafeSmi(Register dst, Handle<Object> source) { void LoadUnsafeSmi(Register dst, Handle<Object> source) {
LoadUnsafeSmi(dst, Smi::cast(*source)); LoadUnsafeSmi(dst, Smi::cast(*source));
} }
@ -390,7 +404,6 @@ class MacroAssembler: public Assembler {
void Cmp(Register dst, Handle<Object> source); void Cmp(Register dst, Handle<Object> source);
void Cmp(const Operand& dst, Handle<Object> source); void Cmp(const Operand& dst, Handle<Object> source);
void Push(Handle<Object> source); void Push(Handle<Object> source);
void Push(Smi* smi);
// Control Flow // Control Flow
void Jump(Address destination, RelocInfo::Mode rmode); void Jump(Address destination, RelocInfo::Mode rmode);

13
deps/v8/src/x64/stub-cache-x64.cc

@ -47,19 +47,24 @@ static void ProbeTable(MacroAssembler* masm,
StubCache::Table table, StubCache::Table table,
Register name, Register name,
Register offset) { Register offset) {
// The offset register must hold a *positive* smi. ASSERT_EQ(8, kPointerSize);
ASSERT_EQ(16, sizeof(StubCache::Entry));
// The offset register holds the entry offset times four (due to masking
// and shifting optimizations).
ExternalReference key_offset(SCTableReference::keyReference(table)); ExternalReference key_offset(SCTableReference::keyReference(table));
Label miss; Label miss;
__ movq(kScratchRegister, key_offset); __ movq(kScratchRegister, key_offset);
SmiIndex index = masm->SmiToIndex(offset, offset, kPointerSizeLog2);
// Check that the key in the entry matches the name. // Check that the key in the entry matches the name.
__ cmpl(name, Operand(kScratchRegister, index.reg, index.scale, 0)); // Multiply entry offset by 16 to get the entry address. Since the
// offset register already holds the entry offset times four, multiply
// by a further four.
__ cmpl(name, Operand(kScratchRegister, offset, times_4, 0));
__ j(not_equal, &miss); __ j(not_equal, &miss);
// Get the code entry from the cache. // Get the code entry from the cache.
// Use key_offset + kPointerSize, rather than loading value_offset. // Use key_offset + kPointerSize, rather than loading value_offset.
__ movq(kScratchRegister, __ movq(kScratchRegister,
Operand(kScratchRegister, index.reg, index.scale, kPointerSize)); Operand(kScratchRegister, offset, times_4, kPointerSize));
// Check that the flags match what we're looking for. // Check that the flags match what we're looking for.
__ movl(offset, FieldOperand(kScratchRegister, Code::kFlagsOffset)); __ movl(offset, FieldOperand(kScratchRegister, Code::kFlagsOffset));
__ and_(offset, Immediate(~Code::kFlagsNotUsedInLookup)); __ and_(offset, Immediate(~Code::kFlagsNotUsedInLookup));

47
deps/v8/src/x64/virtual-frame-x64.cc

@ -63,14 +63,16 @@ void VirtualFrame::Enter() {
Comment cmnt(masm(), "[ Enter JS frame"); Comment cmnt(masm(), "[ Enter JS frame");
#ifdef DEBUG #ifdef DEBUG
// Verify that rdi contains a JS function. The following code if (FLAG_debug_code) {
// relies on rax being available for use. // Verify that rdi contains a JS function. The following code
Condition not_smi = masm()->CheckNotSmi(rdi); // relies on rax being available for use.
__ Check(not_smi, Condition not_smi = NegateCondition(masm()->CheckSmi(rdi));
"VirtualFrame::Enter - rdi is not a function (smi check)."); __ Check(not_smi,
__ CmpObjectType(rdi, JS_FUNCTION_TYPE, rax); "VirtualFrame::Enter - rdi is not a function (smi check).");
__ Check(equal, __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rax);
"VirtualFrame::Enter - rdi is not a function (map check)."); __ Check(equal,
"VirtualFrame::Enter - rdi is not a function (map check).");
}
#endif #endif
EmitPush(rbp); EmitPush(rbp);
@ -197,6 +199,14 @@ void VirtualFrame::EmitPush(Immediate immediate) {
} }
void VirtualFrame::EmitPush(Smi* smi_value) {
ASSERT(stack_pointer_ == element_count() - 1);
elements_.Add(FrameElement::MemoryElement());
stack_pointer_++;
__ Push(smi_value);
}
void VirtualFrame::EmitPush(Handle<Object> value) { void VirtualFrame::EmitPush(Handle<Object> value) {
ASSERT(stack_pointer_ == element_count() - 1); ASSERT(stack_pointer_ == element_count() - 1);
elements_.Add(FrameElement::MemoryElement()); elements_.Add(FrameElement::MemoryElement());
@ -841,7 +851,7 @@ void VirtualFrame::SyncElementByPushing(int index) {
switch (element.type()) { switch (element.type()) {
case FrameElement::INVALID: case FrameElement::INVALID:
__ push(Immediate(Smi::FromInt(0))); __ Push(Smi::FromInt(0));
break; break;
case FrameElement::MEMORY: case FrameElement::MEMORY:
@ -883,15 +893,16 @@ void VirtualFrame::SyncRange(int begin, int end) {
// on the stack. // on the stack.
int start = Min(begin, stack_pointer_ + 1); int start = Min(begin, stack_pointer_ + 1);
// If positive we have to adjust the stack pointer. // Emit normal 'push' instructions for elements above stack pointer
int delta = end - stack_pointer_; // and use mov instructions if we are below stack pointer.
if (delta > 0) {
stack_pointer_ = end;
__ subq(rsp, Immediate(delta * kPointerSize));
}
for (int i = start; i <= end; i++) { for (int i = start; i <= end; i++) {
if (!elements_[i].is_synced()) SyncElementBelowStackPointer(i); if (!elements_[i].is_synced()) {
if (i <= stack_pointer_) {
SyncElementBelowStackPointer(i);
} else {
SyncElementByPushing(i);
}
}
} }
} }
@ -1004,7 +1015,7 @@ Result VirtualFrame::CallConstructor(int arg_count) {
function.ToRegister(rdi); function.ToRegister(rdi);
// Constructors are called with the number of arguments in register // Constructors are called with the number of arguments in register
// eax for now. Another option would be to have separate construct // rax for now. Another option would be to have separate construct
// call trampolines per different arguments counts encountered. // call trampolines per different arguments counts encountered.
Result num_args = cgen()->allocator()->Allocate(rax); Result num_args = cgen()->allocator()->Allocate(rax);
ASSERT(num_args.is_valid()); ASSERT(num_args.is_valid());

1
deps/v8/src/x64/virtual-frame-x64.h

@ -377,6 +377,7 @@ class VirtualFrame : public ZoneObject {
void EmitPush(const Operand& operand); void EmitPush(const Operand& operand);
void EmitPush(Heap::RootListIndex index); void EmitPush(Heap::RootListIndex index);
void EmitPush(Immediate immediate); void EmitPush(Immediate immediate);
void EmitPush(Smi* value);
// Uses kScratchRegister, emits appropriate relocation info. // Uses kScratchRegister, emits appropriate relocation info.
void EmitPush(Handle<Object> value); void EmitPush(Handle<Object> value);

4
deps/v8/test/cctest/SConscript

@ -67,7 +67,9 @@ SOURCES = {
'test-disasm-ia32.cc', 'test-disasm-ia32.cc',
'test-log-stack-tracer.cc' 'test-log-stack-tracer.cc'
], ],
'arch:x64': ['test-assembler-x64.cc', 'test-log-stack-tracer.cc'], 'arch:x64': ['test-assembler-x64.cc',
'test-macro-assembler-x64.cc',
'test-log-stack-tracer.cc'],
'os:linux': ['test-platform-linux.cc'], 'os:linux': ['test-platform-linux.cc'],
'os:macos': ['test-platform-macos.cc'], 'os:macos': ['test-platform-macos.cc'],
'os:nullos': ['test-platform-nullos.cc'], 'os:nullos': ['test-platform-nullos.cc'],

162
deps/v8/test/cctest/test-api.cc

@ -25,8 +25,6 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include <stdlib.h>
#include "v8.h" #include "v8.h"
#include "api.h" #include "api.h"
@ -574,6 +572,44 @@ THREADED_TEST(UsingExternalAsciiString) {
} }
THREADED_TEST(StringConcat) {
{
v8::HandleScope scope;
LocalContext env;
const char* one_byte_string_1 = "function a_times_t";
const char* two_byte_string_1 = "wo_plus_b(a, b) {return ";
const char* one_byte_extern_1 = "a * 2 + b;} a_times_two_plus_b(4, 8) + ";
const char* two_byte_extern_1 = "a_times_two_plus_b(4, 8) + ";
const char* one_byte_string_2 = "a_times_two_plus_b(4, 8) + ";
const char* two_byte_string_2 = "a_times_two_plus_b(4, 8) + ";
const char* two_byte_extern_2 = "a_times_two_plus_b(1, 2);";
Local<String> left = v8_str(one_byte_string_1);
Local<String> right = String::New(AsciiToTwoByteString(two_byte_string_1));
Local<String> source = String::Concat(left, right);
right = String::NewExternal(
new TestAsciiResource(i::StrDup(one_byte_extern_1)));
source = String::Concat(source, right);
right = String::NewExternal(
new TestResource(AsciiToTwoByteString(two_byte_extern_1)));
source = String::Concat(source, right);
right = v8_str(one_byte_string_2);
source = String::Concat(source, right);
right = String::New(AsciiToTwoByteString(two_byte_string_2));
source = String::Concat(source, right);
right = String::NewExternal(
new TestResource(AsciiToTwoByteString(two_byte_extern_2)));
source = String::Concat(source, right);
Local<Script> script = Script::Compile(source);
Local<Value> value = script->Run();
CHECK(value->IsNumber());
CHECK_EQ(68, value->Int32Value());
}
v8::internal::CompilationCache::Clear();
i::Heap::CollectAllGarbage(false);
i::Heap::CollectAllGarbage(false);
}
THREADED_TEST(GlobalProperties) { THREADED_TEST(GlobalProperties) {
v8::HandleScope scope; v8::HandleScope scope;
LocalContext env; LocalContext env;
@ -702,6 +738,88 @@ THREADED_TEST(PropertyHandler) {
} }
THREADED_TEST(TinyInteger) {
v8::HandleScope scope;
LocalContext env;
int32_t value = 239;
Local<v8::Integer> value_obj = v8::Integer::New(value);
CHECK_EQ(static_cast<int64_t>(value), value_obj->Value());
}
THREADED_TEST(BigSmiInteger) {
v8::HandleScope scope;
LocalContext env;
int32_t value = i::Smi::kMaxValue;
// We cannot add one to a Smi::kMaxValue without wrapping.
if (i::kSmiValueSize < 32) {
CHECK(i::Smi::IsValid(value));
CHECK(!i::Smi::IsValid(value + 1));
Local<v8::Integer> value_obj = v8::Integer::New(value);
CHECK_EQ(static_cast<int64_t>(value), value_obj->Value());
}
}
THREADED_TEST(BigInteger) {
v8::HandleScope scope;
LocalContext env;
// We cannot add one to a Smi::kMaxValue without wrapping.
if (i::kSmiValueSize < 32) {
// The casts allow this to compile, even if Smi::kMaxValue is 2^31-1.
// The code will not be run in that case, due to the "if" guard.
int32_t value =
static_cast<int32_t>(static_cast<uint32_t>(i::Smi::kMaxValue) + 1);
CHECK(value > i::Smi::kMaxValue);
CHECK(!i::Smi::IsValid(value));
Local<v8::Integer> value_obj = v8::Integer::New(value);
CHECK_EQ(static_cast<int64_t>(value), value_obj->Value());
}
}
THREADED_TEST(TinyUnsignedInteger) {
v8::HandleScope scope;
LocalContext env;
uint32_t value = 239;
Local<v8::Integer> value_obj = v8::Integer::NewFromUnsigned(value);
CHECK_EQ(static_cast<int64_t>(value), value_obj->Value());
}
THREADED_TEST(BigUnsignedSmiInteger) {
v8::HandleScope scope;
LocalContext env;
uint32_t value = static_cast<uint32_t>(i::Smi::kMaxValue);
CHECK(i::Smi::IsValid(value));
CHECK(!i::Smi::IsValid(value + 1));
Local<v8::Integer> value_obj = v8::Integer::NewFromUnsigned(value);
CHECK_EQ(static_cast<int64_t>(value), value_obj->Value());
}
THREADED_TEST(BigUnsignedInteger) {
v8::HandleScope scope;
LocalContext env;
uint32_t value = static_cast<uint32_t>(i::Smi::kMaxValue) + 1;
CHECK(value > static_cast<uint32_t>(i::Smi::kMaxValue));
CHECK(!i::Smi::IsValid(value));
Local<v8::Integer> value_obj = v8::Integer::NewFromUnsigned(value);
CHECK_EQ(static_cast<int64_t>(value), value_obj->Value());
}
THREADED_TEST(OutOfSignedRangeUnsignedInteger) {
v8::HandleScope scope;
LocalContext env;
uint32_t INT32_MAX_AS_UINT = (1U << 31) - 1;
uint32_t value = INT32_MAX_AS_UINT + 1;
CHECK(value > INT32_MAX_AS_UINT); // No overflow.
Local<v8::Integer> value_obj = v8::Integer::NewFromUnsigned(value);
CHECK_EQ(static_cast<int64_t>(value), value_obj->Value());
}
THREADED_TEST(Number) { THREADED_TEST(Number) {
v8::HandleScope scope; v8::HandleScope scope;
LocalContext env; LocalContext env;
@ -1346,6 +1464,44 @@ THREADED_TEST(InternalFieldsNativePointers) {
} }
THREADED_TEST(InternalFieldsNativePointersAndExternal) {
v8::HandleScope scope;
LocalContext env;
Local<v8::FunctionTemplate> templ = v8::FunctionTemplate::New();
Local<v8::ObjectTemplate> instance_templ = templ->InstanceTemplate();
instance_templ->SetInternalFieldCount(1);
Local<v8::Object> obj = templ->GetFunction()->NewInstance();
CHECK_EQ(1, obj->InternalFieldCount());
CHECK(obj->GetPointerFromInternalField(0) == NULL);
char* data = new char[100];
void* aligned = data;
CHECK_EQ(0, reinterpret_cast<uintptr_t>(aligned) & 0x1);
void* unaligned = data + 1;
CHECK_EQ(1, reinterpret_cast<uintptr_t>(unaligned) & 0x1);
obj->SetPointerInInternalField(0, aligned);
i::Heap::CollectAllGarbage(false);
CHECK_EQ(aligned, v8::External::Unwrap(obj->GetInternalField(0)));
obj->SetPointerInInternalField(0, unaligned);
i::Heap::CollectAllGarbage(false);
CHECK_EQ(unaligned, v8::External::Unwrap(obj->GetInternalField(0)));
obj->SetInternalField(0, v8::External::Wrap(aligned));
i::Heap::CollectAllGarbage(false);
CHECK_EQ(aligned, obj->GetPointerFromInternalField(0));
obj->SetInternalField(0, v8::External::Wrap(unaligned));
i::Heap::CollectAllGarbage(false);
CHECK_EQ(unaligned, obj->GetPointerFromInternalField(0));
delete[] data;
}
THREADED_TEST(IdentityHash) { THREADED_TEST(IdentityHash) {
v8::HandleScope scope; v8::HandleScope scope;
LocalContext env; LocalContext env;
@ -1810,7 +1966,7 @@ TEST(HugeConsStringOutOfMemory) {
// Build huge string. This should fail with out of memory exception. // Build huge string. This should fail with out of memory exception.
Local<Value> result = CompileRun( Local<Value> result = CompileRun(
"var str = Array.prototype.join.call({length: 513}, \"A\").toUpperCase();" "var str = Array.prototype.join.call({length: 513}, \"A\").toUpperCase();"
"for (var i = 0; i < 21; i++) { str = str + str; }"); "for (var i = 0; i < 22; i++) { str = str + str; }");
// Check for out of memory state. // Check for out of memory state.
CHECK(result.IsEmpty()); CHECK(result.IsEmpty());

54
deps/v8/test/cctest/test-assembler-x64.cc

@ -44,6 +44,7 @@ using v8::internal::Label;
using v8::internal::rax; using v8::internal::rax;
using v8::internal::rsi; using v8::internal::rsi;
using v8::internal::rdi; using v8::internal::rdi;
using v8::internal::rcx;
using v8::internal::rdx; using v8::internal::rdx;
using v8::internal::rbp; using v8::internal::rbp;
using v8::internal::rsp; using v8::internal::rsp;
@ -53,20 +54,28 @@ using v8::internal::less_equal;
using v8::internal::not_equal; using v8::internal::not_equal;
using v8::internal::greater; using v8::internal::greater;
// Test the x64 assembler by compiling some simple functions into // Test the x64 assembler by compiling some simple functions into
// a buffer and executing them. These tests do not initialize the // a buffer and executing them. These tests do not initialize the
// V8 library, create a context, or use any V8 objects. // V8 library, create a context, or use any V8 objects.
// The AMD64 calling convention is used, with the first five arguments // The AMD64 calling convention is used, with the first six arguments
// in RSI, RDI, RDX, RCX, R8, and R9, and floating point arguments in // in RDI, RSI, RDX, RCX, R8, and R9, and floating point arguments in
// the XMM registers. The return value is in RAX. // the XMM registers. The return value is in RAX.
// This calling convention is used on Linux, with GCC, and on Mac OS, // This calling convention is used on Linux, with GCC, and on Mac OS,
// with GCC. A different convention is used on 64-bit windows. // with GCC. A different convention is used on 64-bit windows,
// where the first four integer arguments are passed in RCX, RDX, R8 and R9.
typedef int (*F0)(); typedef int (*F0)();
typedef int (*F1)(int64_t x); typedef int (*F1)(int64_t x);
typedef int (*F2)(int64_t x, int64_t y); typedef int (*F2)(int64_t x, int64_t y);
#ifdef _WIN64
static const v8::internal::Register arg1 = rcx;
static const v8::internal::Register arg2 = rdx;
#else
static const v8::internal::Register arg1 = rdi;
static const v8::internal::Register arg2 = rsi;
#endif
#define __ assm. #define __ assm.
@ -80,7 +89,7 @@ TEST(AssemblerX64ReturnOperation) {
Assembler assm(buffer, actual_size); Assembler assm(buffer, actual_size);
// Assemble a simple function that copies argument 2 and returns it. // Assemble a simple function that copies argument 2 and returns it.
__ movq(rax, rsi); __ movq(rax, arg2);
__ nop(); __ nop();
__ ret(0); __ ret(0);
@ -105,9 +114,9 @@ TEST(AssemblerX64StackOperations) {
// incorrect stack frames when debugging this function (which has them). // incorrect stack frames when debugging this function (which has them).
__ push(rbp); __ push(rbp);
__ movq(rbp, rsp); __ movq(rbp, rsp);
__ push(rsi); // Value at (rbp - 8) __ push(arg2); // Value at (rbp - 8)
__ push(rsi); // Value at (rbp - 16) __ push(arg2); // Value at (rbp - 16)
__ push(rdi); // Value at (rbp - 24) __ push(arg1); // Value at (rbp - 24)
__ pop(rax); __ pop(rax);
__ pop(rax); __ pop(rax);
__ pop(rax); __ pop(rax);
@ -132,8 +141,8 @@ TEST(AssemblerX64ArithmeticOperations) {
Assembler assm(buffer, actual_size); Assembler assm(buffer, actual_size);
// Assemble a simple function that adds arguments returning the sum. // Assemble a simple function that adds arguments returning the sum.
__ movq(rax, rsi); __ movq(rax, arg2);
__ addq(rax, rdi); __ addq(rax, arg1);
__ ret(0); __ ret(0);
CodeDesc desc; CodeDesc desc;
@ -154,8 +163,8 @@ TEST(AssemblerX64ImulOperation) {
// Assemble a simple function that multiplies arguments returning the high // Assemble a simple function that multiplies arguments returning the high
// word. // word.
__ movq(rax, rsi); __ movq(rax, arg2);
__ imul(rdi); __ imul(arg1);
__ movq(rax, rdx); __ movq(rax, rdx);
__ ret(0); __ ret(0);
@ -182,14 +191,16 @@ TEST(AssemblerX64MemoryOperands) {
// Assemble a simple function that copies argument 2 and returns it. // Assemble a simple function that copies argument 2 and returns it.
__ push(rbp); __ push(rbp);
__ movq(rbp, rsp); __ movq(rbp, rsp);
__ push(rsi); // Value at (rbp - 8)
__ push(rsi); // Value at (rbp - 16) __ push(arg2); // Value at (rbp - 8)
__ push(rdi); // Value at (rbp - 24) __ push(arg2); // Value at (rbp - 16)
__ push(arg1); // Value at (rbp - 24)
const int kStackElementSize = 8; const int kStackElementSize = 8;
__ movq(rax, Operand(rbp, -3 * kStackElementSize)); __ movq(rax, Operand(rbp, -3 * kStackElementSize));
__ pop(rsi); __ pop(arg2);
__ pop(rsi); __ pop(arg2);
__ pop(rsi); __ pop(arg2);
__ pop(rbp); __ pop(rbp);
__ nop(); __ nop();
__ ret(0); __ ret(0);
@ -210,13 +221,14 @@ TEST(AssemblerX64ControlFlow) {
CHECK(buffer); CHECK(buffer);
Assembler assm(buffer, actual_size); Assembler assm(buffer, actual_size);
// Assemble a simple function that copies argument 2 and returns it. // Assemble a simple function that copies argument 1 and returns it.
__ push(rbp); __ push(rbp);
__ movq(rbp, rsp); __ movq(rbp, rsp);
__ movq(rax, rdi); __ movq(rax, arg1);
Label target; Label target;
__ jmp(&target); __ jmp(&target);
__ movq(rax, rsi); __ movq(rax, arg2);
__ bind(&target); __ bind(&target);
__ pop(rbp); __ pop(rbp);
__ ret(0); __ ret(0);

138
deps/v8/test/cctest/test-debug.cc

@ -3539,6 +3539,52 @@ bool IsBreakEventMessage(char *message) {
} }
// We match parts of the message to decide if it is a exception message.
bool IsExceptionEventMessage(char *message) {
const char* type_event = "\"type\":\"event\"";
const char* event_exception = "\"event\":\"exception\"";
// Does the message contain both type:event and event:exception?
return strstr(message, type_event) != NULL &&
strstr(message, event_exception) != NULL;
}
// We match the message wether it is an evaluate response message.
bool IsEvaluateResponseMessage(char* message) {
const char* type_response = "\"type\":\"response\"";
const char* command_evaluate = "\"command\":\"evaluate\"";
// Does the message contain both type:response and command:evaluate?
return strstr(message, type_response) != NULL &&
strstr(message, command_evaluate) != NULL;
}
// We match parts of the message to get evaluate result int value.
int GetEvaluateIntResult(char *message) {
const char* value = "\"value\":";
char* pos = strstr(message, value);
if (pos == NULL) {
return -1;
}
int res = -1;
res = atoi(pos + strlen(value));
return res;
}
// We match parts of the message to get hit breakpoint id.
int GetBreakpointIdFromBreakEventMessage(char *message) {
const char* breakpoints = "\"breakpoints\":[";
char* pos = strstr(message, breakpoints);
if (pos == NULL) {
return -1;
}
int res = -1;
res = atoi(pos + strlen(breakpoints));
return res;
}
/* Test MessageQueues */ /* Test MessageQueues */
/* Tests the message queues that hold debugger commands and /* Tests the message queues that hold debugger commands and
* response messages to the debugger. Fills queues and makes * response messages to the debugger. Fills queues and makes
@ -3566,8 +3612,6 @@ static void MessageHandler(const uint16_t* message, int length,
// Allow message handler to block on a semaphore, to test queueing of // Allow message handler to block on a semaphore, to test queueing of
// messages while blocked. // messages while blocked.
message_queue_barriers.semaphore_1->Wait(); message_queue_barriers.semaphore_1->Wait();
printf("%s\n", print_buffer);
fflush(stdout);
} }
void MessageQueueDebuggerThread::Run() { void MessageQueueDebuggerThread::Run() {
@ -3822,8 +3866,6 @@ static void ThreadedMessageHandler(const v8::Debug::Message& message) {
if (IsBreakEventMessage(print_buffer)) { if (IsBreakEventMessage(print_buffer)) {
threaded_debugging_barriers.barrier_2.Wait(); threaded_debugging_barriers.barrier_2.Wait();
} }
printf("%s\n", print_buffer);
fflush(stdout);
} }
@ -3911,16 +3953,20 @@ class BreakpointsDebuggerThread : public v8::internal::Thread {
Barriers* breakpoints_barriers; Barriers* breakpoints_barriers;
int break_event_breakpoint_id;
int evaluate_int_result;
static void BreakpointsMessageHandler(const v8::Debug::Message& message) { static void BreakpointsMessageHandler(const v8::Debug::Message& message) {
static char print_buffer[1000]; static char print_buffer[1000];
v8::String::Value json(message.GetJSON()); v8::String::Value json(message.GetJSON());
Utf16ToAscii(*json, json.length(), print_buffer); Utf16ToAscii(*json, json.length(), print_buffer);
printf("%s\n", print_buffer);
fflush(stdout);
// Is break_template a prefix of the message?
if (IsBreakEventMessage(print_buffer)) { if (IsBreakEventMessage(print_buffer)) {
break_event_breakpoint_id =
GetBreakpointIdFromBreakEventMessage(print_buffer);
breakpoints_barriers->semaphore_1->Signal();
} else if (IsEvaluateResponseMessage(print_buffer)) {
evaluate_int_result = GetEvaluateIntResult(print_buffer);
breakpoints_barriers->semaphore_1->Signal(); breakpoints_barriers->semaphore_1->Signal();
} }
} }
@ -3930,9 +3976,9 @@ void BreakpointsV8Thread::Run() {
const char* source_1 = "var y_global = 3;\n" const char* source_1 = "var y_global = 3;\n"
"function cat( new_value ) {\n" "function cat( new_value ) {\n"
" var x = new_value;\n" " var x = new_value;\n"
" y_global = 4;\n" " y_global = y_global + 4;\n"
" x = 3 * x + 1;\n" " x = 3 * x + 1;\n"
" y_global = 5;\n" " y_global = y_global + 5;\n"
" return x;\n" " return x;\n"
"}\n" "}\n"
"\n" "\n"
@ -3970,59 +4016,76 @@ void BreakpointsDebuggerThread::Run() {
"\"type\":\"request\"," "\"type\":\"request\","
"\"command\":\"setbreakpoint\"," "\"command\":\"setbreakpoint\","
"\"arguments\":{\"type\":\"function\",\"target\":\"dog\",\"line\":3}}"; "\"arguments\":{\"type\":\"function\",\"target\":\"dog\",\"line\":3}}";
const char* command_3 = "{\"seq\":104," const char* command_3 = "{\"seq\":103,"
"\"type\":\"request\"," "\"type\":\"request\","
"\"command\":\"evaluate\"," "\"command\":\"evaluate\","
"\"arguments\":{\"expression\":\"dog()\",\"disable_break\":false}}"; "\"arguments\":{\"expression\":\"dog()\",\"disable_break\":false}}";
const char* command_4 = "{\"seq\":105," const char* command_4 = "{\"seq\":104,"
"\"type\":\"request\"," "\"type\":\"request\","
"\"command\":\"evaluate\"," "\"command\":\"evaluate\","
"\"arguments\":{\"expression\":\"x\",\"disable_break\":true}}"; "\"arguments\":{\"expression\":\"x + 1\",\"disable_break\":true}}";
const char* command_5 = "{\"seq\":106," const char* command_5 = "{\"seq\":105,"
"\"type\":\"request\"," "\"type\":\"request\","
"\"command\":\"continue\"}"; "\"command\":\"continue\"}";
const char* command_6 = "{\"seq\":107," const char* command_6 = "{\"seq\":106,"
"\"type\":\"request\"," "\"type\":\"request\","
"\"command\":\"continue\"}"; "\"command\":\"continue\"}";
const char* command_7 = "{\"seq\":108," const char* command_7 = "{\"seq\":107,"
"\"type\":\"request\"," "\"type\":\"request\","
"\"command\":\"evaluate\"," "\"command\":\"evaluate\","
"\"arguments\":{\"expression\":\"dog()\",\"disable_break\":true}}"; "\"arguments\":{\"expression\":\"dog()\",\"disable_break\":true}}";
const char* command_8 = "{\"seq\":109," const char* command_8 = "{\"seq\":108,"
"\"type\":\"request\"," "\"type\":\"request\","
"\"command\":\"continue\"}"; "\"command\":\"continue\"}";
// v8 thread initializes, runs source_1 // v8 thread initializes, runs source_1
breakpoints_barriers->barrier_1.Wait(); breakpoints_barriers->barrier_1.Wait();
// 1:Set breakpoint in cat(). // 1:Set breakpoint in cat() (will get id 1).
v8::Debug::SendCommand(buffer, AsciiToUtf16(command_1, buffer)); v8::Debug::SendCommand(buffer, AsciiToUtf16(command_1, buffer));
// 2:Set breakpoint in dog() // 2:Set breakpoint in dog() (will get id 2).
v8::Debug::SendCommand(buffer, AsciiToUtf16(command_2, buffer)); v8::Debug::SendCommand(buffer, AsciiToUtf16(command_2, buffer));
breakpoints_barriers->barrier_2.Wait(); breakpoints_barriers->barrier_2.Wait();
// v8 thread starts compiling source_2. // V8 thread starts compiling source_2.
// Automatic break happens, to run queued commands // Automatic break happens, to run queued commands
// breakpoints_barriers->semaphore_1->Wait(); // breakpoints_barriers->semaphore_1->Wait();
// Commands 1 through 3 run, thread continues. // Commands 1 through 3 run, thread continues.
// v8 thread runs source_2 to breakpoint in cat(). // v8 thread runs source_2 to breakpoint in cat().
// message callback receives break event. // message callback receives break event.
breakpoints_barriers->semaphore_1->Wait(); breakpoints_barriers->semaphore_1->Wait();
// Must have hit breakpoint #1.
CHECK_EQ(1, break_event_breakpoint_id);
// 4:Evaluate dog() (which has a breakpoint). // 4:Evaluate dog() (which has a breakpoint).
v8::Debug::SendCommand(buffer, AsciiToUtf16(command_3, buffer)); v8::Debug::SendCommand(buffer, AsciiToUtf16(command_3, buffer));
// v8 thread hits breakpoint in dog() // V8 thread hits breakpoint in dog().
breakpoints_barriers->semaphore_1->Wait(); // wait for break event breakpoints_barriers->semaphore_1->Wait(); // wait for break event
// 5:Evaluate x // Must have hit breakpoint #2.
CHECK_EQ(2, break_event_breakpoint_id);
// 5:Evaluate (x + 1).
v8::Debug::SendCommand(buffer, AsciiToUtf16(command_4, buffer)); v8::Debug::SendCommand(buffer, AsciiToUtf16(command_4, buffer));
// 6:Continue evaluation of dog() // Evaluate (x + 1) finishes.
breakpoints_barriers->semaphore_1->Wait();
// Must have result 108.
CHECK_EQ(108, evaluate_int_result);
// 6:Continue evaluation of dog().
v8::Debug::SendCommand(buffer, AsciiToUtf16(command_5, buffer)); v8::Debug::SendCommand(buffer, AsciiToUtf16(command_5, buffer));
// dog() finishes. // Evaluate dog() finishes.
breakpoints_barriers->semaphore_1->Wait();
// Must have result 107.
CHECK_EQ(107, evaluate_int_result);
// 7:Continue evaluation of source_2, finish cat(17), hit breakpoint // 7:Continue evaluation of source_2, finish cat(17), hit breakpoint
// in cat(19). // in cat(19).
v8::Debug::SendCommand(buffer, AsciiToUtf16(command_6, buffer)); v8::Debug::SendCommand(buffer, AsciiToUtf16(command_6, buffer));
// message callback gets break event // Message callback gets break event.
breakpoints_barriers->semaphore_1->Wait(); // wait for break event breakpoints_barriers->semaphore_1->Wait(); // wait for break event
// 8: Evaluate dog() with breaks disabled // Must have hit breakpoint #1.
CHECK_EQ(1, break_event_breakpoint_id);
// 8: Evaluate dog() with breaks disabled.
v8::Debug::SendCommand(buffer, AsciiToUtf16(command_7, buffer)); v8::Debug::SendCommand(buffer, AsciiToUtf16(command_7, buffer));
// Evaluate dog() finishes.
breakpoints_barriers->semaphore_1->Wait();
// Must have result 116.
CHECK_EQ(116, evaluate_int_result);
// 9: Continue evaluation of source2, reach end. // 9: Continue evaluation of source2, reach end.
v8::Debug::SendCommand(buffer, AsciiToUtf16(command_8, buffer)); v8::Debug::SendCommand(buffer, AsciiToUtf16(command_8, buffer));
} }
@ -4325,7 +4388,13 @@ static int message_handler_hit_count = 0;
static void MessageHandlerHitCount(const v8::Debug::Message& message) { static void MessageHandlerHitCount(const v8::Debug::Message& message) {
message_handler_hit_count++; message_handler_hit_count++;
SendContinueCommand(); static char print_buffer[1000];
v8::String::Value json(message.GetJSON());
Utf16ToAscii(*json, json.length(), print_buffer);
if (IsExceptionEventMessage(print_buffer)) {
// Send a continue command for exception events.
SendContinueCommand();
}
} }
@ -4415,8 +4484,6 @@ static void HostDispatchMessageHandler(const v8::Debug::Message& message) {
static char print_buffer[1000]; static char print_buffer[1000];
v8::String::Value json(message.GetJSON()); v8::String::Value json(message.GetJSON());
Utf16ToAscii(*json, json.length(), print_buffer); Utf16ToAscii(*json, json.length(), print_buffer);
printf("%s\n", print_buffer);
fflush(stdout);
} }
@ -4776,8 +4843,12 @@ static void ContextCheckMessageHandler(const v8::Debug::Message& message) {
expected_context_data)); expected_context_data));
message_handler_hit_count++; message_handler_hit_count++;
static char print_buffer[1000];
v8::String::Value json(message.GetJSON());
Utf16ToAscii(*json, json.length(), print_buffer);
// Send a continue command for break events. // Send a continue command for break events.
if (message.GetEvent() == v8::Break) { if (IsBreakEventMessage(print_buffer)) {
SendContinueCommand(); SendContinueCommand();
} }
} }
@ -5016,7 +5087,11 @@ static void DebugEvalContextCheckMessageHandler(
expected_context_data)); expected_context_data));
message_handler_hit_count++; message_handler_hit_count++;
if (message.IsEvent() && message.GetEvent() == v8::Break) { static char print_buffer[1000];
v8::String::Value json(message.GetJSON());
Utf16ToAscii(*json, json.length(), print_buffer);
if (IsBreakEventMessage(print_buffer)) {
break_count++; break_count++;
if (!sent_eval) { if (!sent_eval) {
sent_eval = true; sent_eval = true;
@ -5038,7 +5113,8 @@ static void DebugEvalContextCheckMessageHandler(
SendContinueCommand(); SendContinueCommand();
continue_command_send_count++; continue_command_send_count++;
} }
} else if (message.IsResponse() && continue_command_send_count < 2) { } else if (IsEvaluateResponseMessage(print_buffer) &&
continue_command_send_count < 2) {
// Response to the evaluation request. We're still on the breakpoint so // Response to the evaluation request. We're still on the breakpoint so
// send continue. // send continue.
SendContinueCommand(); SendContinueCommand();

24
deps/v8/test/cctest/test-disasm-ia32.cc

@ -363,7 +363,31 @@ TEST(DisasmIa320) {
__ divsd(xmm1, xmm0); __ divsd(xmm1, xmm0);
__ movdbl(xmm1, Operand(ebx, ecx, times_4, 10000)); __ movdbl(xmm1, Operand(ebx, ecx, times_4, 10000));
__ movdbl(Operand(ebx, ecx, times_4, 10000), xmm1); __ movdbl(Operand(ebx, ecx, times_4, 10000), xmm1);
__ comisd(xmm0, xmm1);
} }
// cmov.
{
CHECK(CpuFeatures::IsSupported(CpuFeatures::CMOV));
CpuFeatures::Scope use_cmov(CpuFeatures::CMOV);
__ cmov(overflow, eax, Operand(eax, 0));
__ cmov(no_overflow, eax, Operand(eax, 1));
__ cmov(below, eax, Operand(eax, 2));
__ cmov(above_equal, eax, Operand(eax, 3));
__ cmov(equal, eax, Operand(ebx, 0));
__ cmov(not_equal, eax, Operand(ebx, 1));
__ cmov(below_equal, eax, Operand(ebx, 2));
__ cmov(above, eax, Operand(ebx, 3));
__ cmov(sign, eax, Operand(ecx, 0));
__ cmov(not_sign, eax, Operand(ecx, 1));
__ cmov(parity_even, eax, Operand(ecx, 2));
__ cmov(parity_odd, eax, Operand(ecx, 3));
__ cmov(less, eax, Operand(edx, 0));
__ cmov(greater_equal, eax, Operand(edx, 1));
__ cmov(less_equal, eax, Operand(edx, 2));
__ cmov(greater, eax, Operand(edx, 3));
}
__ ret(0); __ ret(0);
CodeDesc desc; CodeDesc desc;

Some files were not shown because too many files changed in this diff

Loading…
Cancel
Save