
Upgrade V8 to 2.1.0

v0.7.4-release
Ryan Dahl
commit c7cb4daa25
  1. 1
      deps/v8/AUTHORS
  2. 29
      deps/v8/ChangeLog
  3. 24
      deps/v8/SConstruct
  4. 41
      deps/v8/include/v8-debug.h
  5. 425
      deps/v8/samples/lineprocessor.cc
  6. 7
      deps/v8/src/SConscript
  7. 6
      deps/v8/src/accessors.cc
  8. 10
      deps/v8/src/api.cc
  9. 14
      deps/v8/src/arm/assembler-arm-inl.h
  10. 36
      deps/v8/src/arm/assembler-arm.cc
  11. 16
      deps/v8/src/arm/assembler-arm.h
  12. 61
      deps/v8/src/arm/builtins-arm.cc
  13. 949
      deps/v8/src/arm/codegen-arm.cc
  14. 120
      deps/v8/src/arm/codegen-arm.h
  15. 6
      deps/v8/src/arm/constants-arm.h
  16. 2
      deps/v8/src/arm/debug-arm.cc
  17. 52
      deps/v8/src/arm/disasm-arm.cc
  18. 1742
      deps/v8/src/arm/fast-codegen-arm.cc
  19. 1781
      deps/v8/src/arm/full-codegen-arm.cc
  20. 25
      deps/v8/src/arm/ic-arm.cc
  21. 63
      deps/v8/src/arm/macro-assembler-arm.cc
  22. 19
      deps/v8/src/arm/macro-assembler-arm.h
  23. 110
      deps/v8/src/arm/regexp-macro-assembler-arm.cc
  24. 6
      deps/v8/src/arm/regexp-macro-assembler-arm.h
  25. 103
      deps/v8/src/arm/simulator-arm.cc
  26. 8
      deps/v8/src/arm/simulator-arm.h
  27. 551
      deps/v8/src/arm/stub-cache-arm.cc
  28. 101
      deps/v8/src/arm/virtual-frame-arm.cc
  29. 34
      deps/v8/src/arm/virtual-frame-arm.h
  30. 20
      deps/v8/src/assembler.cc
  31. 5
      deps/v8/src/assembler.h
  32. 21
      deps/v8/src/ast.cc
  33. 35
      deps/v8/src/ast.h
  34. 20
      deps/v8/src/bootstrapper.cc
  35. 218
      deps/v8/src/builtins.cc
  36. 48
      deps/v8/src/builtins.h
  37. 4
      deps/v8/src/code-stubs.h
  38. 43
      deps/v8/src/codegen.cc
  39. 128
      deps/v8/src/codegen.h
  40. 735
      deps/v8/src/compiler.cc
  41. 37
      deps/v8/src/compiler.h
  42. 267
      deps/v8/src/data-flow.cc
  43. 67
      deps/v8/src/data-flow.h
  44. 32
      deps/v8/src/dateparser.cc
  45. 4
      deps/v8/src/debug-agent.cc
  46. 2
      deps/v8/src/debug-delay.js
  47. 110
      deps/v8/src/debug.cc
  48. 37
      deps/v8/src/debug.h
  49. 8
      deps/v8/src/disassembler.cc
  50. 18
      deps/v8/src/execution.cc
  51. 1
      deps/v8/src/execution.h
  52. 8
      deps/v8/src/factory.cc
  53. 1
      deps/v8/src/factory.h
  54. 947
      deps/v8/src/fast-codegen.cc
  55. 338
      deps/v8/src/fast-codegen.h
  56. 20
      deps/v8/src/flag-definitions.h
  57. 14
      deps/v8/src/frames.cc
  58. 6
      deps/v8/src/frames.h
  59. 1155
      deps/v8/src/full-codegen.cc
  60. 452
      deps/v8/src/full-codegen.h
  61. 7
      deps/v8/src/globals.h
  62. 44
      deps/v8/src/handles.cc
  63. 15
      deps/v8/src/handles.h
  64. 6
      deps/v8/src/heap-inl.h
  65. 3
      deps/v8/src/heap-profiler.cc
  66. 112
      deps/v8/src/heap.cc
  67. 5
      deps/v8/src/heap.h
  68. 26
      deps/v8/src/ia32/assembler-ia32.cc
  69. 8
      deps/v8/src/ia32/assembler-ia32.h
  70. 65
      deps/v8/src/ia32/builtins-ia32.cc
  71. 2047
      deps/v8/src/ia32/codegen-ia32.cc
  72. 154
      deps/v8/src/ia32/codegen-ia32.h
  73. 9
      deps/v8/src/ia32/debug-ia32.cc
  74. 13
      deps/v8/src/ia32/disasm-ia32.cc
  75. 1714
      deps/v8/src/ia32/fast-codegen-ia32.cc
  76. 1900
      deps/v8/src/ia32/full-codegen-ia32.cc
  77. 33
      deps/v8/src/ia32/ic-ia32.cc
  78. 99
      deps/v8/src/ia32/macro-assembler-ia32.cc
  79. 15
      deps/v8/src/ia32/macro-assembler-ia32.h
  80. 75
      deps/v8/src/ia32/regexp-macro-assembler-ia32.cc
  81. 6
      deps/v8/src/ia32/regexp-macro-assembler-ia32.h
  82. 4
      deps/v8/src/ia32/simulator-ia32.h
  83. 144
      deps/v8/src/ia32/stub-cache-ia32.cc
  84. 36
      deps/v8/src/ia32/virtual-frame-ia32.cc
  85. 30
      deps/v8/src/ic.cc
  86. 13
      deps/v8/src/ic.h
  87. 2
      deps/v8/src/json-delay.js
  88. 5
      deps/v8/src/jsregexp.cc
  89. 3
      deps/v8/src/list.h
  90. 122
      deps/v8/src/log.cc
  91. 23
      deps/v8/src/log.h
  92. 1
      deps/v8/src/macros.py
  93. 76
      deps/v8/src/mark-compact.cc
  94. 3
      deps/v8/src/mark-compact.h
  95. 3
      deps/v8/src/messages.js
  96. 18
      deps/v8/src/mirror-delay.js
  97. 6
      deps/v8/src/mksnapshot.cc
  98. 21
      deps/v8/src/objects-inl.h
  99. 81
      deps/v8/src/objects.cc
  100. 32
      deps/v8/src/objects.h

1
deps/v8/AUTHORS

@@ -10,6 +10,7 @@ Alexandre Vassalotti <avassalotti@gmail.com>
 Craig Schlenter <craig.schlenter@gmail.com>
 Daniel Andersson <kodandersson@gmail.com>
 Daniel James <dnljms@gmail.com>
+Erich Ocean <erich.ocean@me.com>
 Jan de Mooij <jandemooij@gmail.com>
 Jay Freeman <saurik@saurik.com>
 Joel Stanley <joel.stan@gmail.com>

29
deps/v8/ChangeLog

@@ -1,3 +1,32 @@
+2010-02-03: Version 2.1.0
+
+Values are now always wrapped in objects when used as a receiver
+(issue 223).
+
+[ES5] Implemented Object.getOwnPropertyNames.
+
+[ES5] Restrict JSON.parse to only accept strings that conform to the
+JSON grammar.
+
+Improvements to the debugger agent (issues 549 and 554).
+
+Fixed a problem with skipped stack frames in profiles (issue 553).
+
+Solaris support by Erich Ocean <erich.ocean@me.com> and Ryan Dahl
+<ry@tinyclouds.org>.
+
+Fix a bug where Math.round() returns incorrect results for huge
+integers.
+
+Fix enumeration order for objects created from some constructor
+functions (issue http://crbug.com/3867).
+
+Fix arithmetic on some integer constants (issue 580).
+
+Numerous performance improvements, including porting of previous IA-32
+optimizations to the x64 and ARM architectures.
+
 2010-01-14: Version 2.0.6
 Added ES5 Object.getPrototypeOf, GetOwnPropertyDescriptor,

24
deps/v8/SConstruct

@@ -280,18 +280,12 @@ V8_EXTRA_FLAGS = {
 },
 'msvc': {
 'all': {
-'WARNINGFLAGS': ['/WX', '/wd4355', '/wd4800']
+'WARNINGFLAGS': ['/W3', '/WX', '/wd4355', '/wd4800']
 },
 'library:shared': {
 'CPPDEFINES': ['BUILDING_V8_SHARED'],
 'LIBS': ['winmm', 'ws2_32']
 },
-'arch:ia32': {
-'WARNINGFLAGS': ['/W3']
-},
-'arch:x64': {
-'WARNINGFLAGS': ['/W3']
-},
 'arch:arm': {
 'CPPDEFINES': ['V8_TARGET_ARCH_ARM'],
 # /wd4996 is to silence the warning about sscanf
@@ -317,7 +311,8 @@ MKSNAPSHOT_EXTRA_FLAGS = {
 'LIBS': ['execinfo', 'pthread']
 },
 'os:solaris': {
-'LIBS': ['pthread', 'socket', 'nsl', 'rt']
+'LIBS': ['m', 'pthread', 'socket', 'nsl', 'rt'],
+'LINKFLAGS': ['-mt']
 },
 'os:openbsd': {
 'LIBS': ['execinfo', 'pthread']
@@ -369,7 +364,8 @@ CCTEST_EXTRA_FLAGS = {
 'LIBS': ['execinfo', 'pthread']
 },
 'os:solaris': {
-'LIBS': ['pthread', 'socket', 'nsl', 'rt']
+'LIBS': ['m', 'pthread', 'socket', 'nsl', 'rt'],
+'LINKFLAGS': ['-mt']
 },
 'os:openbsd': {
 'LIBS': ['execinfo', 'pthread']
@@ -431,7 +427,8 @@ SAMPLE_FLAGS = {
 },
 'os:solaris': {
 'LIBPATH' : ['/usr/local/lib'],
-'LIBS': ['pthread', 'socket', 'nsl', 'rt']
+'LIBS': ['m', 'pthread', 'socket', 'nsl', 'rt'],
+'LINKFLAGS': ['-mt']
 },
 'os:openbsd': {
 'LIBPATH' : ['/usr/local/lib'],
@@ -543,7 +540,8 @@ D8_FLAGS = {
 'LIBS': ['pthread'],
 },
 'os:solaris': {
-'LIBS': ['pthread', 'socket', 'nsl', 'rt']
+'LIBS': ['m', 'pthread', 'socket', 'nsl', 'rt'],
+'LINKFLAGS': ['-mt']
 },
 'os:openbsd': {
 'LIBS': ['pthread'],
@@ -693,7 +691,7 @@ SIMPLE_OPTIONS = {
 def GetOptions():
 result = Options()
 result.Add('mode', 'compilation mode (debug, release)', 'release')
-result.Add('sample', 'build sample (shell, process)', '')
+result.Add('sample', 'build sample (shell, process, lineprocessor)', '')
 result.Add('env', 'override environment settings (NAME0:value0,NAME1:value1,...)', '')
 result.Add('importenv', 'import environment settings (NAME0,NAME1,...)', '')
 for (name, option) in SIMPLE_OPTIONS.iteritems():
@@ -761,7 +759,7 @@ def IsLegal(env, option, values):
 def VerifyOptions(env):
 if not IsLegal(env, 'mode', ['debug', 'release']):
 return False
-if not IsLegal(env, 'sample', ["shell", "process"]):
+if not IsLegal(env, 'sample', ["shell", "process", "lineprocessor"]):
 return False
 if not IsLegal(env, 'regexp', ["native", "interpreted"]):
 return False

41
deps/v8/include/v8-debug.h

@@ -224,9 +224,11 @@ class EXPORT Debug {
 * be processed. Note that debug messages will only be processed if there is
 * a V8 break. This can happen automatically by using the option
 * --debugger-auto-break.
+* \param provide_locker if true, V8 acquires a v8::Locker for you before
+* calling the handler
 */
 static void SetDebugMessageDispatchHandler(
-DebugMessageDispatchHandler handler);
+DebugMessageDispatchHandler handler, bool provide_locker = false);
 /**
 * Run a JavaScript function in the debugger.
@@ -263,6 +265,43 @@ class EXPORT Debug {
 */
 static bool EnableAgent(const char* name, int port,
 bool wait_for_connection = false);
+
+/**
+* Makes V8 process all pending debug messages.
+*
+* From V8's point of view all debug messages arrive asynchronously (e.g. from
+* a remote debugger), but they must all be handled synchronously: V8 cannot
+* do two things at once, so normal script execution must be interrupted
+* for a while.
+*
+* Generally, when a message arrives V8 may be in one of 3 states:
+* 1. V8 is running a script; V8 will automatically interrupt and process all
+* pending messages (the auto_break flag should be enabled, however);
+* 2. V8 is suspended on a debug breakpoint; in this state V8 is dedicated
+* to reading and processing debug messages;
+* 3. V8 is not running at all, or has called some long-running C++ function;
+* by default this means that processing of all debug messages will be deferred
+* until V8 gets control again; however, the embedding application may improve
+* this by calling this method manually.
+*
+* It makes sense to call this method whenever a new debug message arrives and
+* V8 is not already running. Method v8::Debug::SetDebugMessageDispatchHandler
+* should help with the former condition.
+*
+* Technically this method is in many senses equivalent to executing an empty
+* script:
+* 1. It does nothing except for processing all pending debug messages.
+* 2. It should be invoked with the same precautions and from the same context
+* as a V8 script would be invoked from, because:
+* a. with the "evaluate" command it can do whatever a normal script can do,
+* including all native calls;
+* b. no other thread should call V8 while this method is running
+* (v8::Locker may be used here).
+*
+* The behavior of the "evaluate" debug command in the scope of this method is
+* currently not specified.
+*/
+static void ProcessDebugMessages();
 };
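The sketch below condenses how an embedder might use the two additions above (the provide_locker flag of SetDebugMessageDispatchHandler and ProcessDebugMessages); it mirrors what samples/lineprocessor.cc further down does. It is a minimal illustration, not part of the commit: the names OnDebugMessage and g_context, the agent name "embedder" and port 5858 are placeholders.

#include <v8.h>
#include <v8-debug.h>

static v8::Persistent<v8::Context> g_context;  // context entered when flushing messages

// Called by V8 whenever new debug messages arrive. Because provide_locker is
// true below, V8 acquires v8::Locker before invoking this handler; all we do
// is enter a context and flush the pending messages.
static void OnDebugMessage() {
  v8::Context::Scope scope(g_context);
  v8::Debug::ProcessDebugMessages();
}

int main() {
  v8::HandleScope handle_scope;
  v8::Handle<v8::Context> context = v8::Context::New();
  g_context = v8::Persistent<v8::Context>::New(context);
  v8::Context::Scope context_scope(context);
  v8::Locker locker;

  v8::Debug::SetDebugMessageDispatchHandler(OnDebugMessage, true);  // provide_locker = true
  v8::Debug::EnableAgent("embedder", 5858, false);                  // listen for a remote debugger

  // ... compile and run scripts here; while the embedder sits in long-running
  // C++ code, the handler above keeps the remote debugger responsive.

  g_context.Dispose();
  return 0;
}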

425
deps/v8/samples/lineprocessor.cc

@ -0,0 +1,425 @@
// Copyright 2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include <v8.h>
#include <v8-debug.h>
#include <fcntl.h>
#include <string.h>
#include <stdio.h>
#include <stdlib.h>
/**
* This sample program demonstrates certain aspects of debugging a
* standalone V8-based application.
*
* The program reads an input stream, processes it line by line and prints
* the result to output. The actual processing is done by a custom JavaScript
* script. The script is specified with command line parameters.
*
* The main cycle of the program sequentially reads lines from standard
* input, processes them and prints them to standard output until the input
* closes. There are two possible configurations with regard to the main cycle.
*
* 1. The main cycle is on the C++ side. The program should be run with the
* --main-cycle-in-cpp option. The script must declare a function named
* "ProcessLine". The main cycle in C++ reads lines and calls this function
* for each of them. This is a sample script:
function ProcessLine(input_line) {
return ">>>" + input_line + "<<<";
}
*
* 2. The main cycle is in JavaScript. The program should be run with the
* --main-cycle-in-js option. The script is run only once and is given an
* API of two global functions: "read_line" and "print". It should read input
* and print the converted lines to output itself. This is a sample script:
while (true) {
var line = read_line();
if (!line) {
break;
}
var res = line + " | " + line;
print(res);
}
*
* When run with the "-p" argument, the program starts the V8 Debugger Agent
* and allows a remote debugger to attach and debug the JavaScript code.
*
* Interesting aspects:
* 1. Waiting for a remote debugger to attach
* Normally the program compiles the custom script and immediately runs it.
* To debug the script from the very beginning, run this sample program with
* the "--wait-for-connection" command line parameter. This way V8 will
* suspend on the first statement and wait for a debugger to attach.
*
* 2. Unresponsive V8
* The V8 Debugger Agent holds a connection with the remote debugger, but it
* only responds while V8 is running some script. In particular, when this
* program is waiting for input, all requests from the debugger are deferred
* until V8 is called again. See how the "--callback" command-line parameter
* in this sample fixes this issue.
*/
enum MainCycleType {
CycleInCpp,
CycleInJs
};
const char* ToCString(const v8::String::Utf8Value& value);
void ReportException(v8::TryCatch* handler);
v8::Handle<v8::String> ReadFile(const char* name);
v8::Handle<v8::String> ReadLine();
v8::Handle<v8::Value> Print(const v8::Arguments& args);
v8::Handle<v8::Value> ReadLine(const v8::Arguments& args);
bool RunCppCycle(v8::Handle<v8::Script> script, v8::Local<v8::Context> context,
bool report_exceptions);
v8::Persistent<v8::Context> debug_message_context;
void DispatchDebugMessages() {
// We are in some random thread. We should already have v8::Locker acquired
// (we requested this when we registered this callback). We were called
// because new debug messages arrived; they may have already been processed,
// but we shouldn't worry about this.
//
// All we have to do is set a context and call ProcessDebugMessages.
//
// We should decide which V8 context to use here. This is important for
// the "evaluate" command, because it must be executed in some context.
// In our sample we have only one context, so there is nothing really to
// think about.
v8::Context::Scope scope(debug_message_context);
v8::Debug::ProcessDebugMessages();
}
int RunMain(int argc, char* argv[]) {
v8::V8::SetFlagsFromCommandLine(&argc, argv, true);
v8::HandleScope handle_scope;
v8::Handle<v8::String> script_source(NULL);
v8::Handle<v8::Value> script_name(NULL);
int script_param_counter = 0;
int port_number = -1;
bool wait_for_connection = false;
bool support_callback = false;
MainCycleType cycle_type = CycleInCpp;
for (int i = 1; i < argc; i++) {
const char* str = argv[i];
if (strcmp(str, "-f") == 0) {
// Ignore any -f flags for compatibility with the other stand-
// alone JavaScript engines.
continue;
} else if (strcmp(str, "--callback") == 0) {
support_callback = true;
} else if (strcmp(str, "--wait-for-connection") == 0) {
wait_for_connection = true;
} else if (strcmp(str, "--main-cycle-in-cpp") == 0) {
cycle_type = CycleInCpp;
} else if (strcmp(str, "--main-cycle-in-js") == 0) {
cycle_type = CycleInJs;
} else if (strcmp(str, "-p") == 0 && i + 1 < argc) {
port_number = atoi(argv[i + 1]);
i++;
} else if (strncmp(str, "--", 2) == 0) {
printf("Warning: unknown flag %s.\nTry --help for options\n", str);
} else if (strcmp(str, "-e") == 0 && i + 1 < argc) {
script_source = v8::String::New(argv[i + 1]);
script_name = v8::String::New("unnamed");
i++;
script_param_counter++;
} else {
// Use argument as a name of file to load.
script_source = ReadFile(str);
script_name = v8::String::New(str);
if (script_source.IsEmpty()) {
printf("Error reading '%s'\n", str);
return 1;
}
script_param_counter++;
}
}
if (script_param_counter == 0) {
printf("Script is not specified\n");
return 1;
}
if (script_param_counter != 1) {
printf("Only one script may be specified\n");
return 1;
}
// Create a template for the global object.
v8::Handle<v8::ObjectTemplate> global = v8::ObjectTemplate::New();
// Bind the global 'print' function to the C++ Print callback.
global->Set(v8::String::New("print"), v8::FunctionTemplate::New(Print));
if (cycle_type == CycleInJs) {
// Bind the global 'read_line' function to the C++ ReadLine callback.
global->Set(v8::String::New("read_line"),
v8::FunctionTemplate::New(ReadLine));
}
// Create a new execution environment containing the built-in
// functions
v8::Handle<v8::Context> context = v8::Context::New(NULL, global);
debug_message_context = v8::Persistent<v8::Context>::New(context);
// Enter the newly created execution environment.
v8::Context::Scope context_scope(context);
v8::Locker locker;
if (support_callback) {
v8::Debug::SetDebugMessageDispatchHandler(DispatchDebugMessages, true);
}
if (port_number != -1) {
const char* auto_break_param = "--debugger_auto_break";
v8::V8::SetFlagsFromString(auto_break_param, strlen(auto_break_param));
v8::Debug::EnableAgent("lineprocessor", port_number, wait_for_connection);
}
bool report_exceptions = true;
v8::Handle<v8::Script> script;
{
// Compile script in try/catch context.
v8::TryCatch try_catch;
script = v8::Script::Compile(script_source, script_name);
if (script.IsEmpty()) {
// Print errors that happened during compilation.
if (report_exceptions)
ReportException(&try_catch);
return 1;
}
}
{
v8::TryCatch try_catch;
script->Run();
if (try_catch.HasCaught()) {
if (report_exceptions)
ReportException(&try_catch);
return 1;
}
}
if (cycle_type == CycleInCpp) {
bool res = RunCppCycle(script, v8::Context::GetCurrent(),
report_exceptions);
return !res;
} else {
// All is already done.
}
return 0;
}
bool RunCppCycle(v8::Handle<v8::Script> script, v8::Local<v8::Context> context,
bool report_exceptions) {
v8::Locker lock;
v8::Handle<v8::String> fun_name = v8::String::New("ProcessLine");
v8::Handle<v8::Value> process_val =
v8::Context::GetCurrent()->Global()->Get(fun_name);
// If there is no ProcessLine function, or if it is not a function,
// bail out.
if (!process_val->IsFunction()) {
printf("Error: Script does not declare 'ProcessLine' global function.\n");
return 1;
}
// It is a function; cast it to a Function
v8::Handle<v8::Function> process_fun =
v8::Handle<v8::Function>::Cast(process_val);
while (!feof(stdin)) {
v8::HandleScope handle_scope;
v8::Handle<v8::String> input_line = ReadLine();
if (input_line == v8::Undefined()) {
continue;
}
const int argc = 1;
v8::Handle<v8::Value> argv[argc] = { input_line };
v8::Handle<v8::Value> result;
{
v8::TryCatch try_catch;
result = process_fun->Call(v8::Context::GetCurrent()->Global(),
argc, argv);
if (try_catch.HasCaught()) {
if (report_exceptions)
ReportException(&try_catch);
return false;
}
}
v8::String::Utf8Value str(result);
const char* cstr = ToCString(str);
printf("%s\n", cstr);
}
return true;
}
int main(int argc, char* argv[]) {
int result = RunMain(argc, argv);
v8::V8::Dispose();
return result;
}
// Extracts a C string from a V8 Utf8Value.
const char* ToCString(const v8::String::Utf8Value& value) {
return *value ? *value : "<string conversion failed>";
}
// Reads a file into a v8 string.
v8::Handle<v8::String> ReadFile(const char* name) {
FILE* file = fopen(name, "rb");
if (file == NULL) return v8::Handle<v8::String>();
fseek(file, 0, SEEK_END);
int size = ftell(file);
rewind(file);
char* chars = new char[size + 1];
chars[size] = '\0';
for (int i = 0; i < size;) {
int read = fread(&chars[i], 1, size - i, file);
i += read;
}
fclose(file);
v8::Handle<v8::String> result = v8::String::New(chars, size);
delete[] chars;
return result;
}
void ReportException(v8::TryCatch* try_catch) {
v8::HandleScope handle_scope;
v8::String::Utf8Value exception(try_catch->Exception());
const char* exception_string = ToCString(exception);
v8::Handle<v8::Message> message = try_catch->Message();
if (message.IsEmpty()) {
// V8 didn't provide any extra information about this error; just
// print the exception.
printf("%s\n", exception_string);
} else {
// Print (filename):(line number): (message).
v8::String::Utf8Value filename(message->GetScriptResourceName());
const char* filename_string = ToCString(filename);
int linenum = message->GetLineNumber();
printf("%s:%i: %s\n", filename_string, linenum, exception_string);
// Print line of source code.
v8::String::Utf8Value sourceline(message->GetSourceLine());
const char* sourceline_string = ToCString(sourceline);
printf("%s\n", sourceline_string);
// Print wavy underline (GetUnderline is deprecated).
int start = message->GetStartColumn();
for (int i = 0; i < start; i++) {
printf(" ");
}
int end = message->GetEndColumn();
for (int i = start; i < end; i++) {
printf("^");
}
printf("\n");
}
}
// The callback that is invoked by v8 whenever the JavaScript 'print'
// function is called. Prints its arguments on stdout separated by
// spaces and ending with a newline.
v8::Handle<v8::Value> Print(const v8::Arguments& args) {
bool first = true;
for (int i = 0; i < args.Length(); i++) {
v8::HandleScope handle_scope;
if (first) {
first = false;
} else {
printf(" ");
}
v8::String::Utf8Value str(args[i]);
const char* cstr = ToCString(str);
printf("%s", cstr);
}
printf("\n");
fflush(stdout);
return v8::Undefined();
}
// The callback that is invoked by v8 whenever the JavaScript 'read_line'
// function is called. Reads a string from standard input and returns.
v8::Handle<v8::Value> ReadLine(const v8::Arguments& args) {
if (args.Length() > 0) {
return v8::ThrowException(v8::String::New("Unexpected arguments"));
}
return ReadLine();
}
v8::Handle<v8::String> ReadLine() {
const int kBufferSize = 1024 + 1;
char buffer[kBufferSize];
char* res;
{
v8::Unlocker unlocker;
res = fgets(buffer, kBufferSize, stdin);
}
if (res == NULL) {
v8::Handle<v8::Primitive> t = v8::Undefined();
return reinterpret_cast<v8::Handle<v8::String>&>(t);
}
// remove newline char
for (char* pos = buffer; *pos != '\0'; pos++) {
if (*pos == '\n') {
*pos = '\0';
break;
}
}
return v8::String::New(buffer);
}

7
deps/v8/src/SConscript

@@ -50,6 +50,7 @@ SOURCES = {
 contexts.cc
 conversions.cc
 counters.cc
+data-flow.cc
 dateparser.cc
 debug-agent.cc
 debug.cc
@@ -60,6 +61,7 @@ SOURCES = {
 flags.cc
 frame-element.cc
 frames.cc
+full-codegen.cc
 func-name-inferrer.cc
 global-handles.cc
 handles.cc
@@ -114,6 +116,7 @@ SOURCES = {
 arm/disasm-arm.cc
 arm/fast-codegen-arm.cc
 arm/frames-arm.cc
+arm/full-codegen-arm.cc
 arm/ic-arm.cc
 arm/jump-target-arm.cc
 arm/macro-assembler-arm.cc
@@ -137,6 +140,7 @@ SOURCES = {
 ia32/disasm-ia32.cc
 ia32/fast-codegen-ia32.cc
 ia32/frames-ia32.cc
+ia32/full-codegen-ia32.cc
 ia32/ic-ia32.cc
 ia32/jump-target-ia32.cc
 ia32/macro-assembler-ia32.cc
@@ -154,6 +158,7 @@ SOURCES = {
 x64/disasm-x64.cc
 x64/fast-codegen-x64.cc
 x64/frames-x64.cc
+x64/full-codegen-x64.cc
 x64/ic-x64.cc
 x64/jump-target-x64.cc
 x64/macro-assembler-x64.cc
@@ -239,7 +244,7 @@ def ConfigureObjectFiles():
 env.Replace(**context.flags['v8'])
 context.ApplyEnvOverrides(env)
 env['BUILDERS']['JS2C'] = Builder(action=js2c.JS2C)
-env['BUILDERS']['Snapshot'] = Builder(action='$SOURCE $TARGET --logfile "$LOGFILE"')
+env['BUILDERS']['Snapshot'] = Builder(action='$SOURCE $TARGET --logfile "$LOGFILE" --log-snapshot-positions')
 # Build the standard platform-independent source files.
 source_files = context.GetRelevantSources(SOURCES)
6
deps/v8/src/accessors.cc

@@ -493,11 +493,11 @@ Object* Accessors::FunctionGetLength(Object* object, void*) {
 // If the function isn't compiled yet, the length is not computed
 // correctly yet. Compile it now and return the right length.
 HandleScope scope;
-Handle<JSFunction> function_handle(function);
-if (!CompileLazy(function_handle, KEEP_EXCEPTION)) {
+Handle<SharedFunctionInfo> shared(function->shared());
+if (!CompileLazyShared(shared, KEEP_EXCEPTION)) {
 return Failure::Exception();
 }
-return Smi::FromInt(function_handle->shared()->length());
+return Smi::FromInt(shared->length());
 } else {
 return Smi::FromInt(function->shared()->length());
 }

10
deps/v8/src/api.cc

@@ -3669,7 +3669,6 @@ void Debug::SetMessageHandler(v8::Debug::MessageHandler handler,
 void Debug::SetMessageHandler2(v8::Debug::MessageHandler2 handler) {
 EnsureInitialized("v8::Debug::SetMessageHandler");
 ENTER_V8;
-HandleScope scope;
 i::Debugger::SetMessageHandler(handler);
 }
@@ -3691,10 +3690,10 @@ void Debug::SetHostDispatchHandler(HostDispatchHandler handler,
 void Debug::SetDebugMessageDispatchHandler(
-DebugMessageDispatchHandler handler) {
+DebugMessageDispatchHandler handler, bool provide_locker) {
 EnsureInitialized("v8::Debug::SetDebugMessageDispatchHandler");
 ENTER_V8;
-i::Debugger::SetDebugMessageDispatchHandler(handler);
+i::Debugger::SetDebugMessageDispatchHandler(handler, provide_locker);
 }
@@ -3744,6 +3743,11 @@ Local<Value> Debug::GetMirror(v8::Handle<v8::Value> obj) {
 bool Debug::EnableAgent(const char* name, int port, bool wait_for_connection) {
 return i::Debugger::StartAgent(name, port, wait_for_connection);
 }
+
+void Debug::ProcessDebugMessages() {
+i::Execution::ProcessDebugMesssages(true);
+}
 #endif // ENABLE_DEBUGGER_SUPPORT
 namespace internal {

14
deps/v8/src/arm/assembler-arm-inl.h

@@ -174,20 +174,6 @@ Operand::Operand(const ExternalReference& f) {
 }
-Operand::Operand(Object** opp) {
-rm_ = no_reg;
-imm32_ = reinterpret_cast<int32_t>(opp);
-rmode_ = RelocInfo::NONE;
-}
-Operand::Operand(Context** cpp) {
-rm_ = no_reg;
-imm32_ = reinterpret_cast<int32_t>(cpp);
-rmode_ = RelocInfo::NONE;
-}
 Operand::Operand(Smi* value) {
 rm_ = no_reg;
 imm32_ = reinterpret_cast<intptr_t>(value);

36
deps/v8/src/arm/assembler-arm.cc

@@ -30,9 +30,9 @@
 // ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
 // OF THE POSSIBILITY OF SUCH DAMAGE.
-// The original source code covered by the above license above has been modified
-// significantly by Google Inc.
-// Copyright 2006-2008 the V8 project authors. All rights reserved.
+// The original source code covered by the above license above has been
+// modified significantly by Google Inc.
+// Copyright 2010 the V8 project authors. All rights reserved.
 #include "v8.h"
@@ -1371,6 +1371,36 @@ void Assembler::stc2(Coprocessor coproc,
 // Support for VFP.
+void Assembler::vldr(const DwVfpRegister dst,
+const Register base,
+int offset,
+const Condition cond) {
+// Ddst = MEM(Rbase + offset).
+// Instruction details available in ARM DDI 0406A, A8-628.
+// cond(31-28) | 1101(27-24)| 1001(23-20) | Rbase(19-16) |
+// Vdst(15-12) | 1011(11-8) | offset
+ASSERT(CpuFeatures::IsEnabled(VFP3));
+ASSERT(offset % 4 == 0);
+emit(cond | 0xD9*B20 | base.code()*B16 | dst.code()*B12 |
+0xB*B8 | ((offset / 4) & 255));
+}
+
+void Assembler::vstr(const DwVfpRegister src,
+const Register base,
+int offset,
+const Condition cond) {
+// MEM(Rbase + offset) = Dsrc.
+// Instruction details available in ARM DDI 0406A, A8-786.
+// cond(31-28) | 1101(27-24)| 1000(23-20) | | Rbase(19-16) |
+// Vsrc(15-12) | 1011(11-8) | (offset/4)
+ASSERT(CpuFeatures::IsEnabled(VFP3));
+ASSERT(offset % 4 == 0);
+emit(cond | 0xD8*B20 | base.code()*B16 | src.code()*B12 |
+0xB*B8 | ((offset / 4) & 255));
+}
+
 void Assembler::vmov(const DwVfpRegister dst,
 const Register src1,
 const Register src2,

16
deps/v8/src/arm/assembler-arm.h

@@ -30,9 +30,9 @@
 // ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
 // OF THE POSSIBILITY OF SUCH DAMAGE.
-// The original source code covered by the above license above has been modified
-// significantly by Google Inc.
-// Copyright 2006-2008 the V8 project authors. All rights reserved.
+// The original source code covered by the above license above has been
+// modified significantly by Google Inc.
+// Copyright 2010 the V8 project authors. All rights reserved.
 // A light-weight ARM Assembler
 // Generates user mode instructions for the ARM architecture up to version 5
@@ -398,8 +398,6 @@ class Operand BASE_EMBEDDED {
 RelocInfo::Mode rmode = RelocInfo::NONE));
 INLINE(explicit Operand(const ExternalReference& f));
 INLINE(explicit Operand(const char* s));
-INLINE(explicit Operand(Object** opp));
-INLINE(explicit Operand(Context** cpp));
 explicit Operand(Handle<Object> handle);
 INLINE(explicit Operand(Smi* value));
@@ -796,6 +794,14 @@ class Assembler : public Malloced {
 // However, some simple modifications can allow
 // these APIs to support D16 to D31.
+void vldr(const DwVfpRegister dst,
+const Register base,
+int offset, // Offset must be a multiple of 4.
+const Condition cond = al);
+
+void vstr(const DwVfpRegister src,
+const Register base,
+int offset, // Offset must be a multiple of 4.
+const Condition cond = al);
 void vmov(const DwVfpRegister dst,
 const Register src1,
 const Register src2,

61
deps/v8/src/arm/builtins-arm.cc

@ -38,15 +38,32 @@ namespace internal {
#define __ ACCESS_MASM(masm) #define __ ACCESS_MASM(masm)
void Builtins::Generate_Adaptor(MacroAssembler* masm, CFunctionId id) { void Builtins::Generate_Adaptor(MacroAssembler* masm,
// TODO(428): Don't pass the function in a static variable. CFunctionId id,
__ mov(ip, Operand(ExternalReference::builtin_passed_function())); BuiltinExtraArguments extra_args) {
__ str(r1, MemOperand(ip, 0)); // ----------- S t a t e -------------
// -- r0 : number of arguments excluding receiver
// The actual argument count has already been loaded into register // -- r1 : called function (only guaranteed when
// r0, but JumpToRuntime expects r0 to contain the number of // extra_args requires it)
// arguments including the receiver. // -- cp : context
__ add(r0, r0, Operand(1)); // -- sp[0] : last argument
// -- ...
// -- sp[4 * (argc - 1)] : first argument (argc == r0)
// -- sp[4 * argc] : receiver
// -----------------------------------
// Insert extra arguments.
int num_extra_args = 0;
if (extra_args == NEEDS_CALLED_FUNCTION) {
num_extra_args = 1;
__ push(r1);
} else {
ASSERT(extra_args == NO_EXTRA_ARGUMENTS);
}
// JumpToRuntime expects r0 to contain the number of arguments
// including the receiver and the extra arguments.
__ add(r0, r0, Operand(num_extra_args + 1));
__ JumpToRuntime(ExternalReference(id)); __ JumpToRuntime(ExternalReference(id));
} }
@ -491,7 +508,8 @@ void Builtins::Generate_JSConstructCall(MacroAssembler* masm) {
} }
void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) { static void Generate_JSConstructStubHelper(MacroAssembler* masm,
bool is_api_function) {
// Enter a construct frame. // Enter a construct frame.
__ EnterConstructFrame(); __ EnterConstructFrame();
@ -727,8 +745,17 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
// Call the function. // Call the function.
// r0: number of arguments // r0: number of arguments
// r1: constructor function // r1: constructor function
ParameterCount actual(r0); if (is_api_function) {
__ InvokeFunction(r1, actual, CALL_FUNCTION); __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
Handle<Code> code = Handle<Code>(
Builtins::builtin(Builtins::HandleApiCallConstruct));
ParameterCount expected(0);
__ InvokeCode(code, expected, expected,
RelocInfo::CODE_TARGET, CALL_FUNCTION);
} else {
ParameterCount actual(r0);
__ InvokeFunction(r1, actual, CALL_FUNCTION);
}
// Pop the function from the stack. // Pop the function from the stack.
// sp[0]: constructor function // sp[0]: constructor function
@ -783,6 +810,16 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
} }
void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
Generate_JSConstructStubHelper(masm, false);
}
void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
Generate_JSConstructStubHelper(masm, true);
}
static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
bool is_construct) { bool is_construct) {
// Called from Generate_JS_Entry // Called from Generate_JS_Entry

949
deps/v8/src/arm/codegen-arm.cc

File diff suppressed because it is too large

120
deps/v8/src/arm/codegen-arm.h

@ -1,4 +1,4 @@
// Copyright 2006-2008 the V8 project authors. All rights reserved. // Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without // Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are // modification, are permitted provided that the following conditions are
// met: // met:
@ -32,6 +32,7 @@ namespace v8 {
namespace internal { namespace internal {
// Forward declarations // Forward declarations
class CompilationInfo;
class DeferredCode; class DeferredCode;
class RegisterAllocator; class RegisterAllocator;
class RegisterFile; class RegisterFile;
@ -43,57 +44,69 @@ enum TypeofState { INSIDE_TYPEOF, NOT_INSIDE_TYPEOF };
// ------------------------------------------------------------------------- // -------------------------------------------------------------------------
// Reference support // Reference support
// A reference is a C++ stack-allocated object that keeps an ECMA // A reference is a C++ stack-allocated object that puts a
// reference on the execution stack while in scope. For variables // reference on the virtual frame. The reference may be consumed
// the reference is empty, indicating that it isn't necessary to // by GetValue, TakeValue, SetValue, and Codegen::UnloadReference.
// store state on the stack for keeping track of references to those. // When the lifetime (scope) of a valid reference ends, it must have
// For properties, we keep either one (named) or two (indexed) values // been consumed, and be in state UNLOADED.
// on the execution stack to represent the reference.
class Reference BASE_EMBEDDED { class Reference BASE_EMBEDDED {
public: public:
// The values of the types is important, see size(). // The values of the types is important, see size().
enum Type { ILLEGAL = -1, SLOT = 0, NAMED = 1, KEYED = 2 }; enum Type { UNLOADED = -2, ILLEGAL = -1, SLOT = 0, NAMED = 1, KEYED = 2 };
Reference(CodeGenerator* cgen, Expression* expression); Reference(CodeGenerator* cgen,
Expression* expression,
bool persist_after_get = false);
~Reference(); ~Reference();
Expression* expression() const { return expression_; } Expression* expression() const { return expression_; }
Type type() const { return type_; } Type type() const { return type_; }
void set_type(Type value) { void set_type(Type value) {
ASSERT(type_ == ILLEGAL); ASSERT_EQ(ILLEGAL, type_);
type_ = value; type_ = value;
} }
void set_unloaded() {
ASSERT_NE(ILLEGAL, type_);
ASSERT_NE(UNLOADED, type_);
type_ = UNLOADED;
}
// The size the reference takes up on the stack. // The size the reference takes up on the stack.
int size() const { return (type_ == ILLEGAL) ? 0 : type_; } int size() const {
return (type_ < SLOT) ? 0 : type_;
}
bool is_illegal() const { return type_ == ILLEGAL; } bool is_illegal() const { return type_ == ILLEGAL; }
bool is_slot() const { return type_ == SLOT; } bool is_slot() const { return type_ == SLOT; }
bool is_property() const { return type_ == NAMED || type_ == KEYED; } bool is_property() const { return type_ == NAMED || type_ == KEYED; }
bool is_unloaded() const { return type_ == UNLOADED; }
// Return the name. Only valid for named property references. // Return the name. Only valid for named property references.
Handle<String> GetName(); Handle<String> GetName();
// Generate code to push the value of the reference on top of the // Generate code to push the value of the reference on top of the
// expression stack. The reference is expected to be already on top of // expression stack. The reference is expected to be already on top of
// the expression stack, and it is left in place with its value above it. // the expression stack, and it is consumed by the call unless the
// reference is for a compound assignment.
// If the reference is not consumed, it is left in place under its value.
void GetValue(); void GetValue();
// Generate code to push the value of a reference on top of the expression // Generate code to pop a reference, push the value of the reference,
// stack and then spill the stack frame. This function is used temporarily // and then spill the stack frame.
// while the code generator is being transformed.
inline void GetValueAndSpill(); inline void GetValueAndSpill();
// Generate code to store the value on top of the expression stack in the // Generate code to store the value on top of the expression stack in the
// reference. The reference is expected to be immediately below the value // reference. The reference is expected to be immediately below the value
// on the expression stack. The stored value is left in place (with the // on the expression stack. The value is stored in the location specified
// reference intact below it) to support chained assignments. // by the reference, and is left on top of the stack, after the reference
// is popped from beneath it (unloaded).
void SetValue(InitState init_state); void SetValue(InitState init_state);
private: private:
CodeGenerator* cgen_; CodeGenerator* cgen_;
Expression* expression_; Expression* expression_;
Type type_; Type type_;
// Keep the reference on the stack after get, so it can be used by set later.
bool persist_after_get_;
}; };
@ -137,11 +150,21 @@ class CodeGenState BASE_EMBEDDED {
class CodeGenerator: public AstVisitor { class CodeGenerator: public AstVisitor {
public: public:
// Compilation mode. Either the compiler is used as the primary
// compiler and needs to setup everything or the compiler is used as
// the secondary compiler for split compilation and has to handle
// bailouts.
enum Mode {
PRIMARY,
SECONDARY
};
// Takes a function literal, generates code for it. This function should only // Takes a function literal, generates code for it. This function should only
// be called by compiler.cc. // be called by compiler.cc.
static Handle<Code> MakeCode(FunctionLiteral* fun, static Handle<Code> MakeCode(FunctionLiteral* fun,
Handle<Script> script, Handle<Script> script,
bool is_eval); bool is_eval,
CompilationInfo* info);
// Printing of AST, etc. as requested by flags. // Printing of AST, etc. as requested by flags.
static void MakeCodePrologue(FunctionLiteral* fun); static void MakeCodePrologue(FunctionLiteral* fun);
@ -189,8 +212,7 @@ class CodeGenerator: public AstVisitor {
private: private:
// Construction/Destruction // Construction/Destruction
CodeGenerator(int buffer_size, Handle<Script> script, bool is_eval); CodeGenerator(MacroAssembler* masm, Handle<Script> script, bool is_eval);
virtual ~CodeGenerator() { delete masm_; }
// Accessors // Accessors
Scope* scope() const { return scope_; } Scope* scope() const { return scope_; }
@ -227,7 +249,7 @@ class CodeGenerator: public AstVisitor {
inline void VisitStatementsAndSpill(ZoneList<Statement*>* statements); inline void VisitStatementsAndSpill(ZoneList<Statement*>* statements);
// Main code generation function // Main code generation function
void GenCode(FunctionLiteral* fun); void Generate(FunctionLiteral* fun, Mode mode, CompilationInfo* info);
// The following are used by class Reference. // The following are used by class Reference.
void LoadReference(Reference* ref); void LoadReference(Reference* ref);
@ -274,6 +296,9 @@ class CodeGenerator: public AstVisitor {
void LoadFromSlot(Slot* slot, TypeofState typeof_state); void LoadFromSlot(Slot* slot, TypeofState typeof_state);
// Store the value on top of the stack to a slot. // Store the value on top of the stack to a slot.
void StoreToSlot(Slot* slot, InitState init_state); void StoreToSlot(Slot* slot, InitState init_state);
// Load a keyed property, leaving it in r0. The receiver and key are
// passed on the stack, and remain there.
void EmitKeyedLoad(bool is_global);
void LoadFromGlobalSlotCheckExtensions(Slot* slot, void LoadFromGlobalSlotCheckExtensions(Slot* slot,
TypeofState typeof_state, TypeofState typeof_state,
@ -304,7 +329,9 @@ class CodeGenerator: public AstVisitor {
bool reversed, bool reversed,
OverwriteMode mode); OverwriteMode mode);
void CallWithArguments(ZoneList<Expression*>* arguments, int position); void CallWithArguments(ZoneList<Expression*>* arguments,
CallFunctionFlags flags,
int position);
// Control flow // Control flow
void Branch(bool if_true, JumpTarget* target); void Branch(bool if_true, JumpTarget* target);
@ -339,6 +366,7 @@ class CodeGenerator: public AstVisitor {
void GenerateIsArray(ZoneList<Expression*>* args); void GenerateIsArray(ZoneList<Expression*>* args);
void GenerateIsObject(ZoneList<Expression*>* args); void GenerateIsObject(ZoneList<Expression*>* args);
void GenerateIsFunction(ZoneList<Expression*>* args); void GenerateIsFunction(ZoneList<Expression*>* args);
void GenerateIsUndetectableObject(ZoneList<Expression*>* args);
// Support for construct call checks. // Support for construct call checks.
void GenerateIsConstructCall(ZoneList<Expression*>* args); void GenerateIsConstructCall(ZoneList<Expression*>* args);
@ -426,33 +454,13 @@ class CodeGenerator: public AstVisitor {
friend class JumpTarget; friend class JumpTarget;
friend class Reference; friend class Reference;
friend class FastCodeGenerator; friend class FastCodeGenerator;
friend class CodeGenSelector; friend class FullCodeGenerator;
friend class FullCodeGenSyntaxChecker;
DISALLOW_COPY_AND_ASSIGN(CodeGenerator); DISALLOW_COPY_AND_ASSIGN(CodeGenerator);
}; };
class CallFunctionStub: public CodeStub {
public:
CallFunctionStub(int argc, InLoopFlag in_loop)
: argc_(argc), in_loop_(in_loop) {}
void Generate(MacroAssembler* masm);
private:
int argc_;
InLoopFlag in_loop_;
#if defined(DEBUG)
void Print() { PrintF("CallFunctionStub (argc %d)\n", argc_); }
#endif // defined(DEBUG)
Major MajorKey() { return CallFunction; }
int MinorKey() { return argc_; }
InLoopFlag InLoop() { return in_loop_; }
};
class GenericBinaryOpStub : public CodeStub { class GenericBinaryOpStub : public CodeStub {
public: public:
GenericBinaryOpStub(Token::Value op, GenericBinaryOpStub(Token::Value op,
@ -530,6 +538,28 @@ class GenericBinaryOpStub : public CodeStub {
}; };
class StringCompareStub: public CodeStub {
public:
StringCompareStub() { }
// Compare two flat ASCII strings and returns result in r0.
// Does not use the stack.
static void GenerateCompareFlatAsciiStrings(MacroAssembler* masm,
Register left,
Register right,
Register scratch1,
Register scratch2,
Register scratch3,
Register scratch4);
private:
Major MajorKey() { return StringCompare; }
int MinorKey() { return 0; }
void Generate(MacroAssembler* masm);
};
} } // namespace v8::internal } } // namespace v8::internal
#endif // V8_ARM_CODEGEN_ARM_H_ #endif // V8_ARM_CODEGEN_ARM_H_

6
deps/v8/src/arm/constants-arm.h

@@ -1,4 +1,4 @@
-// Copyright 2009 the V8 project authors. All rights reserved.
+// Copyright 2010 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
@@ -237,6 +237,7 @@ class Instr {
 inline int RnField() const { return Bits(19, 16); }
 inline int RdField() const { return Bits(15, 12); }
+inline int CoprocessorField() const { return Bits(11, 8); }
 // Support for VFP.
 // Vn(19-16) | Vd(15-12) | Vm(3-0)
 inline int VnField() const { return Bits(19, 16); }
@@ -246,6 +247,8 @@ class Instr {
 inline int MField() const { return Bit(5); }
 inline int DField() const { return Bit(22); }
 inline int RtField() const { return Bits(15, 12); }
+inline int PField() const { return Bit(24); }
+inline int UField() const { return Bit(23); }
 // Fields used in Data processing instructions
 inline Opcode OpcodeField() const {
@@ -296,6 +299,7 @@ class Instr {
 inline bool HasB() const { return BField() == 1; }
 inline bool HasW() const { return WField() == 1; }
 inline bool HasL() const { return LField() == 1; }
+inline bool HasU() const { return UField() == 1; }
 inline bool HasSign() const { return SignField() == 1; }
 inline bool HasH() const { return HField() == 1; }
 inline bool HasLink() const { return LinkField() == 1; }

2
deps/v8/src/arm/debug-arm.cc

@@ -98,7 +98,7 @@ static void Generate_DebugBreakCallHelper(MacroAssembler* masm,
 __ mov(r0, Operand(0)); // no arguments
 __ mov(r1, Operand(ExternalReference::debug_break()));
-CEntryDebugBreakStub ceb;
+CEntryStub ceb(1, ExitFrame::MODE_DEBUG);
 __ CallStub(&ceb);
 // Restore the register values containing object pointers from the expression

52
deps/v8/src/arm/disasm-arm.cc

@ -1,4 +1,4 @@
// Copyright 2007-2009 the V8 project authors. All rights reserved. // Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without // Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are // modification, are permitted provided that the following conditions are
// met: // met:
@ -998,29 +998,43 @@ void Decoder::DecodeTypeVFP(Instr* instr) {
// Decode Type 6 coprocessor instructions. // Decode Type 6 coprocessor instructions.
// Dm = vmov(Rt, Rt2) // Dm = vmov(Rt, Rt2)
// <Rt, Rt2> = vmov(Dm) // <Rt, Rt2> = vmov(Dm)
// Ddst = MEM(Rbase + 4*offset).
// MEM(Rbase + 4*offset) = Dsrc.
void Decoder::DecodeType6CoprocessorIns(Instr* instr) { void Decoder::DecodeType6CoprocessorIns(Instr* instr) {
ASSERT((instr->TypeField() == 6)); ASSERT((instr->TypeField() == 6));
if (instr->Bit(23) == 1) { if (instr->CoprocessorField() != 0xB) {
Unknown(instr); // Not used by V8.
} else if (instr->Bit(22) == 1) {
if ((instr->Bits(27, 24) == 0xC) &&
(instr->Bit(22) == 1) &&
(instr->Bits(11, 8) == 0xB) &&
(instr->Bits(7, 6) == 0x0) &&
(instr->Bit(4) == 1)) {
if (instr->Bit(20) == 0) {
Format(instr, "vmov'cond 'Dm, 'rt, 'rn");
} else if (instr->Bit(20) == 1) {
Format(instr, "vmov'cond 'rt, 'rn, 'Dm");
}
} else {
Unknown(instr); // Not used by V8.
}
} else if (instr->Bit(21) == 1) {
Unknown(instr); // Not used by V8. Unknown(instr); // Not used by V8.
} else { } else {
Unknown(instr); // Not used by V8. switch (instr->OpcodeField()) {
case 0x2:
// Load and store double to two GP registers
if (instr->Bits(7, 4) != 0x1) {
Unknown(instr); // Not used by V8.
} else if (instr->HasL()) {
Format(instr, "vmov'cond 'rt, 'rn, 'Dm");
} else {
Format(instr, "vmov'cond 'Dm, 'rt, 'rn");
}
break;
case 0x8:
if (instr->HasL()) {
Format(instr, "vldr'cond 'Dd, ['rn - 4*'off8]");
} else {
Format(instr, "vstr'cond 'Dd, ['rn - 4*'off8]");
}
break;
case 0xC:
if (instr->HasL()) {
Format(instr, "vldr'cond 'Dd, ['rn + 4*'off8]");
} else {
Format(instr, "vstr'cond 'Dd, ['rn + 4*'off8]");
}
break;
default:
Unknown(instr); // Not used by V8.
break;
}
} }
} }

1742
deps/v8/src/arm/fast-codegen-arm.cc

File diff suppressed because it is too large

1781
deps/v8/src/arm/full-codegen-arm.cc

File diff suppressed because it is too large

25
deps/v8/src/arm/ic-arm.cc

@ -170,7 +170,6 @@ void LoadIC::GenerateArrayLength(MacroAssembler* masm) {
// -- lr : return address // -- lr : return address
// -- [sp] : receiver // -- [sp] : receiver
// ----------------------------------- // -----------------------------------
Label miss; Label miss;
__ ldr(r0, MemOperand(sp, 0)); __ ldr(r0, MemOperand(sp, 0));
@ -204,7 +203,6 @@ void LoadIC::GenerateFunctionPrototype(MacroAssembler* masm) {
// -- lr : return address // -- lr : return address
// -- [sp] : receiver // -- [sp] : receiver
// ----------------------------------- // -----------------------------------
Label miss; Label miss;
// Load receiver. // Load receiver.
@ -318,7 +316,6 @@ void CallIC::GenerateNormal(MacroAssembler* masm, int argc) {
// ----------- S t a t e ------------- // ----------- S t a t e -------------
// -- lr: return address // -- lr: return address
// ----------------------------------- // -----------------------------------
Label miss, global_object, non_global_object; Label miss, global_object, non_global_object;
// Get the receiver of the function from the stack into r1. // Get the receiver of the function from the stack into r1.
@ -451,7 +448,6 @@ void LoadIC::GenerateNormal(MacroAssembler* masm) {
// -- lr : return address // -- lr : return address
// -- [sp] : receiver // -- [sp] : receiver
// ----------------------------------- // -----------------------------------
Label miss, probe, global; Label miss, probe, global;
__ ldr(r0, MemOperand(sp, 0)); __ ldr(r0, MemOperand(sp, 0));
@ -543,6 +539,8 @@ void KeyedLoadIC::Generate(MacroAssembler* masm, const ExternalReference& f) {
// -- lr : return address // -- lr : return address
// -- sp[0] : key // -- sp[0] : key
// -- sp[4] : receiver // -- sp[4] : receiver
// -----------------------------------
__ ldm(ia, sp, r2.bit() | r3.bit()); __ ldm(ia, sp, r2.bit() | r3.bit());
__ stm(db_w, sp, r2.bit() | r3.bit()); __ stm(db_w, sp, r2.bit() | r3.bit());
@ -555,6 +553,7 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
// -- lr : return address // -- lr : return address
// -- sp[0] : key // -- sp[0] : key
// -- sp[4] : receiver // -- sp[4] : receiver
// -----------------------------------
Label slow, fast; Label slow, fast;
// Get the key and receiver object from the stack. // Get the key and receiver object from the stack.
@ -569,11 +568,10 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
// Get the map of the receiver. // Get the map of the receiver.
__ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset)); __ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
// Check that the receiver does not require access checks. We need
// to check this explicitly since this generic stub does not perform // Check bit field.
// map checks.
__ ldrb(r3, FieldMemOperand(r2, Map::kBitFieldOffset)); __ ldrb(r3, FieldMemOperand(r2, Map::kBitFieldOffset));
__ tst(r3, Operand(1 << Map::kIsAccessCheckNeeded)); __ tst(r3, Operand(kSlowCaseBitFieldMask));
__ b(ne, &slow); __ b(ne, &slow);
// Check that the object is some kind of JS object EXCEPT JS Value type. // Check that the object is some kind of JS object EXCEPT JS Value type.
// In the case that the object is a value-wrapper object, // In the case that the object is a value-wrapper object,
@ -623,6 +621,8 @@ void KeyedLoadIC::GenerateString(MacroAssembler* masm) {
// -- lr : return address // -- lr : return address
// -- sp[0] : key // -- sp[0] : key
// -- sp[4] : receiver // -- sp[4] : receiver
// -----------------------------------
GenerateGeneric(masm); GenerateGeneric(masm);
} }
@ -641,6 +641,7 @@ void KeyedStoreIC::Generate(MacroAssembler* masm,
// -- lr : return address // -- lr : return address
// -- sp[0] : key // -- sp[0] : key
// -- sp[1] : receiver // -- sp[1] : receiver
// -----------------------------------
__ ldm(ia, sp, r2.bit() | r3.bit()); __ ldm(ia, sp, r2.bit() | r3.bit());
__ stm(db_w, sp, r0.bit() | r2.bit() | r3.bit()); __ stm(db_w, sp, r0.bit() | r2.bit() | r3.bit());
@ -655,7 +656,9 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm) {
// -- lr : return address // -- lr : return address
// -- sp[0] : key // -- sp[0] : key
// -- sp[1] : receiver // -- sp[1] : receiver
// -----------------------------------
Label slow, fast, array, extra, exit; Label slow, fast, array, extra, exit;
// Get the key and the object from the stack. // Get the key and the object from the stack.
__ ldm(ia, sp, r1.bit() | r3.bit()); // r1 = key, r3 = receiver __ ldm(ia, sp, r1.bit() | r3.bit()); // r1 = key, r3 = receiver
// Check that the key is a smi. // Check that the key is a smi.
@ -807,7 +810,7 @@ void StoreIC::GenerateMegamorphic(MacroAssembler* masm) {
StubCache::GenerateProbe(masm, flags, r1, r2, r3, no_reg); StubCache::GenerateProbe(masm, flags, r1, r2, r3, no_reg);
// Cache miss: Jump to runtime. // Cache miss: Jump to runtime.
- Generate(masm, ExternalReference(IC_Utility(kStoreIC_Miss)));
+ GenerateMiss(masm);
} }
@ -828,7 +831,7 @@ void StoreIC::GenerateExtendStorage(MacroAssembler* masm) {
} }
- void StoreIC::Generate(MacroAssembler* masm, const ExternalReference& f) {
+ void StoreIC::GenerateMiss(MacroAssembler* masm) {
// ----------- S t a t e ------------- // ----------- S t a t e -------------
// -- r0 : value // -- r0 : value
// -- r2 : name // -- r2 : name
@ -840,7 +843,7 @@ void StoreIC::Generate(MacroAssembler* masm, const ExternalReference& f) {
__ stm(db_w, sp, r0.bit() | r2.bit() | r3.bit()); __ stm(db_w, sp, r0.bit() | r2.bit() | r3.bit());
// Perform tail call to the entry. // Perform tail call to the entry.
- __ TailCallRuntime(f, 3, 1);
+ __ TailCallRuntime(ExternalReference(IC_Utility(kStoreIC_Miss)), 3, 1);
} }

63
deps/v8/src/arm/macro-assembler-arm.cc

@ -205,6 +205,11 @@ void MacroAssembler::LoadRoot(Register destination,
// tag is shifted away. // tag is shifted away.
void MacroAssembler::RecordWrite(Register object, Register offset, void MacroAssembler::RecordWrite(Register object, Register offset,
Register scratch) { Register scratch) {
// The compiled code assumes that record write doesn't change the
// context register, so we check that none of the clobbered
// registers are cp.
ASSERT(!object.is(cp) && !offset.is(cp) && !scratch.is(cp));
// This is how much we shift the remembered set bit offset to get the // This is how much we shift the remembered set bit offset to get the
// offset of the word in the remembered set. We divide by kBitsPerInt (32, // offset of the word in the remembered set. We divide by kBitsPerInt (32,
// shift right 5) and then multiply by kIntSize (4, shift left 2). // shift right 5) and then multiply by kIntSize (4, shift left 2).
@ -272,6 +277,14 @@ void MacroAssembler::RecordWrite(Register object, Register offset,
str(scratch, MemOperand(object)); str(scratch, MemOperand(object));
bind(&done); bind(&done);
// Clobber all input registers when running with the debug-code flag
// turned on to provoke errors.
if (FLAG_debug_code) {
mov(object, Operand(bit_cast<int32_t>(kZapValue)));
mov(offset, Operand(bit_cast<int32_t>(kZapValue)));
mov(scratch, Operand(bit_cast<int32_t>(kZapValue)));
}
} }
@ -1035,9 +1048,13 @@ void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
return; return;
} }
- Runtime::FunctionId function_id =
-     static_cast<Runtime::FunctionId>(f->stub_id);
- RuntimeStub stub(function_id, num_arguments);
+ // TODO(1236192): Most runtime routines don't need the number of
+ // arguments passed in because it is constant. At some point we
+ // should remove this need and make the runtime routine entry code
+ // smarter.
+ mov(r0, Operand(num_arguments));
+ mov(r1, Operand(ExternalReference(f)));
+ CEntryStub stub(1);
CallStub(&stub); CallStub(&stub);
} }
@ -1221,6 +1238,46 @@ void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
} }
void MacroAssembler::JumpIfNonSmisNotBothSequentialAsciiStrings(
Register first,
Register second,
Register scratch1,
Register scratch2,
Label* failure) {
// Test that both first and second are sequential ASCII strings.
// Assume that they are non-smis.
ldr(scratch1, FieldMemOperand(first, HeapObject::kMapOffset));
ldr(scratch2, FieldMemOperand(second, HeapObject::kMapOffset));
ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
ldrb(scratch2, FieldMemOperand(scratch2, Map::kInstanceTypeOffset));
int kFlatAsciiStringMask =
kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask;
int kFlatAsciiStringTag = ASCII_STRING_TYPE;
and_(scratch1, scratch1, Operand(kFlatAsciiStringMask));
and_(scratch2, scratch2, Operand(kFlatAsciiStringMask));
cmp(scratch1, Operand(kFlatAsciiStringTag));
// Ignore second test if first test failed.
cmp(scratch2, Operand(kFlatAsciiStringTag), eq);
b(ne, failure);
}
void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register first,
Register second,
Register scratch1,
Register scratch2,
Label* failure) {
// Check that neither is a smi.
ASSERT_EQ(0, kSmiTag);
and_(scratch1, first, Operand(second));
tst(scratch1, Operand(kSmiTagMask));
b(eq, failure);
JumpIfNonSmisNotBothSequentialAsciiStrings(first,
second,
scratch1,
scratch2,
failure);
}
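As a plain C++ illustration of the mask-and-compare trick used by the two helpers above, the following sketch checks two instance types against a sequential-ASCII tag. The constant values are placeholders invented for the sketch, not V8's real instance-type encoding:

  // flat_ascii_check.cc -- illustrative only; placeholder constants.
  #include <cassert>
  #include <cstdint>

  namespace {

  // Hypothetical bit layout for the demo.
  const uint32_t kIsNotStringMask = 0x80;           // set for non-strings
  const uint32_t kStringEncodingMask = 0x04;        // set for two-byte strings
  const uint32_t kStringRepresentationMask = 0x03;  // 0 == sequential
  const uint32_t kSeqAsciiStringTag = 0x00;         // stands in for ASCII_STRING_TYPE

  // True when both instance types describe sequential ASCII strings,
  // mirroring the AND-then-compare done by
  // JumpIfNonSmisNotBothSequentialAsciiStrings.
  bool BothSequentialAscii(uint32_t type1, uint32_t type2) {
    const uint32_t mask =
        kIsNotStringMask | kStringEncodingMask | kStringRepresentationMask;
    return (type1 & mask) == kSeqAsciiStringTag &&
           (type2 & mask) == kSeqAsciiStringTag;
  }

  }  // namespace

  int main() {
    assert(BothSequentialAscii(0x00, 0x00));   // two sequential ASCII strings
    assert(!BothSequentialAscii(0x00, 0x04));  // second one is two-byte
    assert(!BothSequentialAscii(0x80, 0x00));  // first one is not a string
    return 0;
  }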
#ifdef ENABLE_DEBUGGER_SUPPORT #ifdef ENABLE_DEBUGGER_SUPPORT
CodePatcher::CodePatcher(byte* address, int instructions) CodePatcher::CodePatcher(byte* address, int instructions)

19
deps/v8/src/arm/macro-assembler-arm.h

@ -337,6 +337,25 @@ class MacroAssembler: public Assembler {
void set_allow_stub_calls(bool value) { allow_stub_calls_ = value; } void set_allow_stub_calls(bool value) { allow_stub_calls_ = value; }
bool allow_stub_calls() { return allow_stub_calls_; } bool allow_stub_calls() { return allow_stub_calls_; }
// ---------------------------------------------------------------------------
// String utilities
// Checks if both objects are sequential ASCII strings and jumps to label
// if either is not. Assumes that neither object is a smi.
void JumpIfNonSmisNotBothSequentialAsciiStrings(Register object1,
Register object2,
Register scratch1,
Register scratch2,
Label *failure);
// Checks if both objects are sequential ASCII strings and jumps to label
// if either is not.
void JumpIfNotBothSequentialAsciiStrings(Register first,
Register second,
Register scratch1,
Register scratch2,
Label* not_flat_ascii_strings);
private: private:
List<Unresolved> unresolved_; List<Unresolved> unresolved_;
bool generating_stub_; bool generating_stub_;

110
deps/v8/src/arm/regexp-macro-assembler-arm.cc

@ -63,8 +63,6 @@ namespace internal {
* through the runtime system) * through the runtime system)
* - stack_area_base (High end of the memory area to use as * - stack_area_base (High end of the memory area to use as
* backtracking stack) * backtracking stack)
* - at_start (if 1, we are starting at the start of the
* string, otherwise 0)
* - int* capture_array (int[num_saved_registers_], for output). * - int* capture_array (int[num_saved_registers_], for output).
* --- sp when called --- * --- sp when called ---
* - link address * - link address
@ -76,6 +74,8 @@ namespace internal {
* - void* input_string (location of a handle containing the string) * - void* input_string (location of a handle containing the string)
* - Offset of location before start of input (effectively character * - Offset of location before start of input (effectively character
* position -1). Used to initialize capture registers to a non-position. * position -1). Used to initialize capture registers to a non-position.
* - At start (if 1, we are starting at the start of the
* string, otherwise 0)
* - register 0 (Only positions must be stored in the first * - register 0 (Only positions must be stored in the first
* - register 1 num_saved_registers_ registers) * - register 1 num_saved_registers_ registers)
* - ... * - ...
@ -526,64 +526,54 @@ bool RegExpMacroAssemblerARM::CheckSpecialCharacterClass(uc16 type,
return true; return true;
} }
case 'n': { case 'n': {
// Match newlines (0x0a('\n'), 0x0d('\r'), 0x2028 and 0x2029) // Match newlines (0x0a('\n'), 0x0d('\r'), 0x2028 and 0x2029)
__ eor(r0, current_character(), Operand(0x01)); __ eor(r0, current_character(), Operand(0x01));
// See if current character is '\n'^1 or '\r'^1, i.e., 0x0b or 0x0c // See if current character is '\n'^1 or '\r'^1, i.e., 0x0b or 0x0c
__ sub(r0, r0, Operand(0x0b)); __ sub(r0, r0, Operand(0x0b));
__ cmp(r0, Operand(0x0c - 0x0b)); __ cmp(r0, Operand(0x0c - 0x0b));
if (mode_ == ASCII) { if (mode_ == ASCII) {
BranchOrBacktrack(hi, on_no_match); BranchOrBacktrack(hi, on_no_match);
} else { } else {
Label done; Label done;
__ b(ls, &done); __ b(ls, &done);
// Compare original value to 0x2028 and 0x2029, using the already // Compare original value to 0x2028 and 0x2029, using the already
// computed (current_char ^ 0x01 - 0x0b). I.e., check for // computed (current_char ^ 0x01 - 0x0b). I.e., check for
// 0x201d (0x2028 - 0x0b) or 0x201e. // 0x201d (0x2028 - 0x0b) or 0x201e.
__ sub(r0, r0, Operand(0x2028 - 0x0b)); __ sub(r0, r0, Operand(0x2028 - 0x0b));
__ cmp(r0, Operand(1)); __ cmp(r0, Operand(1));
BranchOrBacktrack(hi, on_no_match); BranchOrBacktrack(hi, on_no_match);
__ bind(&done); __ bind(&done);
    }
    return true;
  }
  case 'w': {
-   // Match word character (0-9, A-Z, a-z and _).
-   Label digits, done;
-   __ cmp(current_character(), Operand('9'));
-   __ b(ls, &digits);
-   __ cmp(current_character(), Operand('_'));
-   __ b(eq, &done);
-   __ orr(r0, current_character(), Operand(0x20));
-   __ sub(r0, r0, Operand('a'));
-   __ cmp(r0, Operand('z' - 'a'));
-   BranchOrBacktrack(hi, on_no_match);
-   __ jmp(&done);
-   __ bind(&digits);
-   __ cmp(current_character(), Operand('0'));
-   BranchOrBacktrack(lo, on_no_match);
-   __ bind(&done);
+   if (mode_ != ASCII) {
+     // Table is 128 entries, so all ASCII characters can be tested.
+     __ cmp(current_character(), Operand('z'));
+     BranchOrBacktrack(hi, on_no_match);
+   }
+   ExternalReference map = ExternalReference::re_word_character_map();
+   __ mov(r0, Operand(map));
+   __ ldrb(r0, MemOperand(r0, current_character()));
+   __ tst(r0, Operand(r0));
+   BranchOrBacktrack(eq, on_no_match);
    return true;
  }
  case 'W': {
-   // Match non-word character (not 0-9, A-Z, a-z and _).
-   Label digits, done;
-   __ cmp(current_character(), Operand('9'));
-   __ b(ls, &digits);
-   __ cmp(current_character(), Operand('_'));
-   BranchOrBacktrack(eq, on_no_match);
-   __ orr(r0, current_character(), Operand(0x20));
-   __ sub(r0, r0, Operand('a'));
-   __ cmp(r0, Operand('z' - 'a'));
-   BranchOrBacktrack(ls, on_no_match);
-   __ jmp(&done);
-   __ bind(&digits);
-   __ cmp(current_character(), Operand('0'));
-   BranchOrBacktrack(hs, on_no_match);
-   __ bind(&done);
+   Label done;
+   if (mode_ != ASCII) {
+     // Table is 128 entries, so all ASCII characters can be tested.
+     __ cmp(current_character(), Operand('z'));
+     __ b(hi, &done);
+   }
+   ExternalReference map = ExternalReference::re_word_character_map();
+   __ mov(r0, Operand(map));
+   __ ldrb(r0, MemOperand(r0, current_character()));
+   __ tst(r0, Operand(r0));
+   BranchOrBacktrack(ne, on_no_match);
+   if (mode_ != ASCII) {
+     __ bind(&done);
+   }
    return true;
  }
case '*': case '*':
@ -620,6 +610,7 @@ Handle<Object> RegExpMacroAssemblerARM::GetCode(Handle<String> source) {
// Set frame pointer just above the arguments. // Set frame pointer just above the arguments.
__ add(frame_pointer(), sp, Operand(4 * kPointerSize)); __ add(frame_pointer(), sp, Operand(4 * kPointerSize));
__ push(r0); // Make room for "position - 1" constant (value is irrelevant). __ push(r0); // Make room for "position - 1" constant (value is irrelevant).
__ push(r0); // Make room for "at start" constant (value is irrelevant).
// Check if we have space on the stack for registers. // Check if we have space on the stack for registers.
Label stack_limit_hit; Label stack_limit_hit;
@ -663,6 +654,15 @@ Handle<Object> RegExpMacroAssemblerARM::GetCode(Handle<String> source) {
// Store this value in a local variable, for use when clearing // Store this value in a local variable, for use when clearing
// position registers. // position registers.
__ str(r0, MemOperand(frame_pointer(), kInputStartMinusOne)); __ str(r0, MemOperand(frame_pointer(), kInputStartMinusOne));
// Determine whether the start index is zero, that is at the start of the
// string, and store that value in a local variable.
__ ldr(r1, MemOperand(frame_pointer(), kStartIndex));
__ tst(r1, Operand(r1));
__ mov(r1, Operand(1), LeaveCC, eq);
__ mov(r1, Operand(0), LeaveCC, ne);
__ str(r1, MemOperand(frame_pointer(), kAtStart));
if (num_saved_registers_ > 0) { // Always is, if generated from a regexp. if (num_saved_registers_ > 0) { // Always is, if generated from a regexp.
// Fill saved registers with initial value = start offset - 1 // Fill saved registers with initial value = start offset - 1
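The 'w'/'W' rewrite above swaps a chain of range comparisons for a table lookup: a 128-entry map holds a non-zero byte for every word character and is indexed directly by the current character. A standalone sketch of that idea, independent of the ExternalReference plumbing (table and function names are the sketch's own):

  // word_character_map.cc -- illustrative only, not V8's actual table.
  #include <cassert>
  #include <cstdint>

  namespace {

  // 128-entry table: non-zero for [0-9A-Za-z_], zero otherwise.
  uint8_t word_character_map[128];

  void InitWordCharacterMap() {
    for (int c = 0; c < 128; c++) {
      bool is_word = (c >= '0' && c <= '9') ||
                     (c >= 'A' && c <= 'Z') ||
                     (c >= 'a' && c <= 'z') ||
                     c == '_';
      word_character_map[c] = is_word ? 1 : 0;
    }
  }

  // Mirrors the generated code: characters above 'z' are rejected up front
  // (the table only has 128 entries), then one byte load decides.
  bool IsRegExpWordCharacter(uint16_t current_character) {
    if (current_character > 'z') return false;
    return word_character_map[current_character] != 0;
  }

  }  // namespace

  int main() {
    InitWordCharacterMap();
    assert(IsRegExpWordCharacter('a'));
    assert(IsRegExpWordCharacter('_'));
    assert(!IsRegExpWordCharacter(' '));
    assert(!IsRegExpWordCharacter(0x2028));  // outside the table
    return 0;
  }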

6
deps/v8/src/arm/regexp-macro-assembler-arm.h

@ -123,8 +123,7 @@ class RegExpMacroAssemblerARM: public NativeRegExpMacroAssembler {
static const int kReturnAddress = kStoredRegisters + 8 * kPointerSize; static const int kReturnAddress = kStoredRegisters + 8 * kPointerSize;
// Stack parameters placed by caller. // Stack parameters placed by caller.
static const int kRegisterOutput = kReturnAddress + kPointerSize; static const int kRegisterOutput = kReturnAddress + kPointerSize;
static const int kAtStart = kRegisterOutput + kPointerSize; static const int kStackHighEnd = kRegisterOutput + kPointerSize;
static const int kStackHighEnd = kAtStart + kPointerSize;
static const int kDirectCall = kStackHighEnd + kPointerSize; static const int kDirectCall = kStackHighEnd + kPointerSize;
// Below the frame pointer. // Below the frame pointer.
@ -136,8 +135,9 @@ class RegExpMacroAssemblerARM: public NativeRegExpMacroAssembler {
// When adding local variables remember to push space for them in // When adding local variables remember to push space for them in
// the frame in GetCode. // the frame in GetCode.
static const int kInputStartMinusOne = kInputString - kPointerSize; static const int kInputStartMinusOne = kInputString - kPointerSize;
static const int kAtStart = kInputStartMinusOne - kPointerSize;
// First register address. Following registers are below it on the stack. // First register address. Following registers are below it on the stack.
static const int kRegisterZero = kInputStartMinusOne - kPointerSize; static const int kRegisterZero = kAtStart - kPointerSize;
// Initial size of code buffer. // Initial size of code buffer.
static const size_t kRegExpCodeSize = 1024; static const size_t kRegExpCodeSize = 1024;

103
deps/v8/src/arm/simulator-arm.cc

@ -1,4 +1,4 @@
- // Copyright 2009 the V8 project authors. All rights reserved.
+ // Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without // Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are // modification, are permitted provided that the following conditions are
// met: // met:
@ -47,9 +47,9 @@ using ::v8::internal::ReadLine;
using ::v8::internal::DeleteArray; using ::v8::internal::DeleteArray;
  // This macro provides a platform independent use of sscanf. The reason for
- // SScanF not being implemented in a platform independent was through
- // ::v8::internal::OS in the same way as SNPrintF is that the Windows C Run-Time
- // Library does not provide vsscanf.
+ // SScanF not being implemented in a platform independent way through
+ // ::v8::internal::OS in the same way as SNPrintF is that the
+ // Windows C Run-Time Library does not provide vsscanf.
#define SScanF sscanf // NOLINT #define SScanF sscanf // NOLINT
// The Debugger class is used by the simulator while debugging simulated ARM // The Debugger class is used by the simulator while debugging simulated ARM
@ -355,6 +355,10 @@ void Debugger::Debug() {
} else { } else {
PrintF("Not at debugger stop."); PrintF("Not at debugger stop.");
} }
} else if ((strcmp(cmd, "t") == 0) || strcmp(cmd, "trace") == 0) {
::v8::internal::FLAG_trace_sim = !::v8::internal::FLAG_trace_sim;
PrintF("Trace of executed instructions is %s\n",
::v8::internal::FLAG_trace_sim ? "on" : "off");
} else if ((strcmp(cmd, "h") == 0) || (strcmp(cmd, "help") == 0)) { } else if ((strcmp(cmd, "h") == 0) || (strcmp(cmd, "help") == 0)) {
PrintF("cont\n"); PrintF("cont\n");
PrintF(" continue execution (alias 'c')\n"); PrintF(" continue execution (alias 'c')\n");
@ -378,7 +382,9 @@ void Debugger::Debug() {
PrintF(" delete the breakpoint\n"); PrintF(" delete the breakpoint\n");
PrintF("unstop\n"); PrintF("unstop\n");
PrintF(" ignore the stop instruction at the current location"); PrintF(" ignore the stop instruction at the current location");
PrintF(" from now on\n"); PrintF(" from now on\n");
PrintF("trace (alias 't')\n");
PrintF(" toogle the tracing of all executed statements");
} else { } else {
PrintF("Unknown command: %s\n", cmd); PrintF("Unknown command: %s\n", cmd);
} }
@ -890,8 +896,13 @@ bool Simulator::OverflowFrom(int32_t alu_out,
// Support for VFP comparisons. // Support for VFP comparisons.
void Simulator::Compute_FPSCR_Flags(double val1, double val2) { void Simulator::Compute_FPSCR_Flags(double val1, double val2) {
+ if (isnan(val1) || isnan(val2)) {
+   n_flag_FPSCR_ = false;
+   z_flag_FPSCR_ = false;
+   c_flag_FPSCR_ = true;
+   v_flag_FPSCR_ = true;
  // All non-NaN cases.
- if (val1 == val2) {
+ } else if (val1 == val2) {
n_flag_FPSCR_ = false; n_flag_FPSCR_ = false;
z_flag_FPSCR_ = true; z_flag_FPSCR_ = true;
c_flag_FPSCR_ = true; c_flag_FPSCR_ = true;
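For reference, a small standalone sketch of the four flag patterns this comparison helper can produce: the unordered (NaN) case added above sets C and V, and the ordered cases follow the usual ARM VFP semantics. Names are local to the sketch, not the simulator's:

  // fpscr_flags.cc -- illustrative only.
  #include <cassert>
  #include <cmath>

  struct FpscrFlags {
    bool n, z, c, v;
  };

  FpscrFlags ComputeFpscrFlags(double val1, double val2) {
    if (std::isnan(val1) || std::isnan(val2)) {
      return {false, false, true, true};   // unordered
    } else if (val1 == val2) {
      return {false, true, true, false};   // equal
    } else if (val1 < val2) {
      return {true, false, false, false};  // less than
    } else {
      return {false, false, true, false};  // greater than
    }
  }

  int main() {
    assert(ComputeFpscrFlags(1.0, NAN).c && ComputeFpscrFlags(1.0, NAN).v);
    assert(ComputeFpscrFlags(2.0, 2.0).z);
    assert(ComputeFpscrFlags(1.0, 2.0).n);
    assert(ComputeFpscrFlags(3.0, 2.0).c);
    return 0;
  }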
@ -2022,42 +2033,62 @@ void Simulator::DecodeTypeVFP(Instr* instr) {
  // Decode Type 6 coprocessor instructions.
  // Dm = vmov(Rt, Rt2)
  // <Rt, Rt2> = vmov(Dm)
+ // Ddst = MEM(Rbase + 4*offset).
+ // MEM(Rbase + 4*offset) = Dsrc.
  void Simulator::DecodeType6CoprocessorIns(Instr* instr) {
    ASSERT((instr->TypeField() == 6));
-   int rt = instr->RtField();
-   int rn = instr->RnField();
-   int vm = instr->VmField();
-   if (instr->Bit(23) == 1) {
-     UNIMPLEMENTED();
-   } else if (instr->Bit(22) == 1) {
-     if ((instr->Bits(27, 24) == 0xC) &&
-         (instr->Bit(22) == 1) &&
-         (instr->Bits(11, 8) == 0xB) &&
-         (instr->Bits(7, 6) == 0x0) &&
-         (instr->Bit(4) == 1)) {
-       if (instr->Bit(20) == 0) {
-         int32_t rs_val = get_register(rt);
-         int32_t rn_val = get_register(rn);
-         set_s_register_from_sinteger(2*vm, rs_val);
-         set_s_register_from_sinteger((2*vm+1), rn_val);
-       } else if (instr->Bit(20) == 1) {
-         int32_t rt_int_value = get_sinteger_from_s_register(2*vm);
-         int32_t rn_int_value = get_sinteger_from_s_register(2*vm+1);
-         set_register(rt, rt_int_value);
-         set_register(rn, rn_int_value);
-       }
-     } else {
-       UNIMPLEMENTED();
-     }
-   } else if (instr->Bit(21) == 1) {
-     UNIMPLEMENTED();
-   } else {
-     UNIMPLEMENTED();
-   }
+   if (instr->CoprocessorField() != 0xB) {
+     UNIMPLEMENTED();  // Not used by V8.
+   } else {
+     switch (instr->OpcodeField()) {
+       case 0x2:
+         // Load and store double to two GP registers
+         if (instr->Bits(7, 4) != 0x1) {
+           UNIMPLEMENTED();  // Not used by V8.
+         } else {
+           int rt = instr->RtField();
+           int rn = instr->RnField();
+           int vm = instr->VmField();
+           if (instr->HasL()) {
+             int32_t rt_int_value = get_sinteger_from_s_register(2*vm);
+             int32_t rn_int_value = get_sinteger_from_s_register(2*vm+1);
+             set_register(rt, rt_int_value);
+             set_register(rn, rn_int_value);
+           } else {
+             int32_t rs_val = get_register(rt);
+             int32_t rn_val = get_register(rn);
+             set_s_register_from_sinteger(2*vm, rs_val);
+             set_s_register_from_sinteger((2*vm+1), rn_val);
+           }
+         }
+         break;
+       case 0x8:
+       case 0xC: {  // Load and store double to memory.
+         int rn = instr->RnField();
+         int vd = instr->VdField();
+         int offset = instr->Immed8Field();
+         if (!instr->HasU()) {
+           offset = -offset;
+         }
+         int32_t address = get_register(rn) + 4 * offset;
+         if (instr->HasL()) {
+           // Load double from memory: vldr.
+           set_s_register_from_sinteger(2*vd, ReadW(address, instr));
+           set_s_register_from_sinteger(2*vd + 1, ReadW(address + 4, instr));
+         } else {
+           // Store double to memory: vstr.
+           WriteW(address, get_sinteger_from_s_register(2*vd), instr);
+           WriteW(address + 4, get_sinteger_from_s_register(2*vd + 1), instr);
+         }
+         break;
+       }
+       default:
+         UNIMPLEMENTED();  // Not used by V8.
+         break;
+     }
+   }
  }
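A condensed sketch of the address arithmetic used by the new vldr/vstr handling above: the 8-bit immediate counts words, is scaled by four, and is added to or subtracted from the base register depending on the U bit (function and parameter names are the sketch's own):

  // vfp_transfer_address.cc -- illustrative only.
  #include <cassert>
  #include <cstdint>

  // Effective address for a vldr/vstr-style access: base plus or minus
  // 4 * imm8, as in Simulator::DecodeType6CoprocessorIns above.
  int32_t VfpTransferAddress(int32_t base, int offset_imm8, bool u_bit) {
    int offset = u_bit ? offset_imm8 : -offset_imm8;
    return base + 4 * offset;
  }

  int main() {
    assert(VfpTransferAddress(0x1000, 2, true) == 0x1008);   // add, 2 words up
    assert(VfpTransferAddress(0x1000, 2, false) == 0x0FF8);  // subtract
    return 0;
  }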

8
deps/v8/src/arm/simulator-arm.h

@ -63,8 +63,8 @@ class SimulatorStack : public v8::internal::AllStatic {
// Call the generated regexp code directly. The entry function pointer should // Call the generated regexp code directly. The entry function pointer should
// expect eight int/pointer sized arguments and return an int. // expect eight int/pointer sized arguments and return an int.
- #define CALL_GENERATED_REGEXP_CODE(entry, p0, p1, p2, p3, p4, p5, p6, p7) \
-   entry(p0, p1, p2, p3, p4, p5, p6, p7)
+ #define CALL_GENERATED_REGEXP_CODE(entry, p0, p1, p2, p3, p4, p5, p6) \
+   entry(p0, p1, p2, p3, p4, p5, p6)
#define TRY_CATCH_FROM_ADDRESS(try_catch_address) \ #define TRY_CATCH_FROM_ADDRESS(try_catch_address) \
reinterpret_cast<TryCatch*>(try_catch_address) reinterpret_cast<TryCatch*>(try_catch_address)
@ -79,9 +79,9 @@ class SimulatorStack : public v8::internal::AllStatic {
assembler::arm::Simulator::current()->Call(FUNCTION_ADDR(entry), 5, \ assembler::arm::Simulator::current()->Call(FUNCTION_ADDR(entry), 5, \
p0, p1, p2, p3, p4)) p0, p1, p2, p3, p4))
- #define CALL_GENERATED_REGEXP_CODE(entry, p0, p1, p2, p3, p4, p5, p6, p7) \
-   assembler::arm::Simulator::current()->Call( \
-       FUNCTION_ADDR(entry), 8, p0, p1, p2, p3, p4, p5, p6, p7)
+ #define CALL_GENERATED_REGEXP_CODE(entry, p0, p1, p2, p3, p4, p5, p6) \
+   assembler::arm::Simulator::current()->Call( \
+       FUNCTION_ADDR(entry), 7, p0, p1, p2, p3, p4, p5, p6)
#define TRY_CATCH_FROM_ADDRESS(try_catch_address) \ #define TRY_CATCH_FROM_ADDRESS(try_catch_address) \
try_catch_address == NULL ? \ try_catch_address == NULL ? \

551
deps/v8/src/arm/stub-cache-arm.cc

@ -362,6 +362,369 @@ void StubCompiler::GenerateLoadMiss(MacroAssembler* masm, Code::Kind kind) {
} }
static void GenerateCallFunction(MacroAssembler* masm,
Object* object,
const ParameterCount& arguments,
Label* miss) {
// ----------- S t a t e -------------
// -- r0: receiver
// -- r1: function to call
// -----------------------------------
// Check that the function really is a function.
__ BranchOnSmi(r1, miss);
__ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE);
__ b(ne, miss);
// Patch the receiver on the stack with the global proxy if
// necessary.
if (object->IsGlobalObject()) {
__ ldr(r3, FieldMemOperand(r0, GlobalObject::kGlobalReceiverOffset));
__ str(r3, MemOperand(sp, arguments.immediate() * kPointerSize));
}
// Invoke the function.
__ InvokeFunction(r1, arguments, JUMP_FUNCTION);
}
static void GenerateCallConstFunction(MacroAssembler* masm,
JSFunction* function,
const ParameterCount& arguments) {
ASSERT(function->is_compiled());
// Get the function and setup the context.
__ mov(r1, Operand(Handle<JSFunction>(function)));
__ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
// Jump to the cached code (tail call).
Handle<Code> code(function->code());
ParameterCount expected(function->shared()->formal_parameter_count());
__ InvokeCode(code, expected, arguments,
RelocInfo::CODE_TARGET, JUMP_FUNCTION);
}
template <class Compiler>
static void CompileLoadInterceptor(Compiler* compiler,
StubCompiler* stub_compiler,
MacroAssembler* masm,
JSObject* object,
JSObject* holder,
String* name,
LookupResult* lookup,
Register receiver,
Register scratch1,
Register scratch2,
Label* miss) {
ASSERT(holder->HasNamedInterceptor());
ASSERT(!holder->GetNamedInterceptor()->getter()->IsUndefined());
// Check that the receiver isn't a smi.
__ BranchOnSmi(receiver, miss);
// Check that the maps haven't changed.
Register reg =
stub_compiler->CheckPrototypes(object, receiver, holder,
scratch1, scratch2, name, miss);
if (lookup->IsValid() && lookup->IsCacheable()) {
compiler->CompileCacheable(masm,
stub_compiler,
receiver,
reg,
scratch1,
scratch2,
holder,
lookup,
name,
miss);
} else {
compiler->CompileRegular(masm,
receiver,
reg,
scratch2,
holder,
miss);
}
}
static void PushInterceptorArguments(MacroAssembler* masm,
Register receiver,
Register holder,
Register name,
JSObject* holder_obj) {
__ push(receiver);
__ push(holder);
__ push(name);
InterceptorInfo* interceptor = holder_obj->GetNamedInterceptor();
ASSERT(!Heap::InNewSpace(interceptor));
Register scratch = receiver;
__ mov(scratch, Operand(Handle<Object>(interceptor)));
__ push(scratch);
__ ldr(scratch, FieldMemOperand(scratch, InterceptorInfo::kDataOffset));
__ push(scratch);
}
static void CompileCallLoadPropertyWithInterceptor(MacroAssembler* masm,
Register receiver,
Register holder,
Register name,
JSObject* holder_obj) {
PushInterceptorArguments(masm, receiver, holder, name, holder_obj);
ExternalReference ref =
ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorOnly));
__ mov(r0, Operand(5));
__ mov(r1, Operand(ref));
CEntryStub stub(1);
__ CallStub(&stub);
}
class LoadInterceptorCompiler BASE_EMBEDDED {
public:
explicit LoadInterceptorCompiler(Register name) : name_(name) {}
void CompileCacheable(MacroAssembler* masm,
StubCompiler* stub_compiler,
Register receiver,
Register holder,
Register scratch1,
Register scratch2,
JSObject* holder_obj,
LookupResult* lookup,
String* name,
Label* miss_label) {
AccessorInfo* callback = 0;
bool optimize = false;
// So far the most popular follow ups for interceptor loads are FIELD
// and CALLBACKS, so inline only them, other cases may be added
// later.
if (lookup->type() == FIELD) {
optimize = true;
} else if (lookup->type() == CALLBACKS) {
Object* callback_object = lookup->GetCallbackObject();
if (callback_object->IsAccessorInfo()) {
callback = AccessorInfo::cast(callback_object);
optimize = callback->getter() != NULL;
}
}
if (!optimize) {
CompileRegular(masm, receiver, holder, scratch2, holder_obj, miss_label);
return;
}
// Note: starting a frame here makes GC aware of pointers pushed below.
__ EnterInternalFrame();
if (lookup->type() == CALLBACKS) {
__ push(receiver);
}
__ push(holder);
__ push(name_);
CompileCallLoadPropertyWithInterceptor(masm,
receiver,
holder,
name_,
holder_obj);
Label interceptor_failed;
// Compare with no_interceptor_result_sentinel.
__ LoadRoot(scratch1, Heap::kNoInterceptorResultSentinelRootIndex);
__ cmp(r0, scratch1);
__ b(eq, &interceptor_failed);
__ LeaveInternalFrame();
__ Ret();
__ bind(&interceptor_failed);
__ pop(name_);
__ pop(holder);
if (lookup->type() == CALLBACKS) {
__ pop(receiver);
}
__ LeaveInternalFrame();
if (lookup->type() == FIELD) {
holder = stub_compiler->CheckPrototypes(holder_obj,
holder,
lookup->holder(),
scratch1,
scratch2,
name,
miss_label);
stub_compiler->GenerateFastPropertyLoad(masm,
r0,
holder,
lookup->holder(),
lookup->GetFieldIndex());
__ Ret();
} else {
ASSERT(lookup->type() == CALLBACKS);
ASSERT(lookup->GetCallbackObject()->IsAccessorInfo());
ASSERT(callback != NULL);
ASSERT(callback->getter() != NULL);
Label cleanup;
__ pop(scratch2);
__ push(receiver);
__ push(scratch2);
holder = stub_compiler->CheckPrototypes(holder_obj, holder,
lookup->holder(), scratch1,
scratch2,
name,
&cleanup);
__ push(holder);
__ Move(holder, Handle<AccessorInfo>(callback));
__ push(holder);
__ ldr(scratch1, FieldMemOperand(holder, AccessorInfo::kDataOffset));
__ push(scratch1);
__ push(name_);
ExternalReference ref =
ExternalReference(IC_Utility(IC::kLoadCallbackProperty));
__ TailCallRuntime(ref, 5, 1);
__ bind(&cleanup);
__ pop(scratch1);
__ pop(scratch2);
__ push(scratch1);
}
}
void CompileRegular(MacroAssembler* masm,
Register receiver,
Register holder,
Register scratch,
JSObject* holder_obj,
Label* miss_label) {
PushInterceptorArguments(masm, receiver, holder, name_, holder_obj);
ExternalReference ref = ExternalReference(
IC_Utility(IC::kLoadPropertyWithInterceptorForLoad));
__ TailCallRuntime(ref, 5, 1);
}
private:
Register name_;
};
class CallInterceptorCompiler BASE_EMBEDDED {
public:
CallInterceptorCompiler(const ParameterCount& arguments, Register name)
: arguments_(arguments), argc_(arguments.immediate()), name_(name) {}
void CompileCacheable(MacroAssembler* masm,
StubCompiler* stub_compiler,
Register receiver,
Register holder,
Register scratch1,
Register scratch2,
JSObject* holder_obj,
LookupResult* lookup,
String* name,
Label* miss_label) {
JSFunction* function = 0;
bool optimize = false;
// So far the most popular case for failed interceptor is
// CONSTANT_FUNCTION sitting below.
if (lookup->type() == CONSTANT_FUNCTION) {
function = lookup->GetConstantFunction();
// JSArray holder is a special case for call constant function
// (see the corresponding code).
if (function->is_compiled() && !holder_obj->IsJSArray()) {
optimize = true;
}
}
if (!optimize) {
CompileRegular(masm, receiver, holder, scratch2, holder_obj, miss_label);
return;
}
// Constant functions cannot sit on global object.
ASSERT(!lookup->holder()->IsGlobalObject());
__ EnterInternalFrame();
__ push(holder); // Save the holder.
__ push(name_); // Save the name.
CompileCallLoadPropertyWithInterceptor(masm,
receiver,
holder,
name_,
holder_obj);
ASSERT(!r0.is(name_));
ASSERT(!r0.is(scratch1));
__ pop(name_); // Restore the name.
__ pop(scratch1); // Restore the holder.
__ LeaveInternalFrame();
// Compare with no_interceptor_result_sentinel.
__ LoadRoot(scratch2, Heap::kNoInterceptorResultSentinelRootIndex);
__ cmp(r0, scratch2);
Label invoke;
__ b(ne, &invoke);
stub_compiler->CheckPrototypes(holder_obj, scratch1,
lookup->holder(), scratch1,
scratch2,
name,
miss_label);
GenerateCallConstFunction(masm, function, arguments_);
__ bind(&invoke);
}
void CompileRegular(MacroAssembler* masm,
Register receiver,
Register holder,
Register scratch,
JSObject* holder_obj,
Label* miss_label) {
__ EnterInternalFrame();
// Save the name_ register across the call.
__ push(name_);
PushInterceptorArguments(masm,
receiver,
holder,
name_,
holder_obj);
ExternalReference ref = ExternalReference(
IC_Utility(IC::kLoadPropertyWithInterceptorForCall));
__ mov(r0, Operand(5));
__ mov(r1, Operand(ref));
CEntryStub stub(1);
__ CallStub(&stub);
// Restore the name_ register.
__ pop(name_);
__ LeaveInternalFrame();
}
private:
const ParameterCount& arguments_;
int argc_;
Register name_;
};
#undef __ #undef __
#define __ ACCESS_MASM(masm()) #define __ ACCESS_MASM(masm())
@ -491,30 +854,18 @@ void StubCompiler::GenerateLoadInterceptor(JSObject* object,
Register scratch2, Register scratch2,
String* name, String* name,
Label* miss) { Label* miss) {
- // Check that the receiver isn't a smi.
- __ tst(receiver, Operand(kSmiTagMask));
- __ b(eq, miss);
-
- // Check that the maps haven't changed.
- Register reg =
-     CheckPrototypes(object, receiver, holder, scratch1, scratch2, name, miss);
-
- // Push the arguments on the JS stack of the caller.
- __ push(receiver);  // receiver
- __ push(reg);  // holder
- __ push(name_reg);  // name
-
- InterceptorInfo* interceptor = holder->GetNamedInterceptor();
- ASSERT(!Heap::InNewSpace(interceptor));
- __ mov(scratch1, Operand(Handle<Object>(interceptor)));
- __ push(scratch1);
- __ ldr(scratch2, FieldMemOperand(scratch1, InterceptorInfo::kDataOffset));
- __ push(scratch2);
-
- // Do tail-call to the runtime system.
- ExternalReference load_ic_property =
-     ExternalReference(IC_Utility(IC::kLoadPropertyWithInterceptorForLoad));
- __ TailCallRuntime(load_ic_property, 5, 1);
+ LoadInterceptorCompiler compiler(name_reg);
+ CompileLoadInterceptor(&compiler,
+                        this,
+                        masm(),
+                        object,
+                        holder,
+                        name,
+                        lookup,
+                        receiver,
+                        scratch1,
+                        scratch2,
+                        miss);
}
@ -572,22 +923,7 @@ Object* CallStubCompiler::CompileCallField(Object* object,
CheckPrototypes(JSObject::cast(object), r0, holder, r3, r2, name, &miss); CheckPrototypes(JSObject::cast(object), r0, holder, r3, r2, name, &miss);
GenerateFastPropertyLoad(masm(), r1, reg, holder, index); GenerateFastPropertyLoad(masm(), r1, reg, holder, index);
- // Check that the function really is a function.
- __ tst(r1, Operand(kSmiTagMask));
- __ b(eq, &miss);
- // Get the map.
- __ CompareObjectType(r1, r2, r2, JS_FUNCTION_TYPE);
- __ b(ne, &miss);
-
- // Patch the receiver on the stack with the global proxy if
- // necessary.
- if (object->IsGlobalObject()) {
-   __ ldr(r3, FieldMemOperand(r0, GlobalObject::kGlobalReceiverOffset));
-   __ str(r3, MemOperand(sp, argc * kPointerSize));
- }
-
- // Invoke the function.
- __ InvokeFunction(r1, arguments(), JUMP_FUNCTION);
+ GenerateCallFunction(masm(), object, arguments(), &miss);
// Handle call cache miss. // Handle call cache miss.
__ bind(&miss); __ bind(&miss);
@ -637,50 +973,65 @@ Object* CallStubCompiler::CompileCallConstant(Object* object,
break; break;
    case STRING_CHECK:
+     if (!function->IsBuiltin()) {
+       // Calling non-builtins with a value as receiver requires boxing.
+       __ jmp(&miss);
+     } else {
        // Check that the object is a two-byte string or a symbol.
        __ CompareObjectType(r1, r2, r2, FIRST_NONSTRING_TYPE);
        __ b(hs, &miss);
        // Check that the maps starting from the prototype haven't changed.
        GenerateLoadGlobalFunctionPrototype(masm(),
                                            Context::STRING_FUNCTION_INDEX,
                                            r2);
        CheckPrototypes(JSObject::cast(object->GetPrototype()), r2, holder, r3,
                        r1, name, &miss);
+     }
      break;

    case NUMBER_CHECK: {
+     if (!function->IsBuiltin()) {
+       // Calling non-builtins with a value as receiver requires boxing.
+       __ jmp(&miss);
+     } else {
        Label fast;
        // Check that the object is a smi or a heap number.
        __ tst(r1, Operand(kSmiTagMask));
        __ b(eq, &fast);
        __ CompareObjectType(r1, r2, r2, HEAP_NUMBER_TYPE);
        __ b(ne, &miss);
        __ bind(&fast);
        // Check that the maps starting from the prototype haven't changed.
        GenerateLoadGlobalFunctionPrototype(masm(),
                                            Context::NUMBER_FUNCTION_INDEX,
                                            r2);
        CheckPrototypes(JSObject::cast(object->GetPrototype()), r2, holder, r3,
                        r1, name, &miss);
+     }
      break;
    }

    case BOOLEAN_CHECK: {
+     if (!function->IsBuiltin()) {
+       // Calling non-builtins with a value as receiver requires boxing.
+       __ jmp(&miss);
+     } else {
        Label fast;
        // Check that the object is a boolean.
        __ LoadRoot(ip, Heap::kTrueValueRootIndex);
        __ cmp(r1, ip);
        __ b(eq, &fast);
        __ LoadRoot(ip, Heap::kFalseValueRootIndex);
        __ cmp(r1, ip);
        __ b(ne, &miss);
        __ bind(&fast);
        // Check that the maps starting from the prototype haven't changed.
        GenerateLoadGlobalFunctionPrototype(masm(),
                                            Context::BOOLEAN_FUNCTION_INDEX,
                                            r2);
        CheckPrototypes(JSObject::cast(object->GetPrototype()), r2, holder, r3,
                        r1, name, &miss);
+     }
      break;
    }
@ -700,16 +1051,7 @@ Object* CallStubCompiler::CompileCallConstant(Object* object,
UNREACHABLE(); UNREACHABLE();
} }
- // Get the function and setup the context.
- __ mov(r1, Operand(Handle<JSFunction>(function)));
- __ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
-
- // Jump to the cached code (tail call).
- ASSERT(function->is_compiled());
- Handle<Code> code(function->code());
- ParameterCount expected(function->shared()->formal_parameter_count());
- __ InvokeCode(code, expected, arguments(),
-               RelocInfo::CODE_TARGET, JUMP_FUNCTION);
+ GenerateCallConstFunction(masm(), function, arguments());
// Handle call cache miss. // Handle call cache miss.
__ bind(&miss); __ bind(&miss);
@ -733,7 +1075,34 @@ Object* CallStubCompiler::CompileCallInterceptor(Object* object,
// ----------------------------------- // -----------------------------------
Label miss; Label miss;
- // TODO(1224669): Implement.
+ // Get the number of arguments.
const int argc = arguments().immediate();
LookupResult lookup;
LookupPostInterceptor(holder, name, &lookup);
// Get the receiver from the stack into r0.
__ ldr(r0, MemOperand(sp, argc * kPointerSize));
// Load the name from the stack into r1.
__ ldr(r1, MemOperand(sp, (argc + 1) * kPointerSize));
CallInterceptorCompiler compiler(arguments(), r1);
CompileLoadInterceptor(&compiler,
this,
masm(),
JSObject::cast(object),
holder,
name,
&lookup,
r0,
r2,
r3,
&miss);
// Restore receiver.
__ ldr(r0, MemOperand(sp, argc * kPointerSize));
GenerateCallFunction(masm(), object, arguments(), &miss);
// Handle call cache miss. // Handle call cache miss.
__ bind(&miss); __ bind(&miss);
@ -906,7 +1275,6 @@ Object* StoreStubCompiler::CompileStoreCallback(JSObject* object,
// Handle store cache miss. // Handle store cache miss.
__ bind(&miss); __ bind(&miss);
__ mov(r2, Operand(Handle<String>(name))); // restore name
Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss)); Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
__ Jump(ic, RelocInfo::CODE_TARGET); __ Jump(ic, RelocInfo::CODE_TARGET);
@ -958,7 +1326,6 @@ Object* StoreStubCompiler::CompileStoreInterceptor(JSObject* receiver,
// Handle store cache miss. // Handle store cache miss.
__ bind(&miss); __ bind(&miss);
__ mov(r2, Operand(Handle<String>(name))); // restore name
Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss)); Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
__ Jump(ic, RelocInfo::CODE_TARGET); __ Jump(ic, RelocInfo::CODE_TARGET);
@ -1084,7 +1451,7 @@ Object* LoadStubCompiler::CompileLoadInterceptor(JSObject* object,
__ ldr(r0, MemOperand(sp, 0)); __ ldr(r0, MemOperand(sp, 0));
LookupResult lookup; LookupResult lookup;
- holder->LocalLookupRealNamedProperty(name, &lookup);
+ LookupPostInterceptor(holder, name, &lookup);
GenerateLoadInterceptor(object, GenerateLoadInterceptor(object,
holder, holder,
&lookup, &lookup,
@ -1250,7 +1617,7 @@ Object* KeyedLoadStubCompiler::CompileLoadInterceptor(JSObject* receiver,
__ b(ne, &miss); __ b(ne, &miss);
LookupResult lookup; LookupResult lookup;
- holder->LocalLookupRealNamedProperty(name, &lookup);
+ LookupPostInterceptor(holder, name, &lookup);
GenerateLoadInterceptor(receiver, GenerateLoadInterceptor(receiver,
holder, holder,
&lookup, &lookup,

101
deps/v8/src/arm/virtual-frame-arm.cc

@ -219,36 +219,15 @@ void VirtualFrame::PushTryHandler(HandlerType type) {
} }
void VirtualFrame::RawCallStub(CodeStub* stub) {
ASSERT(cgen()->HasValidEntryRegisters());
__ CallStub(stub);
}
void VirtualFrame::CallStub(CodeStub* stub, Result* arg) {
PrepareForCall(0, 0);
arg->Unuse();
RawCallStub(stub);
}
void VirtualFrame::CallStub(CodeStub* stub, Result* arg0, Result* arg1) {
PrepareForCall(0, 0);
arg0->Unuse();
arg1->Unuse();
RawCallStub(stub);
}
void VirtualFrame::CallRuntime(Runtime::Function* f, int arg_count) { void VirtualFrame::CallRuntime(Runtime::Function* f, int arg_count) {
- PrepareForCall(arg_count, arg_count);
+ Forget(arg_count);
ASSERT(cgen()->HasValidEntryRegisters()); ASSERT(cgen()->HasValidEntryRegisters());
__ CallRuntime(f, arg_count); __ CallRuntime(f, arg_count);
} }
void VirtualFrame::CallRuntime(Runtime::FunctionId id, int arg_count) { void VirtualFrame::CallRuntime(Runtime::FunctionId id, int arg_count) {
- PrepareForCall(arg_count, arg_count);
+ Forget(arg_count);
ASSERT(cgen()->HasValidEntryRegisters()); ASSERT(cgen()->HasValidEntryRegisters());
__ CallRuntime(id, arg_count); __ CallRuntime(id, arg_count);
} }
@ -257,102 +236,34 @@ void VirtualFrame::CallRuntime(Runtime::FunctionId id, int arg_count) {
void VirtualFrame::InvokeBuiltin(Builtins::JavaScript id, void VirtualFrame::InvokeBuiltin(Builtins::JavaScript id,
InvokeJSFlags flags, InvokeJSFlags flags,
int arg_count) { int arg_count) {
- PrepareForCall(arg_count, arg_count);
+ Forget(arg_count);
__ InvokeBuiltin(id, flags); __ InvokeBuiltin(id, flags);
} }
void VirtualFrame::RawCallCodeObject(Handle<Code> code,
RelocInfo::Mode rmode) {
ASSERT(cgen()->HasValidEntryRegisters());
__ Call(code, rmode);
}
void VirtualFrame::CallCodeObject(Handle<Code> code, void VirtualFrame::CallCodeObject(Handle<Code> code,
RelocInfo::Mode rmode, RelocInfo::Mode rmode,
int dropped_args) { int dropped_args) {
int spilled_args = 0;
switch (code->kind()) { switch (code->kind()) {
case Code::CALL_IC: case Code::CALL_IC:
spilled_args = dropped_args + 1;
break;
case Code::FUNCTION: case Code::FUNCTION:
spilled_args = dropped_args + 1;
break; break;
case Code::KEYED_LOAD_IC: case Code::KEYED_LOAD_IC:
ASSERT(dropped_args == 0);
spilled_args = 2;
break;
default:
// The other types of code objects are called with values
// in specific registers, and are handled in functions with
// a different signature.
UNREACHABLE();
break;
}
PrepareForCall(spilled_args, dropped_args);
RawCallCodeObject(code, rmode);
}
void VirtualFrame::CallCodeObject(Handle<Code> code,
RelocInfo::Mode rmode,
Result* arg,
int dropped_args) {
int spilled_args = 0;
switch (code->kind()) {
case Code::LOAD_IC: case Code::LOAD_IC:
ASSERT(arg->reg().is(r2));
ASSERT(dropped_args == 0);
spilled_args = 1;
break;
case Code::KEYED_STORE_IC: case Code::KEYED_STORE_IC:
ASSERT(arg->reg().is(r0));
ASSERT(dropped_args == 0);
spilled_args = 2;
break;
default:
// No other types of code objects are called with values
// in exactly one register.
UNREACHABLE();
break;
}
PrepareForCall(spilled_args, dropped_args);
arg->Unuse();
RawCallCodeObject(code, rmode);
}
void VirtualFrame::CallCodeObject(Handle<Code> code,
RelocInfo::Mode rmode,
Result* arg0,
Result* arg1,
int dropped_args) {
int spilled_args = 1;
switch (code->kind()) {
case Code::STORE_IC: case Code::STORE_IC:
ASSERT(arg0->reg().is(r0));
ASSERT(arg1->reg().is(r2));
ASSERT(dropped_args == 0); ASSERT(dropped_args == 0);
spilled_args = 1;
break; break;
case Code::BUILTIN: case Code::BUILTIN:
ASSERT(*code == Builtins::builtin(Builtins::JSConstructCall)); ASSERT(*code == Builtins::builtin(Builtins::JSConstructCall));
ASSERT(arg0->reg().is(r0));
ASSERT(arg1->reg().is(r1));
spilled_args = dropped_args + 1;
break; break;
default: default:
// No other types of code objects are called with values
// in exactly two registers.
UNREACHABLE(); UNREACHABLE();
break; break;
} }
PrepareForCall(spilled_args, dropped_args); Forget(dropped_args);
arg0->Unuse(); ASSERT(cgen()->HasValidEntryRegisters());
arg1->Unuse(); __ Call(code, rmode);
RawCallCodeObject(code, rmode);
} }

34
deps/v8/src/arm/virtual-frame-arm.h

@ -287,18 +287,11 @@ class VirtualFrame : public ZoneObject {
// Call stub given the number of arguments it expects on (and // Call stub given the number of arguments it expects on (and
// removes from) the stack. // removes from) the stack.
void CallStub(CodeStub* stub, int arg_count) { void CallStub(CodeStub* stub, int arg_count) {
- PrepareForCall(arg_count, arg_count);
- RawCallStub(stub);
+ Forget(arg_count);
+ ASSERT(cgen()->HasValidEntryRegisters());
+ masm()->CallStub(stub);
} }
// Call stub that expects its argument in r0. The argument is given
// as a result which must be the register r0.
void CallStub(CodeStub* stub, Result* arg);
// Call stub that expects its arguments in r1 and r0. The arguments
// are given as results which must be the appropriate registers.
void CallStub(CodeStub* stub, Result* arg0, Result* arg1);
// Call runtime given the number of arguments expected on (and // Call runtime given the number of arguments expected on (and
// removed from) the stack. // removed from) the stack.
void CallRuntime(Runtime::Function* f, int arg_count); void CallRuntime(Runtime::Function* f, int arg_count);
@ -311,19 +304,10 @@ class VirtualFrame : public ZoneObject {
int arg_count); int arg_count);
// Call into an IC stub given the number of arguments it removes // Call into an IC stub given the number of arguments it removes
- // from the stack. Register arguments are passed as results and
- // consumed by the call.
+ // from the stack. Register arguments to the IC stub are implicit,
+ // and depend on the type of IC stub.
void CallCodeObject(Handle<Code> ic,
RelocInfo::Mode rmode,
int dropped_args);
void CallCodeObject(Handle<Code> ic, void CallCodeObject(Handle<Code> ic,
RelocInfo::Mode rmode, RelocInfo::Mode rmode,
Result* arg,
int dropped_args);
void CallCodeObject(Handle<Code> ic,
RelocInfo::Mode rmode,
Result* arg0,
Result* arg1,
int dropped_args); int dropped_args);
// Drop a number of elements from the top of the expression stack. May // Drop a number of elements from the top of the expression stack. May
@ -511,14 +495,6 @@ class VirtualFrame : public ZoneObject {
// Register counts are correctly updated. // Register counts are correctly updated.
int InvalidateFrameSlotAt(int index); int InvalidateFrameSlotAt(int index);
// Call a code stub that has already been prepared for calling (via
// PrepareForCall).
void RawCallStub(CodeStub* stub);
// Calls a code object which has already been prepared for calling
// (via PrepareForCall).
void RawCallCodeObject(Handle<Code> code, RelocInfo::Mode rmode);
bool Equals(VirtualFrame* other); bool Equals(VirtualFrame* other);
// Classes that need raw access to the elements_ array. // Classes that need raw access to the elements_ array.

20
deps/v8/src/assembler.cc

@ -44,6 +44,7 @@
#include "regexp-stack.h" #include "regexp-stack.h"
#include "ast.h" #include "ast.h"
#include "regexp-macro-assembler.h" #include "regexp-macro-assembler.h"
#include "platform.h"
// Include native regexp-macro-assembler. // Include native regexp-macro-assembler.
#ifdef V8_NATIVE_REGEXP #ifdef V8_NATIVE_REGEXP
#if V8_TARGET_ARCH_IA32 #if V8_TARGET_ARCH_IA32
@ -563,11 +564,6 @@ ExternalReference ExternalReference::perform_gc_function() {
} }
ExternalReference ExternalReference::builtin_passed_function() {
return ExternalReference(&Builtins::builtin_passed_function);
}
ExternalReference ExternalReference::random_positive_smi_function() { ExternalReference ExternalReference::random_positive_smi_function() {
return ExternalReference(Redirect(FUNCTION_ADDR(V8::RandomPositiveSmi))); return ExternalReference(Redirect(FUNCTION_ADDR(V8::RandomPositiveSmi)));
} }
@ -659,7 +655,7 @@ ExternalReference ExternalReference::re_check_stack_guard_state() {
#elif V8_TARGET_ARCH_ARM #elif V8_TARGET_ARCH_ARM
function = FUNCTION_ADDR(RegExpMacroAssemblerARM::CheckStackGuardState); function = FUNCTION_ADDR(RegExpMacroAssemblerARM::CheckStackGuardState);
#else #else
UNREACHABLE("Unexpected architecture"); UNREACHABLE();
#endif #endif
return ExternalReference(Redirect(function)); return ExternalReference(Redirect(function));
} }
@ -674,6 +670,10 @@ ExternalReference ExternalReference::re_case_insensitive_compare_uc16() {
FUNCTION_ADDR(NativeRegExpMacroAssembler::CaseInsensitiveCompareUC16))); FUNCTION_ADDR(NativeRegExpMacroAssembler::CaseInsensitiveCompareUC16)));
} }
ExternalReference ExternalReference::re_word_character_map() {
return ExternalReference(
NativeRegExpMacroAssembler::word_character_map_address());
}
ExternalReference ExternalReference::address_of_static_offsets_vector() { ExternalReference ExternalReference::address_of_static_offsets_vector() {
return ExternalReference(OffsetsVector::static_offsets_vector_address()); return ExternalReference(OffsetsVector::static_offsets_vector_address());
@ -711,13 +711,13 @@ static double div_two_doubles(double x, double y) {
static double mod_two_doubles(double x, double y) { static double mod_two_doubles(double x, double y) {
- return fmod(x, y);
+ return modulo(x, y);
  }

- static int native_compare_doubles(double x, double y) {
-   if (x == y) return 0;
-   return x < y ? 1 : -1;
+ static int native_compare_doubles(double y, double x) {
+   if (x == y) return EQUAL;
+   return x < y ? LESS : GREATER;
  }
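A standalone sketch of the three-way comparison shape used by native_compare_doubles after this change. The LESS/EQUAL/GREATER values below are an assumption of the sketch (the hunk itself does not show them), and the parameter order here is the natural one; note that a NaN operand falls through to the GREATER branch, which callers have to tolerate:

  // compare_doubles.cc -- illustrative only; constant values assumed.
  #include <cassert>

  const int LESS = -1;
  const int EQUAL = 0;
  const int GREATER = 1;

  // Same shape as native_compare_doubles: equal first, then ordering.
  // NaN compares unequal and not-less, so it ends up as GREATER here.
  int CompareDoubles(double x, double y) {
    if (x == y) return EQUAL;
    return x < y ? LESS : GREATER;
  }

  int main() {
    assert(CompareDoubles(1.0, 2.0) == LESS);
    assert(CompareDoubles(2.0, 2.0) == EQUAL);
    assert(CompareDoubles(3.0, 2.0) == GREATER);
    return 0;
  }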

5
deps/v8/src/assembler.h

@ -398,7 +398,6 @@ class ExternalReference BASE_EMBEDDED {
// ExternalReferenceTable in serialize.cc manually. // ExternalReferenceTable in serialize.cc manually.
static ExternalReference perform_gc_function(); static ExternalReference perform_gc_function();
static ExternalReference builtin_passed_function();
static ExternalReference random_positive_smi_function(); static ExternalReference random_positive_smi_function();
// Static data in the keyed lookup cache. // Static data in the keyed lookup cache.
@ -463,6 +462,10 @@ class ExternalReference BASE_EMBEDDED {
// Function NativeRegExpMacroAssembler::GrowStack() // Function NativeRegExpMacroAssembler::GrowStack()
static ExternalReference re_grow_stack(); static ExternalReference re_grow_stack();
// byte NativeRegExpMacroAssembler::word_character_bitmap
static ExternalReference re_word_character_map();
#endif #endif
// This lets you register a function that rewrites all external references. // This lets you register a function that rewrites all external references.

21
deps/v8/src/ast.cc

@ -146,27 +146,6 @@ bool ObjectLiteral::Property::IsCompileTimeValue() {
} }
bool ObjectLiteral::IsValidJSON() {
int length = properties()->length();
for (int i = 0; i < length; i++) {
Property* prop = properties()->at(i);
if (!prop->value()->IsValidJSON())
return false;
}
return true;
}
bool ArrayLiteral::IsValidJSON() {
int length = values()->length();
for (int i = 0; i < length; i++) {
if (!values()->at(i)->IsValidJSON())
return false;
}
return true;
}
void TargetCollector::AddTarget(BreakTarget* target) { void TargetCollector::AddTarget(BreakTarget* target) {
// Add the label to the collector, but discard duplicates. // Add the label to the collector, but discard duplicates.
int length = targets_->length(); int length = targets_->length();

35
deps/v8/src/ast.h

@ -180,11 +180,12 @@ class Expression: public AstNode {
kTestValue kTestValue
}; };
- Expression() : context_(kUninitialized) {}
+ static const int kNoLabel = -1;
+
+ Expression() : num_(kNoLabel) {}
virtual Expression* AsExpression() { return this; } virtual Expression* AsExpression() { return this; }
virtual bool IsValidJSON() { return false; }
virtual bool IsValidLeftHandSide() { return false; } virtual bool IsValidLeftHandSide() { return false; }
// Symbols that cannot be parsed as array indices are considered property // Symbols that cannot be parsed as array indices are considered property
@ -200,12 +201,14 @@ class Expression: public AstNode {
// Static type information for this expression. // Static type information for this expression.
StaticType* type() { return &type_; } StaticType* type() { return &type_; }
- Context context() { return context_; }
- void set_context(Context context) { context_ = context; }
+ int num() { return num_; }
+
+ // AST node numbering ordered by evaluation order.
+ void set_num(int n) { num_ = n; }
private: private:
StaticType type_; StaticType type_;
- Context context_;
+ int num_;
}; };
@ -709,8 +712,6 @@ class Literal: public Expression {
return handle_.is_identical_to(other->handle_); return handle_.is_identical_to(other->handle_);
} }
virtual bool IsValidJSON() { return true; }
virtual bool IsPropertyName() { virtual bool IsPropertyName() {
if (handle_->IsSymbol()) { if (handle_->IsSymbol()) {
uint32_t ignored; uint32_t ignored;
@ -747,8 +748,6 @@ class MaterializedLiteral: public Expression {
// constants and simple object and array literals. // constants and simple object and array literals.
bool is_simple() const { return is_simple_; } bool is_simple() const { return is_simple_; }
virtual bool IsValidJSON() { return true; }
int depth() const { return depth_; } int depth() const { return depth_; }
private: private:
@ -802,7 +801,6 @@ class ObjectLiteral: public MaterializedLiteral {
virtual ObjectLiteral* AsObjectLiteral() { return this; } virtual ObjectLiteral* AsObjectLiteral() { return this; }
virtual void Accept(AstVisitor* v); virtual void Accept(AstVisitor* v);
virtual bool IsValidJSON();
Handle<FixedArray> constant_properties() const { Handle<FixedArray> constant_properties() const {
return constant_properties_; return constant_properties_;
@ -850,7 +848,6 @@ class ArrayLiteral: public MaterializedLiteral {
virtual void Accept(AstVisitor* v); virtual void Accept(AstVisitor* v);
virtual ArrayLiteral* AsArrayLiteral() { return this; } virtual ArrayLiteral* AsArrayLiteral() { return this; }
virtual bool IsValidJSON();
Handle<FixedArray> constant_elements() const { return constant_elements_; } Handle<FixedArray> constant_elements() const { return constant_elements_; }
ZoneList<Expression*>* values() const { return values_; } ZoneList<Expression*>* values() const { return values_; }
@ -1184,6 +1181,9 @@ class CountOperation: public Expression {
bool is_prefix() const { return is_prefix_; } bool is_prefix() const { return is_prefix_; }
bool is_postfix() const { return !is_prefix_; } bool is_postfix() const { return !is_prefix_; }
Token::Value op() const { return op_; } Token::Value op() const { return op_; }
Token::Value binary_op() {
return op_ == Token::INC ? Token::ADD : Token::SUB;
}
Expression* expression() const { return expression_; } Expression* expression() const { return expression_; }
virtual void MarkAsStatement() { is_prefix_ = true; } virtual void MarkAsStatement() { is_prefix_ = true; }
@ -1324,10 +1324,9 @@ class FunctionLiteral: public Expression {
start_position_(start_position), start_position_(start_position),
end_position_(end_position), end_position_(end_position),
is_expression_(is_expression), is_expression_(is_expression),
loop_nesting_(0),
function_token_position_(RelocInfo::kNoPosition), function_token_position_(RelocInfo::kNoPosition),
inferred_name_(Heap::empty_string()), inferred_name_(Heap::empty_string()),
- try_fast_codegen_(false) {
+ try_full_codegen_(false) {
#ifdef DEBUG #ifdef DEBUG
already_compiled_ = false; already_compiled_ = false;
#endif #endif
@ -1359,16 +1358,13 @@ class FunctionLiteral: public Expression {
bool AllowsLazyCompilation(); bool AllowsLazyCompilation();
bool loop_nesting() const { return loop_nesting_; }
void set_loop_nesting(int nesting) { loop_nesting_ = nesting; }
Handle<String> inferred_name() const { return inferred_name_; } Handle<String> inferred_name() const { return inferred_name_; }
void set_inferred_name(Handle<String> inferred_name) { void set_inferred_name(Handle<String> inferred_name) {
inferred_name_ = inferred_name; inferred_name_ = inferred_name;
} }
- bool try_fast_codegen() { return try_fast_codegen_; }
- void set_try_fast_codegen(bool flag) { try_fast_codegen_ = flag; }
+ bool try_full_codegen() { return try_full_codegen_; }
+ void set_try_full_codegen(bool flag) { try_full_codegen_ = flag; }
#ifdef DEBUG #ifdef DEBUG
void mark_as_compiled() { void mark_as_compiled() {
@ -1389,10 +1385,9 @@ class FunctionLiteral: public Expression {
int start_position_; int start_position_;
int end_position_; int end_position_;
bool is_expression_; bool is_expression_;
int loop_nesting_;
int function_token_position_; int function_token_position_;
Handle<String> inferred_name_; Handle<String> inferred_name_;
- bool try_fast_codegen_;
+ bool try_full_codegen_;
#ifdef DEBUG #ifdef DEBUG
bool already_compiled_; bool already_compiled_;
#endif #endif

20
deps/v8/src/bootstrapper.cc

@ -249,26 +249,24 @@ bool PendingFixups::Process(Handle<JSBuiltinsObject> builtins) {
V8_Fatal(__FILE__, __LINE__, "Cannot resolve call to builtin %s", name); V8_Fatal(__FILE__, __LINE__, "Cannot resolve call to builtin %s", name);
} }
#endif #endif
- Handle<JSFunction> f = Handle<JSFunction>(JSFunction::cast(o));
+ Handle<SharedFunctionInfo> shared(JSFunction::cast(o)->shared());
// Make sure the number of parameters match the formal parameter count. // Make sure the number of parameters match the formal parameter count.
int argc = Bootstrapper::FixupFlagsArgumentsCount::decode(flags); int argc = Bootstrapper::FixupFlagsArgumentsCount::decode(flags);
USE(argc); USE(argc);
- ASSERT(f->shared()->formal_parameter_count() == argc);
- if (!f->is_compiled()) {
-   // Do lazy compilation and check for stack overflows.
-   if (!CompileLazy(f, CLEAR_EXCEPTION)) {
-     Clear();
-     return false;
-   }
- }
+ ASSERT(shared->formal_parameter_count() == argc);
+ // Do lazy compilation if necessary and check for stack overflows.
+ if (!EnsureCompiled(shared, CLEAR_EXCEPTION)) {
+   Clear();
+   return false;
+ }
Code* code = Code::cast(code_[i]); Code* code = Code::cast(code_[i]);
Address pc = code->instruction_start() + pc_[i]; Address pc = code->instruction_start() + pc_[i];
RelocInfo target(pc, RelocInfo::CODE_TARGET, 0); RelocInfo target(pc, RelocInfo::CODE_TARGET, 0);
bool use_code_object = Bootstrapper::FixupFlagsUseCodeObject::decode(flags); bool use_code_object = Bootstrapper::FixupFlagsUseCodeObject::decode(flags);
if (use_code_object) { if (use_code_object) {
target.set_target_object(f->code()); target.set_target_object(shared->code());
} else { } else {
target.set_target_address(f->code()->instruction_start()); target.set_target_address(shared->code()->instruction_start());
} }
LOG(StringEvent("resolved", name)); LOG(StringEvent("resolved", name));
} }
@ -960,7 +958,7 @@ bool Genesis::CompileScriptCached(Vector<const char> name,
Handle<JSFunction> fun = Handle<JSFunction> fun =
Factory::NewFunctionFromBoilerplate(boilerplate, context); Factory::NewFunctionFromBoilerplate(boilerplate, context);
// Call function using the either the runtime object or the global // Call function using either the runtime object or the global
// object as the receiver. Provide no parameters. // object as the receiver. Provide no parameters.
Handle<Object> receiver = Handle<Object> receiver =
Handle<Object>(use_runtime_context Handle<Object>(use_runtime_context

218
deps/v8/src/builtins.cc

@@ -36,8 +36,78 @@
namespace v8 {
namespace internal {
namespace {
// Arguments object passed to C++ builtins.
template <BuiltinExtraArguments extra_args>
class BuiltinArguments : public Arguments {
public:
BuiltinArguments(int length, Object** arguments)
: Arguments(length, arguments) { }
Object*& operator[] (int index) {
ASSERT(index < length());
return Arguments::operator[](index);
}
template <class S> Handle<S> at(int index) {
ASSERT(index < length());
return Arguments::at<S>(index);
}
Handle<Object> receiver() {
return Arguments::at<Object>(0);
}
Handle<JSFunction> called_function() {
STATIC_ASSERT(extra_args == NEEDS_CALLED_FUNCTION);
return Arguments::at<JSFunction>(Arguments::length() - 1);
}
// Gets the total number of arguments including the receiver (but
// excluding extra arguments).
int length() const {
STATIC_ASSERT(extra_args == NO_EXTRA_ARGUMENTS);
return Arguments::length();
}
#ifdef DEBUG
void Verify() {
// Check we have at least the receiver.
ASSERT(Arguments::length() >= 1);
}
#endif
};
// Specialize BuiltinArguments for the called function extra argument.
template <>
int BuiltinArguments<NEEDS_CALLED_FUNCTION>::length() const {
return Arguments::length() - 1;
}
#ifdef DEBUG
template <>
void BuiltinArguments<NEEDS_CALLED_FUNCTION>::Verify() {
// Check we have at least the receiver and the called function.
ASSERT(Arguments::length() >= 2);
// Make sure cast to JSFunction succeeds.
called_function();
}
#endif
#define DEF_ARG_TYPE(name, spec) \
typedef BuiltinArguments<spec> name##ArgumentsType;
BUILTIN_LIST_C(DEF_ARG_TYPE)
#undef DEF_ARG_TYPE
} // namespace
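The BuiltinArguments wrapper added above layers a typed view over the raw argument block: slot 0 is the receiver, and builtins declared with NEEDS_CALLED_FUNCTION find the called function appended after the last user argument, with length() specialized to hide that extra slot. A minimal standalone sketch of the same layout trick (toy types and names, not the V8 classes) might look like this:

    #include <cassert>
    #include <cstdio>

    enum ExtraArgs { kNoExtraArguments, kNeedsCalledFunction };

    // Toy stand-in for V8's tagged object pointers.
    struct Obj { const char* tag; };

    template <ExtraArgs extra>
    class Args {
     public:
      Args(int length, Obj** slots) : length_(length), slots_(slots) {}
      Obj* receiver() const { return slots_[0]; }
      Obj* operator[](int i) const { assert(i < length()); return slots_[i]; }
      // Total count including the receiver but excluding any extra slot.
      int length() const {
        return extra == kNeedsCalledFunction ? length_ - 1 : length_;
      }
      // Only meaningful when the extra slot is present.
      Obj* called_function() const {
        assert(extra == kNeedsCalledFunction);
        return slots_[length_ - 1];
      }
     private:
      int length_;
      Obj** slots_;
    };

    int main() {
      Obj recv{"receiver"}, a1{"arg1"}, fn{"called fn"};
      Obj* raw[] = {&recv, &a1, &fn};
      Args<kNeedsCalledFunction> args(3, raw);
      std::printf("%d user-visible slots, callee=%s\n",
                  args.length(), args.called_function()->tag);
    }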
// ----------------------------------------------------------------------------
-// Support macros for defining builtins in C.
+// Support macro for defining builtins in C++.
// ----------------------------------------------------------------------------
//
// A builtin function is defined by writing:
//
@@ -45,30 +115,26 @@ namespace internal {
//   BUILTIN(name) {
//     ...
//   }
-//   BUILTIN_END
//
-// In the body of the builtin function, the variable 'receiver' is visible.
-// The arguments can be accessed through the Arguments object args.
+// In the body of the builtin function the arguments can be accessed
+// through the BuiltinArguments object args.
-//
-// args[0]: Receiver (also available as 'receiver')
-// args[1]: First argument
-//   ...
-// args[n]: Last argument
-// args.length(): Number of arguments including the receiver.
-// ----------------------------------------------------------------------------
+#ifdef DEBUG
-// TODO(428): We should consider passing whether or not the
-// builtin was invoked as a constructor as part of the
-// arguments. Maybe we also want to pass the called function?
-#define BUILTIN(name) \
-  static Object* Builtin_##name(Arguments args) { \
-    Handle<Object> receiver = args.at<Object>(0);
+#define BUILTIN(name) \
+  static Object* Builtin_Impl_##name(name##ArgumentsType args); \
+  static Object* Builtin_##name(name##ArgumentsType args) { \
+    args.Verify(); \
+    return Builtin_Impl_##name(args); \
+  } \
+  static Object* Builtin_Impl_##name(name##ArgumentsType args)
+#else  // For release mode.
-#define BUILTIN_END \
-  return Heap::undefined_value(); \
-}
+#define BUILTIN(name) \
+  static Object* Builtin_##name(name##ArgumentsType args)
#endif
static inline bool CalledAsConstructor() {
@@ -109,12 +175,12 @@ Handle<Code> Builtins::GetCode(JavaScript id, bool* resolved) {
  if (Top::context() != NULL) {
    Object* object = Top::builtins()->javascript_builtin(id);
    if (object->IsJSFunction()) {
-     Handle<JSFunction> function(JSFunction::cast(object));
+     Handle<SharedFunctionInfo> shared(JSFunction::cast(object)->shared());
      // Make sure the number of parameters match the formal parameter count.
-     ASSERT(function->shared()->formal_parameter_count() ==
+     ASSERT(shared->formal_parameter_count() ==
             Builtins::GetArgumentsCount(id));
-     if (function->is_compiled() || CompileLazy(function, CLEAR_EXCEPTION)) {
-       code = function->code();
+     if (EnsureCompiled(shared, CLEAR_EXCEPTION)) {
+       code = shared->code();
        *resolved = true;
      }
    }
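GetCode now works from the SharedFunctionInfo and calls EnsureCompiled before touching shared->code(), i.e. compile on first use and then reuse the cached code. A toy sketch of that guard shape (hypothetical stand-in types, no relation to the real compiler entry points):

    #include <cstdio>
    #include <string>

    // Toy stand-in: compile on first use, then reuse the cached code object.
    struct Shared {
      std::string source;
      std::string code;  // empty until compiled
      bool is_compiled() const { return !code.empty(); }
    };

    static bool EnsureCompiled(Shared* shared) {
      if (shared->is_compiled()) return true;
      // "Compilation" may fail (e.g. stack overflow in the real engine).
      if (shared->source.empty()) return false;
      shared->code = "code for: " + shared->source;
      return true;
    }

    int main() {
      Shared s{"function f() {}", ""};
      if (EnsureCompiled(&s)) std::printf("%s\n", s.code.c_str());
    }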
@@ -126,13 +192,13 @@ Handle<Code> Builtins::GetCode(JavaScript id, bool* resolved) {
BUILTIN(Illegal) {
  UNREACHABLE();
+ return Heap::undefined_value();  // Make compiler happy.
}
-BUILTIN_END

BUILTIN(EmptyFunction) {
+ return Heap::undefined_value();
}
-BUILTIN_END
BUILTIN(ArrayCodeGeneric) {
@@ -140,7 +206,7 @@ BUILTIN(ArrayCodeGeneric) {
  JSArray* array;
  if (CalledAsConstructor()) {
-   array = JSArray::cast(*receiver);
+   array = JSArray::cast(*args.receiver());
  } else {
    // Allocate the JS Array
    JSFunction* constructor =
@@ -181,8 +247,10 @@ BUILTIN(ArrayCodeGeneric) {
  Smi* len = Smi::FromInt(number_of_elements);
  Object* obj = Heap::AllocateFixedArrayWithHoles(len->value());
  if (obj->IsFailure()) return obj;
+ AssertNoAllocation no_gc;
  FixedArray* elms = FixedArray::cast(obj);
- WriteBarrierMode mode = elms->GetWriteBarrierMode();
+ WriteBarrierMode mode = elms->GetWriteBarrierMode(no_gc);
  // Fill in the content
  for (int index = 0; index < number_of_elements; index++) {
    elms->set(index, args[index+1], mode);
@@ -190,15 +258,14 @@ BUILTIN(ArrayCodeGeneric) {
  // Set length and elements on the array.
  array->set_elements(FixedArray::cast(obj));
- array->set_length(len, SKIP_WRITE_BARRIER);
+ array->set_length(len);
  return array;
}
-BUILTIN_END
BUILTIN(ArrayPush) {
- JSArray* array = JSArray::cast(*receiver);
+ JSArray* array = JSArray::cast(*args.receiver());
  ASSERT(array->HasFastElements());
  // Make sure we have space for the elements.
@@ -218,8 +285,10 @@ BUILTIN(ArrayPush) {
  int capacity = new_length + (new_length >> 1) + 16;
  Object* obj = Heap::AllocateFixedArrayWithHoles(capacity);
  if (obj->IsFailure()) return obj;
+ AssertNoAllocation no_gc;
  FixedArray* new_elms = FixedArray::cast(obj);
- WriteBarrierMode mode = new_elms->GetWriteBarrierMode();
+ WriteBarrierMode mode = new_elms->GetWriteBarrierMode(no_gc);
  // Fill out the new array with old elements.
  for (int i = 0; i < len; i++) new_elms->set(i, elms->get(i), mode);
  // Add the provided values.
@@ -230,14 +299,13 @@ BUILTIN(ArrayPush) {
    array->set_elements(new_elms);
  }
  // Set the length.
- array->set_length(Smi::FromInt(new_length), SKIP_WRITE_BARRIER);
+ array->set_length(Smi::FromInt(new_length));
  return array->length();
}
-BUILTIN_END

BUILTIN(ArrayPop) {
- JSArray* array = JSArray::cast(*receiver);
+ JSArray* array = JSArray::cast(*args.receiver());
  ASSERT(array->HasFastElements());
  Object* undefined = Heap::undefined_value();
@@ -249,7 +317,7 @@ BUILTIN(ArrayPop) {
  Object* top = elms->get(len - 1);
  // Set the length.
- array->set_length(Smi::FromInt(len - 1), SKIP_WRITE_BARRIER);
+ array->set_length(Smi::FromInt(len - 1));
  if (!top->IsTheHole()) {
    // Delete the top element.
@@ -265,7 +333,6 @@ BUILTIN(ArrayPop) {
  return top;
}
-BUILTIN_END
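ArrayPush grows the backing store to new_length + new_length/2 + 16 and copies the old elements under an AssertNoAllocation scope so a cheaper write-barrier mode can be used. The growth rule itself is easy to see in isolation; a tiny sketch (plain C++, none of the heap machinery) that just replays the capacity formula from the code above:

    #include <cstdio>

    // Growth rule used above: 1.5x the requested length plus a small slack,
    // so repeated pushes amortize to O(1) copies per element.
    static int NewCapacity(int new_length) {
      return new_length + (new_length >> 1) + 16;
    }

    int main() {
      int capacity = 0, length = 0;
      for (int i = 0; i < 1000; i++) {
        if (++length > capacity) {
          capacity = NewCapacity(length);
          std::printf("grow to %d at length %d\n", capacity, length);
        }
      }
    }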
// ----------------------------------------------------------------------------- // -----------------------------------------------------------------------------
@ -320,20 +387,20 @@ static inline Object* TypeCheck(int argc,
} }
BUILTIN(HandleApiCall) { template <bool is_construct>
HandleScope scope; static Object* HandleApiCallHelper(
bool is_construct = CalledAsConstructor(); BuiltinArguments<NEEDS_CALLED_FUNCTION> args) {
ASSERT(is_construct == CalledAsConstructor());
// TODO(428): Remove use of static variable, handle API callbacks directly. HandleScope scope;
Handle<JSFunction> function = Handle<JSFunction> function = args.called_function();
Handle<JSFunction>(JSFunction::cast(Builtins::builtin_passed_function));
if (is_construct) { if (is_construct) {
Handle<FunctionTemplateInfo> desc = Handle<FunctionTemplateInfo> desc =
Handle<FunctionTemplateInfo>( Handle<FunctionTemplateInfo>(
FunctionTemplateInfo::cast(function->shared()->function_data())); FunctionTemplateInfo::cast(function->shared()->function_data()));
bool pending_exception = false; bool pending_exception = false;
Factory::ConfigureInstance(desc, Handle<JSObject>::cast(receiver), Factory::ConfigureInstance(desc, Handle<JSObject>::cast(args.receiver()),
&pending_exception); &pending_exception);
ASSERT(Top::has_pending_exception() == pending_exception); ASSERT(Top::has_pending_exception() == pending_exception);
if (pending_exception) return Failure::Exception(); if (pending_exception) return Failure::Exception();
@ -359,15 +426,13 @@ BUILTIN(HandleApiCall) {
Object* data_obj = call_data->data(); Object* data_obj = call_data->data();
Object* result; Object* result;
v8::Local<v8::Object> self =
v8::Utils::ToLocal(Handle<JSObject>::cast(receiver));
Handle<Object> data_handle(data_obj); Handle<Object> data_handle(data_obj);
v8::Local<v8::Value> data = v8::Utils::ToLocal(data_handle); v8::Local<v8::Value> data = v8::Utils::ToLocal(data_handle);
ASSERT(raw_holder->IsJSObject()); ASSERT(raw_holder->IsJSObject());
v8::Local<v8::Function> callee = v8::Utils::ToLocal(function); v8::Local<v8::Function> callee = v8::Utils::ToLocal(function);
Handle<JSObject> holder_handle(JSObject::cast(raw_holder)); Handle<JSObject> holder_handle(JSObject::cast(raw_holder));
v8::Local<v8::Object> holder = v8::Utils::ToLocal(holder_handle); v8::Local<v8::Object> holder = v8::Utils::ToLocal(holder_handle);
LOG(ApiObjectAccess("call", JSObject::cast(*receiver))); LOG(ApiObjectAccess("call", JSObject::cast(*args.receiver())));
v8::Arguments new_args = v8::ImplementationUtilities::NewArguments( v8::Arguments new_args = v8::ImplementationUtilities::NewArguments(
data, data,
holder, holder,
@ -395,16 +460,26 @@ BUILTIN(HandleApiCall) {
if (!is_construct || result->IsJSObject()) return result; if (!is_construct || result->IsJSObject()) return result;
} }
return *receiver; return *args.receiver();
}
BUILTIN(HandleApiCall) {
return HandleApiCallHelper<false>(args);
}
BUILTIN(HandleApiCallConstruct) {
return HandleApiCallHelper<true>(args);
} }
BUILTIN_END
// Helper function to handle calls to non-function objects created through the // Helper function to handle calls to non-function objects created through the
// API. The object can be called as either a constructor (using new) or just as // API. The object can be called as either a constructor (using new) or just as
// a function (without new). // a function (without new).
static Object* HandleApiCallAsFunctionOrConstructor(bool is_construct_call, static Object* HandleApiCallAsFunctionOrConstructor(
Arguments args) { bool is_construct_call,
BuiltinArguments<NO_EXTRA_ARGUMENTS> args) {
// Non-functions are never called as constructors. Even if this is an object // Non-functions are never called as constructors. Even if this is an object
// called as a constructor the delegate call is not a construct call. // called as a constructor the delegate call is not a construct call.
ASSERT(!CalledAsConstructor()); ASSERT(!CalledAsConstructor());
@ -412,7 +487,7 @@ static Object* HandleApiCallAsFunctionOrConstructor(bool is_construct_call,
Handle<Object> receiver = args.at<Object>(0); Handle<Object> receiver = args.at<Object>(0);
// Get the object called. // Get the object called.
JSObject* obj = JSObject::cast(*receiver); JSObject* obj = JSObject::cast(*args.receiver());
// Get the invocation callback from the function descriptor that was // Get the invocation callback from the function descriptor that was
// used to create the called object. // used to create the called object.
@ -432,12 +507,12 @@ static Object* HandleApiCallAsFunctionOrConstructor(bool is_construct_call,
Object* result; Object* result;
{ HandleScope scope; { HandleScope scope;
v8::Local<v8::Object> self = v8::Local<v8::Object> self =
v8::Utils::ToLocal(Handle<JSObject>::cast(receiver)); v8::Utils::ToLocal(Handle<JSObject>::cast(args.receiver()));
Handle<Object> data_handle(data_obj); Handle<Object> data_handle(data_obj);
v8::Local<v8::Value> data = v8::Utils::ToLocal(data_handle); v8::Local<v8::Value> data = v8::Utils::ToLocal(data_handle);
Handle<JSFunction> callee_handle(constructor); Handle<JSFunction> callee_handle(constructor);
v8::Local<v8::Function> callee = v8::Utils::ToLocal(callee_handle); v8::Local<v8::Function> callee = v8::Utils::ToLocal(callee_handle);
LOG(ApiObjectAccess("call non-function", JSObject::cast(*receiver))); LOG(ApiObjectAccess("call non-function", JSObject::cast(*args.receiver())));
v8::Arguments new_args = v8::ImplementationUtilities::NewArguments( v8::Arguments new_args = v8::ImplementationUtilities::NewArguments(
data, data,
self, self,
@ -471,7 +546,6 @@ static Object* HandleApiCallAsFunctionOrConstructor(bool is_construct_call,
BUILTIN(HandleApiCallAsFunction) { BUILTIN(HandleApiCallAsFunction) {
return HandleApiCallAsFunctionOrConstructor(false, args); return HandleApiCallAsFunctionOrConstructor(false, args);
} }
BUILTIN_END
// Handle calls to non-function objects created through the API. This delegate // Handle calls to non-function objects created through the API. This delegate
@ -479,14 +553,6 @@ BUILTIN_END
BUILTIN(HandleApiCallAsConstructor) { BUILTIN(HandleApiCallAsConstructor) {
return HandleApiCallAsFunctionOrConstructor(true, args); return HandleApiCallAsFunctionOrConstructor(true, args);
} }
BUILTIN_END
// TODO(1238487): This is a nasty hack. We need to improve the way we
// call builtins considerable to get rid of this and the hairy macros
// in builtins.cc.
Object* Builtins::builtin_passed_function;
static void Generate_LoadIC_ArrayLength(MacroAssembler* masm) {
@@ -708,7 +774,7 @@ static void Generate_StubNoRegisters_DebugBreak(MacroAssembler* masm) {
Object* Builtins::builtins_[builtin_count] = { NULL, };
const char* Builtins::names_[builtin_count] = { NULL, };
-#define DEF_ENUM_C(name) FUNCTION_ADDR(Builtin_##name),
+#define DEF_ENUM_C(name, ignore) FUNCTION_ADDR(Builtin_##name),
Address Builtins::c_functions_[cfunction_count] = {
  BUILTIN_LIST_C(DEF_ENUM_C)
};
@ -739,14 +805,16 @@ void Builtins::Setup(bool create_heap_objects) {
const char* s_name; // name is only used for generating log information. const char* s_name; // name is only used for generating log information.
int name; int name;
Code::Flags flags; Code::Flags flags;
BuiltinExtraArguments extra_args;
}; };
#define DEF_FUNCTION_PTR_C(name) \ #define DEF_FUNCTION_PTR_C(name, extra_args) \
{ FUNCTION_ADDR(Generate_Adaptor), \ { FUNCTION_ADDR(Generate_Adaptor), \
FUNCTION_ADDR(Builtin_##name), \ FUNCTION_ADDR(Builtin_##name), \
#name, \ #name, \
c_##name, \ c_##name, \
Code::ComputeFlags(Code::BUILTIN) \ Code::ComputeFlags(Code::BUILTIN), \
extra_args \
}, },
#define DEF_FUNCTION_PTR_A(name, kind, state) \ #define DEF_FUNCTION_PTR_A(name, kind, state) \
@ -754,7 +822,8 @@ void Builtins::Setup(bool create_heap_objects) {
NULL, \ NULL, \
#name, \ #name, \
name, \ name, \
Code::ComputeFlags(Code::kind, NOT_IN_LOOP, state) \ Code::ComputeFlags(Code::kind, NOT_IN_LOOP, state), \
NO_EXTRA_ARGUMENTS \
}, },
// Define array of pointers to generators and C builtin functions. // Define array of pointers to generators and C builtin functions.
@ -763,7 +832,8 @@ void Builtins::Setup(bool create_heap_objects) {
BUILTIN_LIST_A(DEF_FUNCTION_PTR_A) BUILTIN_LIST_A(DEF_FUNCTION_PTR_A)
BUILTIN_LIST_DEBUG_A(DEF_FUNCTION_PTR_A) BUILTIN_LIST_DEBUG_A(DEF_FUNCTION_PTR_A)
// Terminator: // Terminator:
{ NULL, NULL, NULL, builtin_count, static_cast<Code::Flags>(0) } { NULL, NULL, NULL, builtin_count, static_cast<Code::Flags>(0),
NO_EXTRA_ARGUMENTS }
}; };
#undef DEF_FUNCTION_PTR_C #undef DEF_FUNCTION_PTR_C
@ -779,12 +849,12 @@ void Builtins::Setup(bool create_heap_objects) {
if (create_heap_objects) { if (create_heap_objects) {
MacroAssembler masm(buffer, sizeof buffer); MacroAssembler masm(buffer, sizeof buffer);
// Generate the code/adaptor. // Generate the code/adaptor.
typedef void (*Generator)(MacroAssembler*, int); typedef void (*Generator)(MacroAssembler*, int, BuiltinExtraArguments);
Generator g = FUNCTION_CAST<Generator>(functions[i].generator); Generator g = FUNCTION_CAST<Generator>(functions[i].generator);
// We pass all arguments to the generator, but it may not use all of // We pass all arguments to the generator, but it may not use all of
// them. This works because the first arguments are on top of the // them. This works because the first arguments are on top of the
// stack. // stack.
g(&masm, functions[i].name); g(&masm, functions[i].name, functions[i].extra_args);
// Move the code into the object heap. // Move the code into the object heap.
CodeDesc desc; CodeDesc desc;
masm.GetCode(&desc); masm.GetCode(&desc);

48
deps/v8/src/builtins.h

@@ -31,20 +31,28 @@
namespace v8 {
namespace internal {

-// Define list of builtins implemented in C.
-#define BUILTIN_LIST_C(V) \
-  V(Illegal) \
-  \
-  V(EmptyFunction) \
-  \
-  V(ArrayCodeGeneric) \
-  \
-  V(ArrayPush) \
-  V(ArrayPop) \
-  \
-  V(HandleApiCall) \
-  V(HandleApiCallAsFunction) \
-  V(HandleApiCallAsConstructor)
+// Specifies extra arguments required by a C++ builtin.
+enum BuiltinExtraArguments {
+  NO_EXTRA_ARGUMENTS = 0,
+  NEEDS_CALLED_FUNCTION = 1
+};
+
+// Define list of builtins implemented in C++.
+#define BUILTIN_LIST_C(V) \
+  V(Illegal, NO_EXTRA_ARGUMENTS) \
+  \
+  V(EmptyFunction, NO_EXTRA_ARGUMENTS) \
+  \
+  V(ArrayCodeGeneric, NO_EXTRA_ARGUMENTS) \
+  \
+  V(ArrayPush, NO_EXTRA_ARGUMENTS) \
+  V(ArrayPop, NO_EXTRA_ARGUMENTS) \
+  \
+  V(HandleApiCall, NEEDS_CALLED_FUNCTION) \
+  V(HandleApiCallConstruct, NEEDS_CALLED_FUNCTION) \
+  V(HandleApiCallAsFunction, NO_EXTRA_ARGUMENTS) \
+  V(HandleApiCallAsConstructor, NO_EXTRA_ARGUMENTS)

// Define list of builtins implemented in assembly.
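Each entry in BUILTIN_LIST_C now carries a second column, and the different expansion macros either ignore it (DEF_ENUM_C) or consume it (the setup table in builtins.cc). A standalone sketch of this X-macro style with per-entry data (made-up list and names, just to show the mechanics):

    #include <cstdio>

    enum Extra { kNone, kNeedsCallee };

    // Each entry carries the builtin name plus its extra-argument requirement.
    #define MY_BUILTIN_LIST(V)    \
      V(Illegal,   kNone)         \
      V(ArrayPush, kNone)         \
      V(ApiCall,   kNeedsCallee)

    // One expansion ignores the second column...
    #define DEF_ENUM(name, ignore) k##name,
    enum Id { MY_BUILTIN_LIST(DEF_ENUM) kCount };
    #undef DEF_ENUM

    // ...another consumes it to build a parallel table.
    #define DEF_ROW(name, extra) { #name, extra },
    struct Row { const char* name; Extra extra; };
    static const Row kTable[] = { MY_BUILTIN_LIST(DEF_ROW) };
    #undef DEF_ROW

    int main() {
      for (int i = 0; i < kCount; i++)
        std::printf("%s needs callee: %d\n",
                    kTable[i].name, kTable[i].extra == kNeedsCallee);
    }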
@ -52,6 +60,7 @@ namespace internal {
V(ArgumentsAdaptorTrampoline, BUILTIN, UNINITIALIZED) \ V(ArgumentsAdaptorTrampoline, BUILTIN, UNINITIALIZED) \
V(JSConstructCall, BUILTIN, UNINITIALIZED) \ V(JSConstructCall, BUILTIN, UNINITIALIZED) \
V(JSConstructStubGeneric, BUILTIN, UNINITIALIZED) \ V(JSConstructStubGeneric, BUILTIN, UNINITIALIZED) \
V(JSConstructStubApi, BUILTIN, UNINITIALIZED) \
V(JSEntryTrampoline, BUILTIN, UNINITIALIZED) \ V(JSEntryTrampoline, BUILTIN, UNINITIALIZED) \
V(JSConstructEntryTrampoline, BUILTIN, UNINITIALIZED) \ V(JSConstructEntryTrampoline, BUILTIN, UNINITIALIZED) \
\ \
@ -169,7 +178,7 @@ class Builtins : public AllStatic {
static const char* Lookup(byte* pc); static const char* Lookup(byte* pc);
enum Name { enum Name {
#define DEF_ENUM_C(name) name, #define DEF_ENUM_C(name, ignore) name,
#define DEF_ENUM_A(name, kind, state) name, #define DEF_ENUM_A(name, kind, state) name,
BUILTIN_LIST_C(DEF_ENUM_C) BUILTIN_LIST_C(DEF_ENUM_C)
BUILTIN_LIST_A(DEF_ENUM_A) BUILTIN_LIST_A(DEF_ENUM_A)
@ -180,7 +189,7 @@ class Builtins : public AllStatic {
}; };
enum CFunctionId { enum CFunctionId {
#define DEF_ENUM_C(name) c_##name, #define DEF_ENUM_C(name, ignore) c_##name,
BUILTIN_LIST_C(DEF_ENUM_C) BUILTIN_LIST_C(DEF_ENUM_C)
#undef DEF_ENUM_C #undef DEF_ENUM_C
cfunction_count cfunction_count
@ -212,8 +221,6 @@ class Builtins : public AllStatic {
static Handle<Code> GetCode(JavaScript id, bool* resolved); static Handle<Code> GetCode(JavaScript id, bool* resolved);
static int NumberOfJavaScriptBuiltins() { return id_count; } static int NumberOfJavaScriptBuiltins() { return id_count; }
static Object* builtin_passed_function;
private: private:
// The external C++ functions called from the code. // The external C++ functions called from the code.
static Address c_functions_[cfunction_count]; static Address c_functions_[cfunction_count];
@ -226,9 +233,12 @@ class Builtins : public AllStatic {
static const char* javascript_names_[id_count]; static const char* javascript_names_[id_count];
static int javascript_argc_[id_count]; static int javascript_argc_[id_count];
static void Generate_Adaptor(MacroAssembler* masm, CFunctionId id); static void Generate_Adaptor(MacroAssembler* masm,
CFunctionId id,
BuiltinExtraArguments extra_args);
static void Generate_JSConstructCall(MacroAssembler* masm); static void Generate_JSConstructCall(MacroAssembler* masm);
static void Generate_JSConstructStubGeneric(MacroAssembler* masm); static void Generate_JSConstructStubGeneric(MacroAssembler* masm);
static void Generate_JSConstructStubApi(MacroAssembler* masm);
static void Generate_JSEntryTrampoline(MacroAssembler* masm); static void Generate_JSEntryTrampoline(MacroAssembler* masm);
static void Generate_JSConstructEntryTrampoline(MacroAssembler* masm); static void Generate_JSConstructEntryTrampoline(MacroAssembler* masm);
static void Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm); static void Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm);

4
deps/v8/src/code-stubs.h

@@ -55,9 +55,9 @@ namespace internal {
  V(CounterOp) \
  V(ArgumentsAccess) \
  V(RegExpExec) \
- V(Runtime) \
  V(CEntry) \
- V(JSEntry)
+ V(JSEntry) \
+ V(DebuggerStatement)
// List of code stubs only used on ARM platforms. // List of code stubs only used on ARM platforms.
#ifdef V8_TARGET_ARCH_ARM #ifdef V8_TARGET_ARCH_ARM

43
deps/v8/src/codegen.cc

@@ -216,13 +216,19 @@ Handle<Code> CodeGenerator::MakeCodeEpilogue(FunctionLiteral* fun,
// the compiler.cc code.
Handle<Code> CodeGenerator::MakeCode(FunctionLiteral* fun,
                                     Handle<Script> script,
-                                    bool is_eval) {
+                                    bool is_eval,
+                                    CompilationInfo* info) {
+ if (!script->IsUndefined() && !script->source()->IsUndefined()) {
+   int len = String::cast(script->source())->length();
+   Counters::total_old_codegen_source_size.Increment(len);
+ }
  MakeCodePrologue(fun);
  // Generate code.
  const int kInitialBufferSize = 4 * KB;
- CodeGenerator cgen(kInitialBufferSize, script, is_eval);
+ MacroAssembler masm(NULL, kInitialBufferSize);
+ CodeGenerator cgen(&masm, script, is_eval);
  CodeGeneratorScope scope(&cgen);
- cgen.GenCode(fun);
+ cgen.Generate(fun, PRIMARY, info);
  if (cgen.HasStackOverflow()) {
    ASSERT(!Top::has_pending_exception());
    return Handle<Code>::null();
@@ -344,6 +350,7 @@ CodeGenerator::InlineRuntimeLUT CodeGenerator::kInlineRuntimeLUT[] = {
  {&CodeGenerator::GenerateRandomPositiveSmi, "_RandomPositiveSmi"},
  {&CodeGenerator::GenerateIsObject, "_IsObject"},
  {&CodeGenerator::GenerateIsFunction, "_IsFunction"},
+ {&CodeGenerator::GenerateIsUndetectableObject, "_IsUndetectableObject"},
  {&CodeGenerator::GenerateStringAdd, "_StringAdd"},
  {&CodeGenerator::GenerateSubString, "_SubString"},
  {&CodeGenerator::GenerateStringCompare, "_StringCompare"},
@ -446,11 +453,6 @@ void CodeGenerator::CodeForSourcePosition(int pos) {
} }
const char* RuntimeStub::GetName() {
return Runtime::FunctionForId(id_)->stub_name;
}
const char* GenericUnaryOpStub::GetName() { const char* GenericUnaryOpStub::GetName() {
switch (op_) { switch (op_) {
case Token::SUB: case Token::SUB:
@ -468,14 +470,6 @@ const char* GenericUnaryOpStub::GetName() {
} }
void RuntimeStub::Generate(MacroAssembler* masm) {
Runtime::Function* f = Runtime::FunctionForId(id_);
masm->TailCallRuntime(ExternalReference(f),
num_arguments_,
f->result_size);
}
void ArgumentsAccessStub::Generate(MacroAssembler* masm) { void ArgumentsAccessStub::Generate(MacroAssembler* masm) {
switch (type_) { switch (type_) {
case READ_LENGTH: GenerateReadLength(masm); break; case READ_LENGTH: GenerateReadLength(masm); break;
@ -485,6 +479,17 @@ void ArgumentsAccessStub::Generate(MacroAssembler* masm) {
} }
int CEntryStub::MinorKey() {
ASSERT(result_size_ <= 2);
#ifdef _WIN64
return ExitFrameModeBits::encode(mode_)
| IndirectResultBits::encode(result_size_ > 1);
#else
return ExitFrameModeBits::encode(mode_);
#endif
}
bool ApiGetterEntryStub::GetCustomCache(Code** code_out) { bool ApiGetterEntryStub::GetCustomCache(Code** code_out) {
Object* cache = info()->load_stub_cache(); Object* cache = info()->load_stub_cache();
if (cache->IsUndefined()) { if (cache->IsUndefined()) {
@ -501,4 +506,10 @@ void ApiGetterEntryStub::SetCustomCache(Code* value) {
} }
void DebuggerStatementStub::Generate(MacroAssembler* masm) {
Runtime::Function* f = Runtime::FunctionForId(Runtime::kDebugBreak);
masm->TailCallRuntime(ExternalReference(f), 0, f->result_size);
}
} } // namespace v8::internal } } // namespace v8::internal

128
deps/v8/src/codegen.h

@@ -55,7 +55,7 @@
//   CodeGenerator
//   ~CodeGenerator
//   ProcessDeferred
-//   GenCode
+//   Generate
//   ComputeLazyCompile
//   BuildBoilerplate
//   ComputeCallInitialize
@@ -181,43 +181,6 @@ class DeferredCode: public ZoneObject {
  DISALLOW_COPY_AND_ASSIGN(DeferredCode);
};
// RuntimeStub models code stubs calling entry points in the Runtime class.
class RuntimeStub : public CodeStub {
public:
explicit RuntimeStub(Runtime::FunctionId id, int num_arguments)
: id_(id), num_arguments_(num_arguments) { }
void Generate(MacroAssembler* masm);
// Disassembler support. It is useful to be able to print the name
// of the runtime function called through this stub.
static const char* GetNameFromMinorKey(int minor_key) {
return Runtime::FunctionForId(IdField::decode(minor_key))->stub_name;
}
private:
Runtime::FunctionId id_;
int num_arguments_;
class ArgumentField: public BitField<int, 0, 16> {};
class IdField: public BitField<Runtime::FunctionId, 16, kMinorBits - 16> {};
Major MajorKey() { return Runtime; }
int MinorKey() {
return IdField::encode(id_) | ArgumentField::encode(num_arguments_);
}
const char* GetName();
#ifdef DEBUG
void Print() {
PrintF("RuntimeStub (id %s)\n", Runtime::FunctionForId(id_)->name);
}
#endif
};
class StackCheckStub : public CodeStub { class StackCheckStub : public CodeStub {
public: public:
StackCheckStub() { } StackCheckStub() { }
@ -367,25 +330,30 @@ class CompareStub: public CodeStub {
class CEntryStub : public CodeStub { class CEntryStub : public CodeStub {
public: public:
explicit CEntryStub(int result_size) : result_size_(result_size) { } explicit CEntryStub(int result_size,
ExitFrame::Mode mode = ExitFrame::MODE_NORMAL)
: result_size_(result_size), mode_(mode) { }
void Generate(MacroAssembler* masm) { GenerateBody(masm, false); } void Generate(MacroAssembler* masm);
protected: private:
void GenerateBody(MacroAssembler* masm, bool is_debug_break);
void GenerateCore(MacroAssembler* masm, void GenerateCore(MacroAssembler* masm,
Label* throw_normal_exception, Label* throw_normal_exception,
Label* throw_termination_exception, Label* throw_termination_exception,
Label* throw_out_of_memory_exception, Label* throw_out_of_memory_exception,
ExitFrame::Mode mode,
bool do_gc, bool do_gc,
bool always_allocate_scope); bool always_allocate_scope);
void GenerateThrowTOS(MacroAssembler* masm); void GenerateThrowTOS(MacroAssembler* masm);
void GenerateThrowUncatchable(MacroAssembler* masm, void GenerateThrowUncatchable(MacroAssembler* masm,
UncatchableExceptionType type); UncatchableExceptionType type);
private:
// Number of pointers/values returned. // Number of pointers/values returned.
int result_size_; const int result_size_;
const ExitFrame::Mode mode_;
// Minor key encoding
class ExitFrameModeBits: public BitField<ExitFrame::Mode, 0, 1> {};
class IndirectResultBits: public BitField<bool, 1, 1> {};
Major MajorKey() { return CEntry; } Major MajorKey() { return CEntry; }
// Minor key must differ if different result_size_ values means different // Minor key must differ if different result_size_ values means different
@ -422,16 +390,18 @@ class ApiGetterEntryStub : public CodeStub {
}; };
class CEntryDebugBreakStub : public CEntryStub { // Mark the debugger statement to be recognized by debugger (by the MajorKey)
class DebuggerStatementStub : public CodeStub {
public: public:
CEntryDebugBreakStub() : CEntryStub(1) { } DebuggerStatementStub() { }
void Generate(MacroAssembler* masm) { GenerateBody(masm, true); } void Generate(MacroAssembler* masm);
private: private:
int MinorKey() { return 1; } Major MajorKey() { return DebuggerStatement; }
int MinorKey() { return 0; }
const char* GetName() { return "CEntryDebugBreakStub"; } const char* GetName() { return "DebuggerStatementStub"; }
}; };
@ -516,6 +486,64 @@ class RegExpExecStub: public CodeStub {
}; };
class CallFunctionStub: public CodeStub {
public:
CallFunctionStub(int argc, InLoopFlag in_loop, CallFunctionFlags flags)
: argc_(argc), in_loop_(in_loop), flags_(flags) { }
void Generate(MacroAssembler* masm);
private:
int argc_;
InLoopFlag in_loop_;
CallFunctionFlags flags_;
#ifdef DEBUG
void Print() {
PrintF("CallFunctionStub (args %d, in_loop %d, flags %d)\n",
argc_,
static_cast<int>(in_loop_),
static_cast<int>(flags_));
}
#endif
// Minor key encoding in 31 bits AAAAAAAAAAAAAAAAAAAAAFI A(rgs)F(lag)I(nloop).
class InLoopBits: public BitField<InLoopFlag, 0, 1> {};
class FlagBits: public BitField<CallFunctionFlags, 1, 1> {};
class ArgcBits: public BitField<int, 2, 29> {};
Major MajorKey() { return CallFunction; }
int MinorKey() {
// Encode the parameters in a unique 31 bit value.
return InLoopBits::encode(in_loop_)
| FlagBits::encode(flags_)
| ArgcBits::encode(argc_);
}
InLoopFlag InLoop() { return in_loop_; }
bool ReceiverMightBeValue() {
return (flags_ & RECEIVER_MIGHT_BE_VALUE) != 0;
}
public:
static int ExtractArgcFromMinorKey(int minor_key) {
return ArgcBits::decode(minor_key);
}
};
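CallFunctionStub packs its three parameters into the 31-bit minor key with BitField helpers (1 bit for the in-loop flag, 1 bit for the call flags, 29 bits for argc), and ArgcBits::decode later recovers the argument count from the key alone. A self-contained sketch of that encode/decode pattern (a simplified BitField, not V8's template):

    #include <cassert>
    #include <cstdint>
    #include <cstdio>

    // Simplified version of the BitField<type, shift, size> helper.
    template <typename T, int shift, int size>
    struct BitField {
      static constexpr uint32_t kMask = ((1u << size) - 1) << shift;
      static uint32_t encode(T value) { return static_cast<uint32_t>(value) << shift; }
      static T decode(uint32_t key) { return static_cast<T>((key & kMask) >> shift); }
    };

    typedef BitField<bool, 0, 1>  InLoopBits;   // I
    typedef BitField<bool, 1, 1>  FlagBits;     // F
    typedef BitField<int,  2, 29> ArgcBits;     // AAAA...A

    static uint32_t MinorKey(int argc, bool in_loop, bool receiver_might_be_value) {
      return InLoopBits::encode(in_loop) |
             FlagBits::encode(receiver_might_be_value) |
             ArgcBits::encode(argc);
    }

    int main() {
      uint32_t key = MinorKey(3, true, false);
      assert(ArgcBits::decode(key) == 3);
      std::printf("key=0x%x argc=%d\n", key, ArgcBits::decode(key));
    }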
class ToBooleanStub: public CodeStub {
public:
ToBooleanStub() { }
void Generate(MacroAssembler* masm);
private:
Major MajorKey() { return ToBoolean; }
int MinorKey() { return 0; }
};
} // namespace internal } // namespace internal
} // namespace v8 } // namespace v8

735
deps/v8/src/compiler.cc

@@ -33,6 +33,7 @@
#include "compiler.h" #include "compiler.h"
#include "debug.h" #include "debug.h"
#include "fast-codegen.h" #include "fast-codegen.h"
#include "full-codegen.h"
#include "oprofile-agent.h" #include "oprofile-agent.h"
#include "rewriter.h" #include "rewriter.h"
#include "scopes.h" #include "scopes.h"
@ -42,51 +43,11 @@ namespace v8 {
namespace internal { namespace internal {
class CodeGenSelector: public AstVisitor {
public:
enum CodeGenTag { NORMAL, FAST };
CodeGenSelector()
: has_supported_syntax_(true),
context_(Expression::kUninitialized) {
}
CodeGenTag Select(FunctionLiteral* fun);
private:
// Visit an expression in a given expression context.
void ProcessExpression(Expression* expr, Expression::Context context) {
ASSERT(expr->context() == Expression::kUninitialized ||
expr->context() == context);
Expression::Context saved = context_;
context_ = context;
Visit(expr);
expr->set_context(context);
context_ = saved;
}
void VisitDeclarations(ZoneList<Declaration*>* decls);
void VisitStatements(ZoneList<Statement*>* stmts);
// AST node visit functions.
#define DECLARE_VISIT(type) virtual void Visit##type(type* node);
AST_NODE_LIST(DECLARE_VISIT)
#undef DECLARE_VISIT
bool has_supported_syntax_;
// The desired expression context of the currently visited expression.
Expression::Context context_;
DISALLOW_COPY_AND_ASSIGN(CodeGenSelector);
};
static Handle<Code> MakeCode(FunctionLiteral* literal, static Handle<Code> MakeCode(FunctionLiteral* literal,
Handle<Script> script, Handle<Script> script,
Handle<Context> context, Handle<Context> context,
bool is_eval, bool is_eval,
Handle<SharedFunctionInfo> shared) { CompilationInfo* info) {
ASSERT(literal != NULL); ASSERT(literal != NULL);
// Rewrite the AST by introducing .result assignments where needed. // Rewrite the AST by introducing .result assignments where needed.
@ -121,39 +82,41 @@ static Handle<Code> MakeCode(FunctionLiteral* literal,
return Handle<Code>::null(); return Handle<Code>::null();
} }
// Generate code and return it. // Generate code and return it. Code generator selection is governed by
if (FLAG_fast_compiler) { // which backends are enabled and whether the function is considered
// If there is no shared function info, try the fast code // run-once code or not:
// generator for code in the global scope. Otherwise obey the //
// explicit hint in the shared function info. // --full-compiler enables the dedicated backend for code we expect to be
// If always_fast_compiler is true, always try the fast compiler. // run once
if (shared.is_null() && !literal->scope()->is_global_scope() && // --fast-compiler enables a speculative optimizing backend (for
!FLAG_always_fast_compiler) { // non-run-once code)
if (FLAG_trace_bailout) PrintF("Non-global scope\n"); //
} else if (!shared.is_null() && !shared->try_fast_codegen() && // The normal choice of backend can be overridden with the flags
!FLAG_always_fast_compiler) { // --always-full-compiler and --always-fast-compiler, which are mutually
if (FLAG_trace_bailout) PrintF("No hint to try fast\n"); // incompatible.
} else { CHECK(!FLAG_always_full_compiler || !FLAG_always_fast_compiler);
CodeGenSelector selector;
CodeGenSelector::CodeGenTag code_gen = selector.Select(literal); Handle<SharedFunctionInfo> shared = info->shared_info();
if (code_gen == CodeGenSelector::FAST) { bool is_run_once = (shared.is_null())
return FastCodeGenerator::MakeCode(literal, script, is_eval); ? literal->scope()->is_global_scope()
} : (shared->is_toplevel() || shared->try_full_codegen());
ASSERT(code_gen == CodeGenSelector::NORMAL);
if (FLAG_always_full_compiler || (FLAG_full_compiler && is_run_once)) {
FullCodeGenSyntaxChecker checker;
checker.Check(literal);
if (checker.has_supported_syntax()) {
return FullCodeGenerator::MakeCode(literal, script, is_eval);
}
} else if (FLAG_always_fast_compiler ||
(FLAG_fast_compiler && !is_run_once)) {
FastCodeGenSyntaxChecker checker;
checker.Check(literal, info);
if (checker.has_supported_syntax()) {
return FastCodeGenerator::MakeCode(literal, script, is_eval, info);
} }
} }
return CodeGenerator::MakeCode(literal, script, is_eval);
}
return CodeGenerator::MakeCode(literal, script, is_eval, info);
static bool IsValidJSON(FunctionLiteral* lit) {
if (lit->body()->length() != 1)
return false;
Statement* stmt = lit->body()->at(0);
if (stmt->AsExpressionStatement() == NULL)
return false;
Expression* expr = stmt->AsExpressionStatement()->expression();
return expr->IsValidJSON();
} }
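The rewritten MakeCode above selects among three backends: run-once code (toplevel code, or functions hinted via try_full_codegen) goes to the full compiler, other code may go to the speculative fast compiler, and anything whose syntax either checker rejects falls back to the classic code generator, with the two --always-* flags acting as mutually exclusive overrides. A rough standalone sketch of that decision ladder (simplified stand-ins for the flags and syntax checkers, not the real compiler API):

    #include <cstdio>

    // Hypothetical, simplified stand-ins for the command-line flags.
    struct Flags {
      bool always_full_compiler, full_compiler;
      bool always_fast_compiler, fast_compiler;
    };

    enum Backend { kFullCodegen, kFastCodegen, kClassicCodegen };

    static Backend Select(const Flags& f, bool is_run_once,
                          bool full_syntax_ok, bool fast_syntax_ok) {
      // The two "always" flags are mutually exclusive overrides.
      if (f.always_full_compiler || (f.full_compiler && is_run_once)) {
        if (full_syntax_ok) return kFullCodegen;
      } else if (f.always_fast_compiler || (f.fast_compiler && !is_run_once)) {
        if (fast_syntax_ok) return kFastCodegen;
      }
      return kClassicCodegen;  // fallback for unsupported syntax or no match
    }

    int main() {
      Flags f = {false, true, false, true};
      std::printf("%d\n", Select(f, /*run once*/ true,  true, true));   // full
      std::printf("%d\n", Select(f, /*run once*/ false, true, true));   // fast
      std::printf("%d\n", Select(f, true, /*full rejects*/ false, true));  // classic
    }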
@ -171,8 +134,8 @@ static Handle<JSFunction> MakeFunction(bool is_global,
ASSERT(!i::Top::global_context().is_null()); ASSERT(!i::Top::global_context().is_null());
script->set_context_data((*i::Top::global_context())->data()); script->set_context_data((*i::Top::global_context())->data());
#ifdef ENABLE_DEBUGGER_SUPPORT
bool is_json = (validate == Compiler::VALIDATE_JSON); bool is_json = (validate == Compiler::VALIDATE_JSON);
#ifdef ENABLE_DEBUGGER_SUPPORT
if (is_eval || is_json) { if (is_eval || is_json) {
script->set_compilation_type( script->set_compilation_type(
is_json ? Smi::FromInt(Script::COMPILATION_TYPE_JSON) : is_json ? Smi::FromInt(Script::COMPILATION_TYPE_JSON) :
@ -180,12 +143,14 @@ static Handle<JSFunction> MakeFunction(bool is_global,
// For eval scripts add information on the function from which eval was // For eval scripts add information on the function from which eval was
// called. // called.
if (is_eval) { if (is_eval) {
JavaScriptFrameIterator it; StackTraceFrameIterator it;
script->set_eval_from_shared( if (!it.done()) {
JSFunction::cast(it.frame()->function())->shared()); script->set_eval_from_shared(
int offset = static_cast<int>( JSFunction::cast(it.frame()->function())->shared());
it.frame()->pc() - it.frame()->code()->instruction_start()); int offset = static_cast<int>(
script->set_eval_from_instructions_offset(Smi::FromInt(offset)); it.frame()->pc() - it.frame()->code()->instruction_start());
script->set_eval_from_instructions_offset(Smi::FromInt(offset));
}
} }
} }
@ -197,7 +162,8 @@ static Handle<JSFunction> MakeFunction(bool is_global,
ASSERT(is_eval || is_global); ASSERT(is_eval || is_global);
// Build AST. // Build AST.
FunctionLiteral* lit = MakeAST(is_global, script, extension, pre_data); FunctionLiteral* lit =
MakeAST(is_global, script, extension, pre_data, is_json);
// Check for parse errors. // Check for parse errors.
if (lit == NULL) { if (lit == NULL) {
@ -205,19 +171,6 @@ static Handle<JSFunction> MakeFunction(bool is_global,
return Handle<JSFunction>::null(); return Handle<JSFunction>::null();
} }
// When parsing JSON we do an ordinary parse and then afterwards
// check the AST to ensure it was well-formed. If not we give a
// syntax error.
if (validate == Compiler::VALIDATE_JSON && !IsValidJSON(lit)) {
HandleScope scope;
Handle<JSArray> args = Factory::NewJSArray(1);
Handle<Object> source(script->source());
SetElement(args, 0, source);
Handle<Object> result = Factory::NewSyntaxError("invalid_json", args);
Top::Throw(*result, NULL);
return Handle<JSFunction>::null();
}
// Measure how long it takes to do the compilation; only take the // Measure how long it takes to do the compilation; only take the
// rest of the function into account to avoid overlap with the // rest of the function into account to avoid overlap with the
// parsing statistics. // parsing statistics.
@ -227,8 +180,10 @@ static Handle<JSFunction> MakeFunction(bool is_global,
HistogramTimerScope timer(rate); HistogramTimerScope timer(rate);
// Compile the code. // Compile the code.
Handle<Code> code = MakeCode(lit, script, context, is_eval, CompilationInfo info(Handle<SharedFunctionInfo>::null(),
Handle<SharedFunctionInfo>::null()); Handle<Object>::null(), // No receiver.
0); // Not nested in a loop.
Handle<Code> code = MakeCode(lit, script, context, is_eval, &info);
// Check for stack-overflow exceptions. // Check for stack-overflow exceptions.
if (code.is_null()) { if (code.is_null()) {
@ -389,8 +344,7 @@ Handle<JSFunction> Compiler::CompileEval(Handle<String> source,
} }
bool Compiler::CompileLazy(Handle<SharedFunctionInfo> shared, bool Compiler::CompileLazy(CompilationInfo* info) {
int loop_nesting) {
CompilationZoneScope zone_scope(DELETE_ON_EXIT); CompilationZoneScope zone_scope(DELETE_ON_EXIT);
// The VM is in the COMPILER state until exiting this function. // The VM is in the COMPILER state until exiting this function.
@ -399,6 +353,7 @@ bool Compiler::CompileLazy(Handle<SharedFunctionInfo> shared,
PostponeInterruptsScope postpone; PostponeInterruptsScope postpone;
// Compute name, source code and script data. // Compute name, source code and script data.
Handle<SharedFunctionInfo> shared = info->shared_info();
Handle<String> name(String::cast(shared->name())); Handle<String> name(String::cast(shared->name()));
Handle<Script> script(Script::cast(shared->script())); Handle<Script> script(Script::cast(shared->script()));
@ -420,17 +375,17 @@ bool Compiler::CompileLazy(Handle<SharedFunctionInfo> shared,
return false; return false;
} }
// Update the loop nesting in the function literal.
lit->set_loop_nesting(loop_nesting);
// Measure how long it takes to do the lazy compilation; only take // Measure how long it takes to do the lazy compilation; only take
// the rest of the function into account to avoid overlap with the // the rest of the function into account to avoid overlap with the
// lazy parsing statistics. // lazy parsing statistics.
HistogramTimerScope timer(&Counters::compile_lazy); HistogramTimerScope timer(&Counters::compile_lazy);
// Compile the code. // Compile the code.
Handle<Code> code = MakeCode(lit, script, Handle<Context>::null(), false, Handle<Code> code = MakeCode(lit,
shared); script,
Handle<Context>::null(),
false,
info);
// Check for stack-overflow exception. // Check for stack-overflow exception.
if (code.is_null()) { if (code.is_null()) {
@ -508,24 +463,43 @@ Handle<JSFunction> Compiler::BuildBoilerplate(FunctionLiteral* literal,
return Handle<JSFunction>::null(); return Handle<JSFunction>::null();
} }
// Generate code and return it. // Generate code and return it. The way that the compilation mode
// is controlled by the command-line flags is described in
// the static helper function MakeCode.
CompilationInfo info(Handle<SharedFunctionInfo>::null(),
Handle<Object>::null(), // No receiver.
0); // Not nested in a loop.
CHECK(!FLAG_always_full_compiler || !FLAG_always_fast_compiler);
bool is_run_once = literal->try_full_codegen();
bool is_compiled = false; bool is_compiled = false;
if (FLAG_fast_compiler && literal->try_fast_codegen()) { if (FLAG_always_full_compiler || (FLAG_full_compiler && is_run_once)) {
CodeGenSelector selector; FullCodeGenSyntaxChecker checker;
CodeGenSelector::CodeGenTag code_gen = selector.Select(literal); checker.Check(literal);
if (code_gen == CodeGenSelector::FAST) { if (checker.has_supported_syntax()) {
code = FastCodeGenerator::MakeCode(literal, code = FullCodeGenerator::MakeCode(literal,
script, script,
false); // Not eval. false); // Not eval.
is_compiled = true; is_compiled = true;
} }
} else if (FLAG_always_fast_compiler ||
(FLAG_fast_compiler && !is_run_once)) {
// Since we are not lazily compiling we do not have a receiver to
// specialize for.
FastCodeGenSyntaxChecker checker;
checker.Check(literal, &info);
if (checker.has_supported_syntax()) {
code = FastCodeGenerator::MakeCode(literal, script, false, &info);
is_compiled = true;
}
} }
if (!is_compiled) { if (!is_compiled) {
// We didn't try the fast compiler, or we failed to select it. // We fall back to the classic V8 code generator.
code = CodeGenerator::MakeCode(literal, code = CodeGenerator::MakeCode(literal,
script, script,
false); // Not eval. false, // Not eval.
&info);
} }
// Check for stack-overflow exception. // Check for stack-overflow exception.
@ -584,549 +558,8 @@ void Compiler::SetFunctionInfo(Handle<JSFunction> fun,
fun->shared()->SetThisPropertyAssignmentsInfo( fun->shared()->SetThisPropertyAssignmentsInfo(
lit->has_only_simple_this_property_assignments(), lit->has_only_simple_this_property_assignments(),
*lit->this_property_assignments()); *lit->this_property_assignments());
fun->shared()->set_try_fast_codegen(lit->try_fast_codegen()); fun->shared()->set_try_full_codegen(lit->try_full_codegen());
}
CodeGenSelector::CodeGenTag CodeGenSelector::Select(FunctionLiteral* fun) {
Scope* scope = fun->scope();
if (scope->num_heap_slots() > 0) {
// We support functions with a local context if they do not have
// parameters that need to be copied into the context.
for (int i = 0, len = scope->num_parameters(); i < len; i++) {
Slot* slot = scope->parameter(i)->slot();
if (slot != NULL && slot->type() == Slot::CONTEXT) {
if (FLAG_trace_bailout) {
PrintF("Function has context-allocated parameters.\n");
}
return NORMAL;
}
}
}
has_supported_syntax_ = true;
VisitDeclarations(scope->declarations());
if (!has_supported_syntax_) return NORMAL;
VisitStatements(fun->body());
return has_supported_syntax_ ? FAST : NORMAL;
}
#define BAILOUT(reason) \
do { \
if (FLAG_trace_bailout) { \
PrintF("%s\n", reason); \
} \
has_supported_syntax_ = false; \
return; \
} while (false)
#define CHECK_BAILOUT \
do { \
if (!has_supported_syntax_) return; \
} while (false)
void CodeGenSelector::VisitDeclarations(ZoneList<Declaration*>* decls) {
for (int i = 0; i < decls->length(); i++) {
Visit(decls->at(i));
CHECK_BAILOUT;
}
}
void CodeGenSelector::VisitStatements(ZoneList<Statement*>* stmts) {
for (int i = 0, len = stmts->length(); i < len; i++) {
Visit(stmts->at(i));
CHECK_BAILOUT;
}
}
void CodeGenSelector::VisitDeclaration(Declaration* decl) {
Property* prop = decl->proxy()->AsProperty();
if (prop != NULL) {
ProcessExpression(prop->obj(), Expression::kValue);
ProcessExpression(prop->key(), Expression::kValue);
}
if (decl->fun() != NULL) {
ProcessExpression(decl->fun(), Expression::kValue);
}
}
void CodeGenSelector::VisitBlock(Block* stmt) {
VisitStatements(stmt->statements());
}
void CodeGenSelector::VisitExpressionStatement(ExpressionStatement* stmt) {
ProcessExpression(stmt->expression(), Expression::kEffect);
}
void CodeGenSelector::VisitEmptyStatement(EmptyStatement* stmt) {
// EmptyStatement is supported.
}
void CodeGenSelector::VisitIfStatement(IfStatement* stmt) {
ProcessExpression(stmt->condition(), Expression::kTest);
CHECK_BAILOUT;
Visit(stmt->then_statement());
CHECK_BAILOUT;
Visit(stmt->else_statement());
}
void CodeGenSelector::VisitContinueStatement(ContinueStatement* stmt) {
}
void CodeGenSelector::VisitBreakStatement(BreakStatement* stmt) {
}
void CodeGenSelector::VisitReturnStatement(ReturnStatement* stmt) {
ProcessExpression(stmt->expression(), Expression::kValue);
}
void CodeGenSelector::VisitWithEnterStatement(WithEnterStatement* stmt) {
ProcessExpression(stmt->expression(), Expression::kValue);
}
void CodeGenSelector::VisitWithExitStatement(WithExitStatement* stmt) {
// Supported.
}
void CodeGenSelector::VisitSwitchStatement(SwitchStatement* stmt) {
BAILOUT("SwitchStatement");
}
void CodeGenSelector::VisitDoWhileStatement(DoWhileStatement* stmt) {
// We do not handle loops with breaks or continue statements in their
// body. We will bailout when we hit those statements in the body.
ProcessExpression(stmt->cond(), Expression::kTest);
CHECK_BAILOUT;
Visit(stmt->body());
}
void CodeGenSelector::VisitWhileStatement(WhileStatement* stmt) {
// We do not handle loops with breaks or continue statements in their
// body. We will bailout when we hit those statements in the body.
ProcessExpression(stmt->cond(), Expression::kTest);
CHECK_BAILOUT;
Visit(stmt->body());
}
void CodeGenSelector::VisitForStatement(ForStatement* stmt) {
BAILOUT("ForStatement");
}
void CodeGenSelector::VisitForInStatement(ForInStatement* stmt) {
BAILOUT("ForInStatement");
}
void CodeGenSelector::VisitTryCatchStatement(TryCatchStatement* stmt) {
Visit(stmt->try_block());
CHECK_BAILOUT;
Visit(stmt->catch_block());
}
void CodeGenSelector::VisitTryFinallyStatement(TryFinallyStatement* stmt) {
Visit(stmt->try_block());
CHECK_BAILOUT;
Visit(stmt->finally_block());
}
void CodeGenSelector::VisitDebuggerStatement(DebuggerStatement* stmt) {
// Debugger statement is supported.
}
void CodeGenSelector::VisitFunctionLiteral(FunctionLiteral* expr) {
// Function literal is supported.
}
void CodeGenSelector::VisitFunctionBoilerplateLiteral(
FunctionBoilerplateLiteral* expr) {
BAILOUT("FunctionBoilerplateLiteral");
}
void CodeGenSelector::VisitConditional(Conditional* expr) {
ProcessExpression(expr->condition(), Expression::kTest);
CHECK_BAILOUT;
ProcessExpression(expr->then_expression(), context_);
CHECK_BAILOUT;
ProcessExpression(expr->else_expression(), context_);
}
void CodeGenSelector::VisitSlot(Slot* expr) {
UNREACHABLE();
}
void CodeGenSelector::VisitVariableProxy(VariableProxy* expr) {
Expression* rewrite = expr->var()->rewrite();
// A rewrite of NULL indicates a global variable.
if (rewrite != NULL) {
// Non-global.
Slot* slot = rewrite->AsSlot();
if (slot != NULL) {
Slot::Type type = slot->type();
// When LOOKUP slots are enabled, some currently dead code
// implementing unary typeof will become live.
if (type == Slot::LOOKUP) {
BAILOUT("Lookup slot");
}
} else {
#ifdef DEBUG
// Only remaining possibility is a property where the object is
// a slotted variable and the key is a smi.
Property* property = rewrite->AsProperty();
ASSERT_NOT_NULL(property);
Variable* object = property->obj()->AsVariableProxy()->AsVariable();
ASSERT_NOT_NULL(object);
ASSERT_NOT_NULL(object->slot());
ASSERT_NOT_NULL(property->key()->AsLiteral());
ASSERT(property->key()->AsLiteral()->handle()->IsSmi());
#endif
}
}
}
void CodeGenSelector::VisitLiteral(Literal* expr) {
/* Nothing to do. */
}
void CodeGenSelector::VisitRegExpLiteral(RegExpLiteral* expr) {
/* Nothing to do. */
}
void CodeGenSelector::VisitObjectLiteral(ObjectLiteral* expr) {
ZoneList<ObjectLiteral::Property*>* properties = expr->properties();
for (int i = 0, len = properties->length(); i < len; i++) {
ObjectLiteral::Property* property = properties->at(i);
if (property->IsCompileTimeValue()) continue;
switch (property->kind()) {
case ObjectLiteral::Property::CONSTANT:
UNREACHABLE();
// For (non-compile-time) materialized literals and computed
// properties with symbolic keys we will use an IC and therefore not
// generate code for the key.
case ObjectLiteral::Property::COMPUTED: // Fall through.
case ObjectLiteral::Property::MATERIALIZED_LITERAL:
if (property->key()->handle()->IsSymbol()) {
break;
}
// Fall through.
// In all other cases we need the key's value on the stack
// for a runtime call. (Relies on TEMP meaning STACK.)
case ObjectLiteral::Property::GETTER: // Fall through.
case ObjectLiteral::Property::SETTER: // Fall through.
case ObjectLiteral::Property::PROTOTYPE:
ProcessExpression(property->key(), Expression::kValue);
CHECK_BAILOUT;
break;
}
ProcessExpression(property->value(), Expression::kValue);
CHECK_BAILOUT;
}
}
void CodeGenSelector::VisitArrayLiteral(ArrayLiteral* expr) {
ZoneList<Expression*>* subexprs = expr->values();
for (int i = 0, len = subexprs->length(); i < len; i++) {
Expression* subexpr = subexprs->at(i);
if (subexpr->AsLiteral() != NULL) continue;
if (CompileTimeValue::IsCompileTimeValue(subexpr)) continue;
ProcessExpression(subexpr, Expression::kValue);
CHECK_BAILOUT;
}
}
void CodeGenSelector::VisitCatchExtensionObject(CatchExtensionObject* expr) {
ProcessExpression(expr->key(), Expression::kValue);
CHECK_BAILOUT;
ProcessExpression(expr->value(), Expression::kValue);
}
void CodeGenSelector::VisitAssignment(Assignment* expr) {
// We support plain non-compound assignments to properties, parameters and
// non-context (stack-allocated) locals, and global variables.
Token::Value op = expr->op();
if (op == Token::INIT_CONST) BAILOUT("initialize constant");
Variable* var = expr->target()->AsVariableProxy()->AsVariable();
Property* prop = expr->target()->AsProperty();
ASSERT(var == NULL || prop == NULL);
if (var != NULL) {
if (var->mode() == Variable::CONST) {
BAILOUT("Assignment to const");
}
// All global variables are supported.
if (!var->is_global()) {
ASSERT(var->slot() != NULL);
Slot::Type type = var->slot()->type();
if (type == Slot::LOOKUP) {
BAILOUT("Lookup slot");
}
}
} else if (prop != NULL) {
ProcessExpression(prop->obj(), Expression::kValue);
CHECK_BAILOUT;
// We will only visit the key during code generation for keyed property
// stores. Leave its expression context uninitialized for named
// property stores.
if (!prop->key()->IsPropertyName()) {
ProcessExpression(prop->key(), Expression::kValue);
CHECK_BAILOUT;
}
} else {
// This is a throw reference error.
BAILOUT("non-variable/non-property assignment");
}
ProcessExpression(expr->value(), Expression::kValue);
}
void CodeGenSelector::VisitThrow(Throw* expr) {
ProcessExpression(expr->exception(), Expression::kValue);
}
void CodeGenSelector::VisitProperty(Property* expr) {
ProcessExpression(expr->obj(), Expression::kValue);
CHECK_BAILOUT;
ProcessExpression(expr->key(), Expression::kValue);
}
void CodeGenSelector::VisitCall(Call* expr) {
Expression* fun = expr->expression();
ZoneList<Expression*>* args = expr->arguments();
Variable* var = fun->AsVariableProxy()->AsVariable();
// Check for supported calls
if (var != NULL && var->is_possibly_eval()) {
BAILOUT("call to the identifier 'eval'");
} else if (var != NULL && !var->is_this() && var->is_global()) {
// Calls to global variables are supported.
} else if (var != NULL && var->slot() != NULL &&
var->slot()->type() == Slot::LOOKUP) {
BAILOUT("call to a lookup slot");
} else if (fun->AsProperty() != NULL) {
Property* prop = fun->AsProperty();
Literal* literal_key = prop->key()->AsLiteral();
if (literal_key != NULL && literal_key->handle()->IsSymbol()) {
ProcessExpression(prop->obj(), Expression::kValue);
CHECK_BAILOUT;
} else {
ProcessExpression(prop->obj(), Expression::kValue);
CHECK_BAILOUT;
ProcessExpression(prop->key(), Expression::kValue);
CHECK_BAILOUT;
}
} else {
// Otherwise the call is supported if the function expression is.
ProcessExpression(fun, Expression::kValue);
}
// Check all arguments to the call.
for (int i = 0; i < args->length(); i++) {
ProcessExpression(args->at(i), Expression::kValue);
CHECK_BAILOUT;
}
}
void CodeGenSelector::VisitCallNew(CallNew* expr) {
ProcessExpression(expr->expression(), Expression::kValue);
CHECK_BAILOUT;
ZoneList<Expression*>* args = expr->arguments();
// Check all arguments to the call
for (int i = 0; i < args->length(); i++) {
ProcessExpression(args->at(i), Expression::kValue);
CHECK_BAILOUT;
}
}
void CodeGenSelector::VisitCallRuntime(CallRuntime* expr) {
// Check for inline runtime call
if (expr->name()->Get(0) == '_' &&
CodeGenerator::FindInlineRuntimeLUT(expr->name()) != NULL) {
BAILOUT("inlined runtime call");
}
// Check all arguments to the call. (Relies on TEMP meaning STACK.)
for (int i = 0; i < expr->arguments()->length(); i++) {
ProcessExpression(expr->arguments()->at(i), Expression::kValue);
CHECK_BAILOUT;
}
} }
void CodeGenSelector::VisitUnaryOperation(UnaryOperation* expr) {
switch (expr->op()) {
case Token::VOID:
ProcessExpression(expr->expression(), Expression::kEffect);
break;
case Token::NOT:
ProcessExpression(expr->expression(), Expression::kTest);
break;
case Token::TYPEOF:
ProcessExpression(expr->expression(), Expression::kValue);
break;
default:
BAILOUT("UnaryOperation");
}
}
void CodeGenSelector::VisitCountOperation(CountOperation* expr) {
Variable* var = expr->expression()->AsVariableProxy()->AsVariable();
Property* prop = expr->expression()->AsProperty();
ASSERT(var == NULL || prop == NULL);
if (var != NULL) {
// All global variables are supported.
if (!var->is_global()) {
ASSERT(var->slot() != NULL);
Slot::Type type = var->slot()->type();
if (type == Slot::LOOKUP) {
BAILOUT("CountOperation with lookup slot");
}
}
} else if (prop != NULL) {
ProcessExpression(prop->obj(), Expression::kValue);
CHECK_BAILOUT;
// We will only visit the key during code generation for keyed property
// stores. Leave its expression context uninitialized for named
// property stores.
if (!prop->key()->IsPropertyName()) {
ProcessExpression(prop->key(), Expression::kValue);
CHECK_BAILOUT;
}
} else {
// This is a throw reference error.
BAILOUT("CountOperation non-variable/non-property expression");
}
}
void CodeGenSelector::VisitBinaryOperation(BinaryOperation* expr) {
switch (expr->op()) {
case Token::COMMA:
ProcessExpression(expr->left(), Expression::kEffect);
CHECK_BAILOUT;
ProcessExpression(expr->right(), context_);
break;
case Token::OR:
switch (context_) {
case Expression::kUninitialized:
UNREACHABLE();
case Expression::kEffect: // Fall through.
case Expression::kTest: // Fall through.
case Expression::kTestValue:
// The left subexpression's value is not needed, it is in a pure
// test context.
ProcessExpression(expr->left(), Expression::kTest);
break;
case Expression::kValue: // Fall through.
case Expression::kValueTest:
// The left subexpression's value is needed, it is in a hybrid
// value/test context.
ProcessExpression(expr->left(), Expression::kValueTest);
break;
}
CHECK_BAILOUT;
ProcessExpression(expr->right(), context_);
break;
case Token::AND:
switch (context_) {
case Expression::kUninitialized:
UNREACHABLE();
case Expression::kEffect: // Fall through.
case Expression::kTest: // Fall through.
case Expression::kValueTest:
// The left subexpression's value is not needed, it is in a pure
// test context.
ProcessExpression(expr->left(), Expression::kTest);
break;
case Expression::kValue: // Fall through.
case Expression::kTestValue:
// The left subexpression's value is needed, it is in a hybrid
// test/value context.
ProcessExpression(expr->left(), Expression::kTestValue);
break;
}
CHECK_BAILOUT;
ProcessExpression(expr->right(), context_);
break;
case Token::ADD:
case Token::SUB:
case Token::DIV:
case Token::MOD:
case Token::MUL:
case Token::BIT_OR:
case Token::BIT_AND:
case Token::BIT_XOR:
case Token::SHL:
case Token::SHR:
case Token::SAR:
ProcessExpression(expr->left(), Expression::kValue);
CHECK_BAILOUT;
ProcessExpression(expr->right(), Expression::kValue);
break;
default:
BAILOUT("Unsupported binary operation");
}
}
void CodeGenSelector::VisitCompareOperation(CompareOperation* expr) {
ProcessExpression(expr->left(), Expression::kValue);
CHECK_BAILOUT;
ProcessExpression(expr->right(), Expression::kValue);
}
void CodeGenSelector::VisitThisFunction(ThisFunction* expr) {
// ThisFunction is supported.
}
#undef BAILOUT
#undef CHECK_BAILOUT
} } // namespace v8::internal
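The BAILOUT and CHECK_BAILOUT macros used throughout CodeGenSelector are defined earlier in compiler.cc and are only #undef'd here. For orientation, a plausible shape of such a selector macro pair (illustrative only, not the verbatim definitions from the file) is:

// Illustrative only: record why the specialized backend cannot be used and
// abort the current visit; CHECK_BAILOUT propagates the abort upward.
#define BAILOUT(reason)                         \
  do {                                          \
    if (FLAG_trace_bailout) {                   \
      PrintF("%s\n", reason);                   \
    }                                           \
    has_supported_syntax_ = false;              \
    return;                                     \
  } while (false)

#define CHECK_BAILOUT                           \
  do {                                          \
    if (!has_supported_syntax_) return;         \
  } while (false)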

37
deps/v8/src/compiler.h

@ -35,6 +35,41 @@
namespace v8 {
namespace internal {
// CompilationInfo encapsulates some information known at compile time.
class CompilationInfo BASE_EMBEDDED {
public:
CompilationInfo(Handle<SharedFunctionInfo> shared_info,
Handle<Object> receiver,
int loop_nesting)
: shared_info_(shared_info),
receiver_(receiver),
loop_nesting_(loop_nesting),
has_this_properties_(false),
has_globals_(false) {
}
Handle<SharedFunctionInfo> shared_info() { return shared_info_; }
bool has_receiver() { return !receiver_.is_null(); }
Handle<Object> receiver() { return receiver_; }
int loop_nesting() { return loop_nesting_; }
bool has_this_properties() { return has_this_properties_; }
void set_has_this_properties(bool flag) { has_this_properties_ = flag; }
bool has_globals() { return has_globals_; }
void set_has_globals(bool flag) { has_globals_ = flag; }
private:
Handle<SharedFunctionInfo> shared_info_;
Handle<Object> receiver_;
int loop_nesting_;
bool has_this_properties_;
bool has_globals_;
};
// The V8 compiler
//
// General strategy: Source code is translated into an anonymous function w/o
@ -70,7 +105,7 @@ class Compiler : public AllStatic {
// Compile from function info (used for lazy compilation). Returns
// true on success and false if the compilation resulted in a stack
// overflow.
static bool CompileLazy(CompilationInfo* info);
// Compile a function boilerplate object (the function is possibly
// lazily compiled). Called recursively from a backend code
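With the class above, lazy-compilation callers now bundle their inputs into a CompilationInfo and pass that single object to CompileLazy. A minimal sketch of the new calling convention (the wrapper function and variable names here are illustrative, not part of the patch):

// Sketch: bundle the lazy-compilation inputs and hand them to the compiler.
static bool CompileLazyWithInfo(Handle<SharedFunctionInfo> shared,
                                Handle<Object> receiver,
                                int loop_nesting) {
  CompilationInfo info(shared, receiver, loop_nesting);
  return Compiler::CompileLazy(&info);
}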

267
deps/v8/src/data-flow.cc

@ -0,0 +1,267 @@
// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include "v8.h"
#include "data-flow.h"
namespace v8 {
namespace internal {
void AstLabeler::Label(FunctionLiteral* fun) {
VisitStatements(fun->body());
}
void AstLabeler::VisitStatements(ZoneList<Statement*>* stmts) {
for (int i = 0, len = stmts->length(); i < len; i++) {
Visit(stmts->at(i));
}
}
void AstLabeler::VisitDeclarations(ZoneList<Declaration*>* decls) {
UNREACHABLE();
}
void AstLabeler::VisitBlock(Block* stmt) {
VisitStatements(stmt->statements());
}
void AstLabeler::VisitExpressionStatement(
ExpressionStatement* stmt) {
Visit(stmt->expression());
}
void AstLabeler::VisitEmptyStatement(EmptyStatement* stmt) {
// Do nothing.
}
void AstLabeler::VisitIfStatement(IfStatement* stmt) {
UNREACHABLE();
}
void AstLabeler::VisitContinueStatement(ContinueStatement* stmt) {
UNREACHABLE();
}
void AstLabeler::VisitBreakStatement(BreakStatement* stmt) {
UNREACHABLE();
}
void AstLabeler::VisitReturnStatement(ReturnStatement* stmt) {
UNREACHABLE();
}
void AstLabeler::VisitWithEnterStatement(
WithEnterStatement* stmt) {
UNREACHABLE();
}
void AstLabeler::VisitWithExitStatement(WithExitStatement* stmt) {
UNREACHABLE();
}
void AstLabeler::VisitSwitchStatement(SwitchStatement* stmt) {
UNREACHABLE();
}
void AstLabeler::VisitDoWhileStatement(DoWhileStatement* stmt) {
UNREACHABLE();
}
void AstLabeler::VisitWhileStatement(WhileStatement* stmt) {
UNREACHABLE();
}
void AstLabeler::VisitForStatement(ForStatement* stmt) {
UNREACHABLE();
}
void AstLabeler::VisitForInStatement(ForInStatement* stmt) {
UNREACHABLE();
}
void AstLabeler::VisitTryCatchStatement(TryCatchStatement* stmt) {
UNREACHABLE();
}
void AstLabeler::VisitTryFinallyStatement(
TryFinallyStatement* stmt) {
UNREACHABLE();
}
void AstLabeler::VisitDebuggerStatement(
DebuggerStatement* stmt) {
UNREACHABLE();
}
void AstLabeler::VisitFunctionLiteral(FunctionLiteral* expr) {
UNREACHABLE();
}
void AstLabeler::VisitFunctionBoilerplateLiteral(
FunctionBoilerplateLiteral* expr) {
UNREACHABLE();
}
void AstLabeler::VisitConditional(Conditional* expr) {
UNREACHABLE();
}
void AstLabeler::VisitSlot(Slot* expr) {
UNREACHABLE();
}
void AstLabeler::VisitVariableProxy(VariableProxy* expr) {
expr->set_num(next_number_++);
}
void AstLabeler::VisitLiteral(Literal* expr) {
UNREACHABLE();
}
void AstLabeler::VisitRegExpLiteral(RegExpLiteral* expr) {
UNREACHABLE();
}
void AstLabeler::VisitObjectLiteral(ObjectLiteral* expr) {
UNREACHABLE();
}
void AstLabeler::VisitArrayLiteral(ArrayLiteral* expr) {
UNREACHABLE();
}
void AstLabeler::VisitCatchExtensionObject(
CatchExtensionObject* expr) {
UNREACHABLE();
}
void AstLabeler::VisitAssignment(Assignment* expr) {
Property* prop = expr->target()->AsProperty();
ASSERT(prop != NULL);
if (prop != NULL) {
ASSERT(prop->key()->IsPropertyName());
VariableProxy* proxy = prop->obj()->AsVariableProxy();
if (proxy != NULL && proxy->var()->is_this()) {
has_this_properties_ = true;
} else {
Visit(prop->obj());
}
}
Visit(expr->value());
expr->set_num(next_number_++);
}
void AstLabeler::VisitThrow(Throw* expr) {
UNREACHABLE();
}
void AstLabeler::VisitProperty(Property* expr) {
UNREACHABLE();
}
void AstLabeler::VisitCall(Call* expr) {
UNREACHABLE();
}
void AstLabeler::VisitCallNew(CallNew* expr) {
UNREACHABLE();
}
void AstLabeler::VisitCallRuntime(CallRuntime* expr) {
UNREACHABLE();
}
void AstLabeler::VisitUnaryOperation(UnaryOperation* expr) {
UNREACHABLE();
}
void AstLabeler::VisitCountOperation(CountOperation* expr) {
UNREACHABLE();
}
void AstLabeler::VisitBinaryOperation(BinaryOperation* expr) {
Visit(expr->left());
Visit(expr->right());
expr->set_num(next_number_++);
}
void AstLabeler::VisitCompareOperation(CompareOperation* expr) {
UNREACHABLE();
}
void AstLabeler::VisitThisFunction(ThisFunction* expr) {
UNREACHABLE();
}
void AstLabeler::VisitDeclaration(Declaration* decl) {
UNREACHABLE();
}
} } // namespace v8::internal

67
deps/v8/src/data-flow.h

@ -0,0 +1,67 @@
// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#ifndef V8_DATAFLOW_H_
#define V8_DATAFLOW_H_
#include "ast.h"
#include "scopes.h"
namespace v8 {
namespace internal {
// This class is used to number all expressions in the AST according to
// their evaluation order (post-order left-to-right traversal).
class AstLabeler: public AstVisitor {
public:
AstLabeler() : next_number_(0), has_this_properties_(false) {}
void Label(FunctionLiteral* fun);
bool has_this_properties() { return has_this_properties_; }
private:
void VisitDeclarations(ZoneList<Declaration*>* decls);
void VisitStatements(ZoneList<Statement*>* stmts);
// AST node visit functions.
#define DECLARE_VISIT(type) virtual void Visit##type(type* node);
AST_NODE_LIST(DECLARE_VISIT)
#undef DECLARE_VISIT
// Traversal number for labelling AST nodes.
int next_number_;
bool has_this_properties_;
DISALLOW_COPY_AND_ASSIGN(AstLabeler);
};
} } // namespace v8::internal
#endif // V8_DATAFLOW_H_
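A sketch of how the labeler is meant to be driven (the wrapper below is illustrative; the real call site is in the compiler): number the function body's expressions, then record whether any stores to properties of `this` were seen.

// Illustrative driver for AstLabeler.
static void LabelExpressions(FunctionLiteral* fun, CompilationInfo* info) {
  AstLabeler labeler;
  labeler.Label(fun);  // assigns post-order numbers via set_num()
  info->set_has_this_properties(labeler.has_this_properties());
}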

32
deps/v8/src/dateparser.cc

@ -72,15 +72,9 @@ bool DateParser::DayComposer::Write(FixedArray* output) {
if (!Smi::IsValid(year) || !IsMonth(month) || !IsDay(day)) return false;
output->set(YEAR, Smi::FromInt(year));
output->set(MONTH, Smi::FromInt(month - 1));  // 0-based
output->set(DAY, Smi::FromInt(day));
return true;
}
@ -103,15 +97,9 @@ bool DateParser::TimeComposer::Write(FixedArray* output) {
if (!IsHour(hour) || !IsMinute(minute) || !IsSecond(second)) return false;
output->set(HOUR, Smi::FromInt(hour));
output->set(MINUTE, Smi::FromInt(minute));
output->set(SECOND, Smi::FromInt(second));
return true;
}
@ -121,13 +109,9 @@ bool DateParser::TimeZoneComposer::Write(FixedArray* output) {
if (minute_ == kNone) minute_ = 0;
int total_seconds = sign_ * (hour_ * 3600 + minute_ * 60);
if (!Smi::IsValid(total_seconds)) return false;
output->set(UTC_OFFSET, Smi::FromInt(total_seconds));
} else {
output->set_null(UTC_OFFSET);
}
return true;
}

4
deps/v8/src/debug-agent.cc

@ -54,10 +54,12 @@ void DebuggerAgent::Run() {
while (!bound && !terminate_) {
bound = server_->Bind(port_);
// If an error occurred wait a bit before retrying. The most common error
// would be that the port is already in use, so this avoids a busy loop and
// makes the agent take over the port when it becomes free.
if (!bound) {
PrintF("Failed to open socket on port %d, "
"waiting %d ms before retrying\n", port_, kOneSecondInMicros / 1000);
terminate_now_->Wait(kOneSecondInMicros);
}
}

2
deps/v8/src/debug-delay.js

@ -1704,7 +1704,7 @@ DebugCommandProcessor.prototype.evaluateRequest_ = function(request, response) {
if (global) {
// Evaluate in the global context.
response.body =
this.exec_state_.evaluateGlobal(expression, Boolean(disable_break));
return;
}

110
deps/v8/src/debug.cc

@ -75,9 +75,6 @@ BreakLocationIterator::BreakLocationIterator(Handle<DebugInfo> debug_info,
BreakLocatorType type) {
debug_info_ = debug_info;
type_ = type;
// Get the stub early to avoid possible GC during iterations. We may need
// this stub to detect debugger calls generated from debugger statements.
debug_break_stub_ = RuntimeStub(Runtime::kDebugBreak, 0).GetCode();
reloc_iterator_ = NULL;
reloc_iterator_original_ = NULL;
Reset(); // Initialize the rest of the member variables.
@ -461,9 +458,7 @@ bool BreakLocationIterator::IsDebuggerStatement() {
Code* code = Code::GetCodeFromTargetAddress(target);
if (code->kind() == Code::STUB) {
CodeStub::Major major_key = code->major_key();
return (major_key == CodeStub::DebuggerStatement);
}
}
return false;
@ -1241,12 +1236,14 @@ void Debug::PrepareStep(StepAction step_action, int step_count) {
uint32_t key = Smi::cast(*obj)->value();
// Argc in the stub is the number of arguments passed - not the
// expected arguments of the called function.
int call_function_arg_count =
CallFunctionStub::ExtractArgcFromMinorKey(
CodeStub::MinorKeyFromKey(key));
ASSERT(call_function_stub->major_key() ==
CodeStub::MajorKeyFromKey(key));
// Find target function on the expression stack.
// Expression stack looks like this (top to bottom):
// argN
// ...
// arg0
@ -1524,19 +1521,13 @@ void Debug::ClearStepNext() {
}
bool Debug::EnsureCompiled(Handle<SharedFunctionInfo> shared) {
if (shared->is_compiled()) return true;
return CompileLazyShared(shared, CLEAR_EXCEPTION, 0);
}
// Ensures the debug information is present for shared.
bool Debug::EnsureDebugInfo(Handle<SharedFunctionInfo> shared) {
// Return if we already have the debug info for shared.
if (HasDebugInfo(shared)) return true;
// Ensure shared is compiled. Return false if this failed.
if (!EnsureCompiled(shared, CLEAR_EXCEPTION)) return false;
// Create the debug info object.
Handle<DebugInfo> debug_info = Factory::NewDebugInfo(shared);
@ -1693,9 +1684,7 @@ void Debug::CreateScriptCache() {
// Scan heap for Script objects.
int count = 0;
HeapIterator iterator;
for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
if (obj->IsScript() && Script::cast(obj)->HasValidSource()) {
script_cache_->Add(Handle<Script>(Script::cast(obj)));
count++;
@ -1759,8 +1748,10 @@ bool Debugger::never_unload_debugger_ = false;
v8::Debug::MessageHandler2 Debugger::message_handler_ = NULL;
bool Debugger::debugger_unload_pending_ = false;
v8::Debug::HostDispatchHandler Debugger::host_dispatch_handler_ = NULL;
Mutex* Debugger::dispatch_handler_access_ = OS::CreateMutex();
v8::Debug::DebugMessageDispatchHandler
Debugger::debug_message_dispatch_handler_ = NULL;
MessageDispatchHelperThread* Debugger::message_dispatch_helper_thread_ = NULL;
int Debugger::host_dispatch_micros_ = 100 * 1000;
DebuggerAgent* Debugger::agent_ = NULL;
LockingCommandMessageQueue Debugger::command_queue_(kQueueInitialSize);
@ -2379,17 +2370,12 @@ void Debugger::ListenersChanged() {
if (IsDebuggerActive()) {
// Disable the compilation cache when the debugger is active.
CompilationCache::Disable();
debugger_unload_pending_ = false;
} else {
CompilationCache::Enable();
// Unload the debugger if event listener and message handler cleared.
// Schedule this for later, because we may be in non-V8 thread.
debugger_unload_pending_ = true;
}
}
@ -2402,8 +2388,14 @@ void Debugger::SetHostDispatchHandler(v8::Debug::HostDispatchHandler handler,
void Debugger::SetDebugMessageDispatchHandler(
v8::Debug::DebugMessageDispatchHandler handler, bool provide_locker) {
ScopedLock with(dispatch_handler_access_);
debug_message_dispatch_handler_ = handler;
if (provide_locker && message_dispatch_helper_thread_ == NULL) {
message_dispatch_helper_thread_ = new MessageDispatchHelperThread;
message_dispatch_helper_thread_->Start();
}
}
@ -2438,8 +2430,16 @@ void Debugger::ProcessCommand(Vector<const uint16_t> command,
StackGuard::DebugCommand();
}
MessageDispatchHelperThread* dispatch_thread;
{
ScopedLock with(dispatch_handler_access_);
dispatch_thread = message_dispatch_helper_thread_;
}
if (dispatch_thread == NULL) {
CallMessageDispatchHandler();
} else {
dispatch_thread->Schedule();
}
}
@ -2526,6 +2526,19 @@ void Debugger::WaitForAgent() {
agent_->WaitUntilListening();
}
void Debugger::CallMessageDispatchHandler() {
v8::Debug::DebugMessageDispatchHandler handler;
{
ScopedLock with(dispatch_handler_access_);
handler = Debugger::debug_message_dispatch_handler_;
}
if (handler != NULL) {
handler();
}
}
MessageImpl MessageImpl::NewEvent(DebugEvent event,
bool running,
Handle<JSObject> exec_state,
@ -2746,6 +2759,45 @@ void LockingCommandMessageQueue::Clear() {
queue_.Clear();
}
MessageDispatchHelperThread::MessageDispatchHelperThread()
: sem_(OS::CreateSemaphore(0)), mutex_(OS::CreateMutex()),
already_signalled_(false) {
}
MessageDispatchHelperThread::~MessageDispatchHelperThread() {
delete mutex_;
delete sem_;
}
void MessageDispatchHelperThread::Schedule() {
{
ScopedLock lock(mutex_);
if (already_signalled_) {
return;
}
already_signalled_ = true;
}
sem_->Signal();
}
void MessageDispatchHelperThread::Run() {
while (true) {
sem_->Wait();
{
ScopedLock lock(mutex_);
already_signalled_ = false;
}
{
Locker locker;
Debugger::CallMessageDispatchHandler();
}
}
}
#endif // ENABLE_DEBUGGER_SUPPORT
} } // namespace v8::internal

37
deps/v8/src/debug.h

@ -132,7 +132,6 @@ class BreakLocationIterator {
int position_;
int statement_position_;
Handle<DebugInfo> debug_info_;
Handle<Code> debug_break_stub_;
RelocIterator* reloc_iterator_;
RelocIterator* reloc_iterator_original_;
@ -391,7 +390,6 @@ class Debug {
static void ClearStepOut();
static void ClearStepNext();
// Returns whether the compile succeeded.
static bool EnsureCompiled(Handle<SharedFunctionInfo> shared);
static void RemoveDebugInfo(Handle<DebugInfo> debug_info);
static void SetAfterBreakTarget(JavaScriptFrame* frame);
static Handle<Object> CheckBreakPoints(Handle<Object> break_point);
@ -559,6 +557,9 @@ class CommandMessageQueue BASE_EMBEDDED {
};
class MessageDispatchHelperThread;
// LockingCommandMessageQueue is a thread-safe circular buffer of CommandMessage
// messages. The message data is not managed by LockingCommandMessageQueue.
// Pointers to the data are passed in and out. Implemented by adding a
@ -619,7 +620,8 @@ class Debugger {
static void SetHostDispatchHandler(v8::Debug::HostDispatchHandler handler,
int period);
static void SetDebugMessageDispatchHandler(
v8::Debug::DebugMessageDispatchHandler handler,
bool provide_locker);
// Invoke the message handler function.
static void InvokeMessageHandler(MessageImpl message);
@ -645,6 +647,8 @@ class Debugger {
// Blocks until the agent has started listening for connections
static void WaitForAgent();
static void CallMessageDispatchHandler();
// Unload the debugger if possible. Only called when no debugger is currently
// active.
static void UnloadDebugger();
@ -654,7 +658,9 @@ class Debugger {
// Check whether the message handler has been cleared.
if (debugger_unload_pending_) {
if (Debug::debugger_entry() == NULL) {
UnloadDebugger();
}
}
// Currently argument event is not used.
@ -681,7 +687,9 @@ class Debugger {
static v8::Debug::MessageHandler2 message_handler_;
static bool debugger_unload_pending_; // Was message handler cleared?
static v8::Debug::HostDispatchHandler host_dispatch_handler_;
static Mutex* dispatch_handler_access_; // Mutex guarding dispatch handler.
static v8::Debug::DebugMessageDispatchHandler debug_message_dispatch_handler_;
static MessageDispatchHelperThread* message_dispatch_helper_thread_;
static int host_dispatch_micros_;
static DebuggerAgent* agent_;
@ -858,6 +866,27 @@ class Debug_Address {
int reg_;
};
// The optional thread that the Debug Agent may use to temporarily call V8 to
// process pending debug requests if the debuggee is not running V8 at the
// moment. Technically it does not call V8 itself, rather it asks the
// embedding program to do this via v8::Debug::HostDispatchHandler.
class MessageDispatchHelperThread: public Thread {
public:
MessageDispatchHelperThread();
~MessageDispatchHelperThread();
void Schedule();
private:
void Run();
Semaphore* const sem_;
Mutex* const mutex_;
bool already_signalled_;
DISALLOW_COPY_AND_ASSIGN(MessageDispatchHelperThread);
};
} } // namespace v8::internal
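On the embedder side this is exposed through v8-debug.h (also updated in this commit). Assuming the public wrapper mirrors the internal signature above, registering a dispatch handler that the helper thread may invoke looks roughly like this (the handler and function names are made up for the example):

// Illustrative embedder code: ask V8 to take a Locker (provide_locker = true)
// before the message dispatch helper thread invokes the handler.
static void OnDebugMessagePending() {
  // Pump pending debug commands here, or schedule that work on the
  // embedder's main loop.
}

static void InstallDebugDispatchHandler() {
  v8::Debug::SetDebugMessageDispatchHandler(OnDebugMessagePending, true);
}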

8
deps/v8/src/disassembler.cc

@ -266,13 +266,7 @@ static int DecodeIt(FILE* f,
case CodeStub::CallFunction:
out.AddFormatted("argc = %d", minor_key);
break;
default:
out.AddFormatted("minor: %d", minor_key);
}
}

18
deps/v8/src/execution.cc

@ -638,24 +638,32 @@ Object* Execution::DebugBreakHelper() {
bool debug_command_only =
StackGuard::IsDebugCommand() && !StackGuard::IsDebugBreak();
// Clear the debug break request flag.
StackGuard::Continue(DEBUGBREAK);
ProcessDebugMesssages(debug_command_only);
// Return to continue execution.
return Heap::undefined_value();
}
void Execution::ProcessDebugMesssages(bool debug_command_only) {
// Clear the debug command request flag.
StackGuard::Continue(DEBUGCOMMAND);
HandleScope scope;
// Enter the debugger. Just continue if we fail to enter the debugger.
EnterDebugger debugger;
if (debugger.FailedToEnter()) {
return;
}
// Notify the debug event listeners. Indicate auto continue if the break was
// a debug command break.
Debugger::OnDebugBreak(Factory::undefined_value(), debug_command_only);
}
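The split means queued debugger commands can now be processed without faking a break; a caller that only wants to flush pending commands can invoke the new helper directly (sketch only; the identifier's spelling follows the header):

// Sketch: process queued debug commands outside of an actual debug break.
// Passing true marks this as a command-only event, so listeners auto-continue.
static void FlushPendingDebugCommands() {
  Execution::ProcessDebugMesssages(true);
}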
#endif
Object* Execution::HandleStackGuardInterrupt() {

1
deps/v8/src/execution.h

@ -122,6 +122,7 @@ class Execution : public AllStatic {
Handle<Object> is_global);
#ifdef ENABLE_DEBUGGER_SUPPORT
static Object* DebugBreakHelper();
static void ProcessDebugMesssages(bool debug_command_only);
#endif
// If the stack guard is triggered, but it is not an actual

8
deps/v8/src/factory.cc

@ -718,6 +718,11 @@ Handle<JSFunction> Factory::NewFunction(Handle<String> name,
}
Handle<Object> Factory::ToObject(Handle<Object> object) {
CALL_HEAP_FUNCTION(object->ToObject(), Object);
}
Handle<Object> Factory::ToObject(Handle<Object> object,
Handle<Context> global_context) {
CALL_HEAP_FUNCTION(object->ToObject(*global_context), Object);
@ -766,6 +771,8 @@ Handle<JSObject> Factory::NewArgumentsObject(Handle<Object> callee,
Handle<JSFunction> Factory::CreateApiFunction(
Handle<FunctionTemplateInfo> obj, ApiInstanceType instance_type) {
Handle<Code> code = Handle<Code>(Builtins::builtin(Builtins::HandleApiCall));
Handle<Code> construct_stub =
Handle<Code>(Builtins::builtin(Builtins::JSConstructStubApi));
int internal_field_count = 0;
if (!obj->instance_template()->IsUndefined()) {
@ -840,6 +847,7 @@ Handle<JSFunction> Factory::CreateApiFunction(
}
result->shared()->set_function_data(*obj);
result->shared()->set_construct_stub(*construct_stub);
result->shared()->DontAdaptArguments();
// Recursively copy parent templates' accessors, 'data' may be modified. // Recursively copy parent templates' accessors, 'data' may be modified.

1
deps/v8/src/factory.h

@ -229,6 +229,7 @@ class Factory : public AllStatic {
static Handle<Code> CopyCode(Handle<Code> code);
static Handle<Object> ToObject(Handle<Object> object);
static Handle<Object> ToObject(Handle<Object> object,
Handle<Context> global_context);
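The new single-argument overload performs the ToObject conversion without the caller supplying a global context explicitly; a trivial usage sketch (the wrapper name is illustrative):

// Sketch: wrap a primitive value in its object wrapper via the new overload.
static Handle<Object> WrapValue(Handle<Object> value) {
  return Factory::ToObject(value);
}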

947
deps/v8/src/fast-codegen.cc

File diff suppressed because it is too large

338
deps/v8/src/fast-codegen.h

@ -1,4 +1,4 @@
// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@ -31,316 +31,96 @@
#include "v8.h" #include "v8.h"
#include "ast.h" #include "ast.h"
#include "compiler.h"
namespace v8 {
namespace internal {
// ----------------------------------------------------------------------------- class FastCodeGenSyntaxChecker: public AstVisitor {
// Fast code generator.
class FastCodeGenerator: public AstVisitor {
public: public:
FastCodeGenerator(MacroAssembler* masm, Handle<Script> script, bool is_eval) explicit FastCodeGenSyntaxChecker()
: masm_(masm), : info_(NULL), has_supported_syntax_(true) {
function_(NULL),
script_(script),
is_eval_(is_eval),
nesting_stack_(NULL),
loop_depth_(0),
true_label_(NULL),
false_label_(NULL) {
} }
static Handle<Code> MakeCode(FunctionLiteral* fun, void Check(FunctionLiteral* fun, CompilationInfo* info);
Handle<Script> script,
bool is_eval);
void Generate(FunctionLiteral* fun); CompilationInfo* info() { return info_; }
bool has_supported_syntax() { return has_supported_syntax_; }
private: private:
class Breakable; void VisitDeclarations(ZoneList<Declaration*>* decls);
class Iteration; void VisitStatements(ZoneList<Statement*>* stmts);
class TryCatch;
class TryFinally;
class Finally;
class ForIn;
class NestedStatement BASE_EMBEDDED {
public:
explicit NestedStatement(FastCodeGenerator* codegen) : codegen_(codegen) {
// Link into codegen's nesting stack.
previous_ = codegen->nesting_stack_;
codegen->nesting_stack_ = this;
}
virtual ~NestedStatement() {
// Unlink from codegen's nesting stack.
ASSERT_EQ(this, codegen_->nesting_stack_);
codegen_->nesting_stack_ = previous_;
}
virtual Breakable* AsBreakable() { return NULL; }
virtual Iteration* AsIteration() { return NULL; }
virtual TryCatch* AsTryCatch() { return NULL; }
virtual TryFinally* AsTryFinally() { return NULL; }
virtual Finally* AsFinally() { return NULL; }
virtual ForIn* AsForIn() { return NULL; }
virtual bool IsContinueTarget(Statement* target) { return false; }
virtual bool IsBreakTarget(Statement* target) { return false; }
// Generate code to leave the nested statement. This includes
// cleaning up any stack elements in use and restoring the
// stack to the expectations of the surrounding statements.
// Takes a number of stack elements currently on top of the
// nested statement's stack, and returns a number of stack
// elements left on top of the surrounding statement's stack.
// The generated code must preserve the result register (which
// contains the value in case of a return).
virtual int Exit(int stack_depth) {
// Default implementation for the case where there is
// nothing to clean up.
return stack_depth;
}
NestedStatement* outer() { return previous_; }
protected:
MacroAssembler* masm() { return codegen_->masm(); }
private:
FastCodeGenerator* codegen_;
NestedStatement* previous_;
DISALLOW_COPY_AND_ASSIGN(NestedStatement);
};
class Breakable : public NestedStatement {
public:
Breakable(FastCodeGenerator* codegen,
BreakableStatement* break_target)
: NestedStatement(codegen),
target_(break_target) {}
virtual ~Breakable() {}
virtual Breakable* AsBreakable() { return this; }
virtual bool IsBreakTarget(Statement* statement) {
return target_ == statement;
}
BreakableStatement* statement() { return target_; }
Label* break_target() { return &break_target_label_; }
private:
BreakableStatement* target_;
Label break_target_label_;
DISALLOW_COPY_AND_ASSIGN(Breakable);
};
class Iteration : public Breakable {
public:
Iteration(FastCodeGenerator* codegen,
IterationStatement* iteration_statement)
: Breakable(codegen, iteration_statement) {}
virtual ~Iteration() {}
virtual Iteration* AsIteration() { return this; }
virtual bool IsContinueTarget(Statement* statement) {
return this->statement() == statement;
}
Label* continue_target() { return &continue_target_label_; }
private:
Label continue_target_label_;
DISALLOW_COPY_AND_ASSIGN(Iteration);
};
// The environment inside the try block of a try/catch statement. // AST node visit functions.
class TryCatch : public NestedStatement { #define DECLARE_VISIT(type) virtual void Visit##type(type* node);
public: AST_NODE_LIST(DECLARE_VISIT)
explicit TryCatch(FastCodeGenerator* codegen, Label* catch_entry) #undef DECLARE_VISIT
: NestedStatement(codegen), catch_entry_(catch_entry) { }
virtual ~TryCatch() {}
virtual TryCatch* AsTryCatch() { return this; }
Label* catch_entry() { return catch_entry_; }
virtual int Exit(int stack_depth);
private:
Label* catch_entry_;
DISALLOW_COPY_AND_ASSIGN(TryCatch);
};
// The environment inside the try block of a try/finally statement.
class TryFinally : public NestedStatement {
public:
explicit TryFinally(FastCodeGenerator* codegen, Label* finally_entry)
: NestedStatement(codegen), finally_entry_(finally_entry) { }
virtual ~TryFinally() {}
virtual TryFinally* AsTryFinally() { return this; }
Label* finally_entry() { return finally_entry_; }
virtual int Exit(int stack_depth);
private:
Label* finally_entry_;
DISALLOW_COPY_AND_ASSIGN(TryFinally);
};
// A FinallyEnvironment represents being inside a finally block.
// Abnormal termination of the finally block needs to clean up
// the block's parameters from the stack.
class Finally : public NestedStatement {
public:
explicit Finally(FastCodeGenerator* codegen) : NestedStatement(codegen) { }
virtual ~Finally() {}
virtual Finally* AsFinally() { return this; }
virtual int Exit(int stack_depth) {
return stack_depth + kFinallyStackElementCount;
}
private:
// Number of extra stack slots occupied during a finally block.
static const int kFinallyStackElementCount = 2;
DISALLOW_COPY_AND_ASSIGN(Finally);
};
// A ForInEnvironment represents being inside a for-in loop.
// Abnormal termination of the for-in block needs to clean up
// the block's temporary storage from the stack.
class ForIn : public Iteration {
public:
ForIn(FastCodeGenerator* codegen,
ForInStatement* statement)
: Iteration(codegen, statement) { }
virtual ~ForIn() {}
virtual ForIn* AsForIn() { return this; }
virtual int Exit(int stack_depth) {
return stack_depth + kForInStackElementCount;
}
private:
// TODO(lrn): Check that this value is correct when implementing
// for-in.
static const int kForInStackElementCount = 5;
DISALLOW_COPY_AND_ASSIGN(ForIn);
};
int SlotOffset(Slot* slot);
// Emit code to complete the evaluation of an expression based on its
// expression context and given its value is in a register, non-lookup
// slot, or a literal.
void Apply(Expression::Context context, Register reg);
void Apply(Expression::Context context, Slot* slot, Register scratch);
void Apply(Expression::Context context, Literal* lit);
// Emit code to complete the evaluation of an expression based on its
// expression context and given its value is on top of the stack.
void ApplyTOS(Expression::Context context);
// Emit code to discard count elements from the top of stack, then
// complete the evaluation of an expression based on its expression
// context and given its value is in a register.
void DropAndApply(int count, Expression::Context context, Register reg);
void Move(Slot* dst, Register source, Register scratch1, Register scratch2); CompilationInfo* info_;
void Move(Register dst, Slot* source); bool has_supported_syntax_;
// Return an operand used to read/write to a known (ie, non-LOOKUP) slot. DISALLOW_COPY_AND_ASSIGN(FastCodeGenSyntaxChecker);
// May emit code to traverse the context chain, destroying the scratch };
// register.
MemOperand EmitSlotSearch(Slot* slot, Register scratch);
// Test the JavaScript value in source as if in a test context, compile
// control flow to a pair of labels.
void TestAndBranch(Register source, Label* true_label, Label* false_label);
void VisitForControl(Expression* expr, Label* if_true, Label* if_false) { class FastCodeGenerator: public AstVisitor {
ASSERT(expr->context() == Expression::kTest || public:
expr->context() == Expression::kValueTest || FastCodeGenerator(MacroAssembler* masm, Handle<Script> script, bool is_eval)
expr->context() == Expression::kTestValue); : masm_(masm),
Label* saved_true = true_label_; script_(script),
Label* saved_false = false_label_; is_eval_(is_eval),
true_label_ = if_true; function_(NULL),
false_label_ = if_false; info_(NULL) {
Visit(expr);
true_label_ = saved_true;
false_label_ = saved_false;
} }
void VisitDeclarations(ZoneList<Declaration*>* declarations); static Handle<Code> MakeCode(FunctionLiteral* fun,
void DeclareGlobals(Handle<FixedArray> pairs); Handle<Script> script,
bool is_eval,
// Platform-specific return sequence CompilationInfo* info);
void EmitReturnSequence(int position);
// Platform-specific code sequences for calls
void EmitCallWithStub(Call* expr);
void EmitCallWithIC(Call* expr, Handle<Object> name, RelocInfo::Mode mode);
// Platform-specific code for loading variables.
void EmitVariableLoad(Variable* expr, Expression::Context context);
// Platform-specific support for compiling assignments.
// Load a value from a named property.
// The receiver is left on the stack by the IC.
void EmitNamedPropertyLoad(Property* expr, Expression::Context context);
// Load a value from a keyed property.
// The receiver and the key is left on the stack by the IC.
void EmitKeyedPropertyLoad(Property* expr, Expression::Context context);
// Apply the compound assignment operator. Expects both operands on top
// of the stack.
void EmitCompoundAssignmentOp(Token::Value op, Expression::Context context);
// Complete a variable assignment. The right-hand-side value is expected
// on top of the stack.
void EmitVariableAssignment(Variable* var, Expression::Context context);
// Complete a named property assignment. The receiver and right-hand-side
// value are expected on top of the stack.
void EmitNamedPropertyAssignment(Assignment* expr);
// Complete a keyed property assignment. The reciever, key, and
// right-hand-side value are expected on top of the stack.
void EmitKeyedPropertyAssignment(Assignment* expr);
void SetFunctionPosition(FunctionLiteral* fun);
void SetReturnPosition(FunctionLiteral* fun);
void SetStatementPosition(Statement* stmt);
void SetSourcePosition(int pos);
// Non-local control flow support.
void EnterFinallyBlock();
void ExitFinallyBlock();
// Loop nesting counter. void Generate(FunctionLiteral* fun, CompilationInfo* info);
int loop_depth() { return loop_depth_; }
void increment_loop_depth() { loop_depth_++; }
void decrement_loop_depth() {
ASSERT(loop_depth_ > 0);
loop_depth_--;
}
private:
MacroAssembler* masm() { return masm_; } MacroAssembler* masm() { return masm_; }
static Register result_register(); FunctionLiteral* function() { return function_; }
static Register context_register(); Label* bailout() { return &bailout_; }
// Set fields in the stack frame. Offsets are the frame pointer relative bool has_receiver() { return !info_->receiver().is_null(); }
// offsets defined in, e.g., StandardFrameConstants. Handle<Object> receiver() { return info_->receiver(); }
void StoreToFrameField(int frame_offset, Register value); bool has_this_properties() { return info_->has_this_properties(); }
// Load a value from the current context. Indices are defined as an enum
// in v8::internal::Context.
void LoadContextField(Register dst, int context_index);
// AST node visit functions. // AST node visit functions.
#define DECLARE_VISIT(type) virtual void Visit##type(type* node); #define DECLARE_VISIT(type) virtual void Visit##type(type* node);
AST_NODE_LIST(DECLARE_VISIT) AST_NODE_LIST(DECLARE_VISIT)
#undef DECLARE_VISIT #undef DECLARE_VISIT
// Handles the shortcutted logical binary operations in VisitBinaryOperation.
void EmitLogicalOperation(BinaryOperation* expr); // Emit code to load the receiver from the stack into a given register.
void EmitLoadReceiver(Register reg);
// Emit code to check that the receiver has the same map as the
// compile-time receiver. Receiver is expected in {ia32-edx, x64-rdx,
// arm-r1}. Emit a branch to the (single) bailout label if check fails.
void EmitReceiverMapCheck();
// Emit code to load a global variable value into {ia32-eax, x64-rax,
// arm-r0}. Register {ia32-edx, x64-rdx, arm-r1} is preserved if it is
// holding the receiver and {ia32-ecx, x64-rcx, arm-r2} is always
// clobbered.
void EmitGlobalVariableLoad(Handle<String> name);
// Emit a store to an own property of this. The stored value is expected
// in {ia32-eax, x64-rax, arm-r0} and the receiver in {ia32-edx, x64-rdx,
// arm-r1}. Both are preserved.
void EmitThisPropertyStore(Handle<String> name);
MacroAssembler* masm_; MacroAssembler* masm_;
FunctionLiteral* function_;
Handle<Script> script_; Handle<Script> script_;
bool is_eval_; bool is_eval_;
Label return_label_;
NestedStatement* nesting_stack_;
int loop_depth_;
Label* true_label_; FunctionLiteral* function_;
Label* false_label_; CompilationInfo* info_;
friend class NestedStatement; Label bailout_;
DISALLOW_COPY_AND_ASSIGN(FastCodeGenerator); DISALLOW_COPY_AND_ASSIGN(FastCodeGenerator);
}; };

20
deps/v8/src/flag-definitions.h

@ -143,12 +143,14 @@ DEFINE_bool(debug_info, true, "add debug information to compiled functions")
DEFINE_bool(strict, false, "strict error checking")
DEFINE_int(min_preparse_length, 1024,
"minimum length for automatic enable preparsing")
DEFINE_bool(full_compiler, true, "enable dedicated backend for run-once code")
DEFINE_bool(fast_compiler, false, "enable speculative optimizing backend")
DEFINE_bool(always_full_compiler, false,
"try to use the dedicated run-once backend for all code")
DEFINE_bool(always_fast_compiler, false,
"try to use the speculative optimizing backend for all code")
DEFINE_bool(trace_bailout, false,
"print reasons for falling back to using the classic V8 backend")
// compilation-cache.cc
DEFINE_bool(compilation_cache, true, "enable compilation cache")
@ -201,6 +203,11 @@ DEFINE_bool(canonicalize_object_literal_maps, true,
DEFINE_bool(use_big_map_space, true,
"Use big map space, but don't compact if it grew too big.")
DEFINE_int(max_map_space_pages, MapSpace::kMaxMapPageIndex - 1,
"Maximum number of pages in map space which still allows to encode "
"forwarding pointers. That's actually a constant, but it's useful "
"to control it with a flag for better testing.")
// mksnapshot.cc
DEFINE_bool(h, false, "print this message")
DEFINE_bool(new_snapshot, true, "use new snapshot implementation")
@ -294,6 +301,7 @@ DEFINE_string(stop_at, "", "function name where to insert a breakpoint")
// compiler.cc
DEFINE_bool(print_builtin_scopes, false, "print scopes for builtins")
DEFINE_bool(print_scopes, false, "print scopes")
DEFINE_bool(print_ir, false, "print the AST as seen by the backend")
// contexts.cc // contexts.cc
DEFINE_bool(trace_contexts, false, "trace contexts operations") DEFINE_bool(trace_contexts, false, "trace contexts operations")
@ -358,6 +366,8 @@ DEFINE_bool(log_code, false,
DEFINE_bool(log_gc, false, DEFINE_bool(log_gc, false,
"Log heap samples on garbage collection for the hp2ps tool.") "Log heap samples on garbage collection for the hp2ps tool.")
DEFINE_bool(log_handles, false, "Log global handle events.") DEFINE_bool(log_handles, false, "Log global handle events.")
DEFINE_bool(log_snapshot_positions, false,
"log positions of (de)serialized objects in the snapshot.")
DEFINE_bool(log_state_changes, false, "Log state changes.")
DEFINE_bool(log_suspect, false, "Log suspect operations.")
DEFINE_bool(log_producers, false, "Log stack traces of JS objects allocations.")

14
deps/v8/src/frames.cc

@ -176,7 +176,7 @@ StackFrame* StackFrameIterator::SingletonFor(StackFrame::Type type) {
StackTraceFrameIterator::StackTraceFrameIterator() {
if (!done() && !IsValidFrame()) Advance();
}
@ -184,10 +184,18 @@ void StackTraceFrameIterator::Advance() {
while (true) {
JavaScriptFrameIterator::Advance();
if (done()) return;
if (IsValidFrame()) return;
}
}
bool StackTraceFrameIterator::IsValidFrame() {
if (!frame()->function()->IsJSFunction()) return false;
Object* script = JSFunction::cast(frame()->function())->shared()->script();
// Don't show functions from native scripts to user.
return (script->IsScript() &&
Script::TYPE_NATIVE != Script::cast(script)->type()->value());
}
// -------------------------------------------------------------------------
@ -402,7 +410,7 @@ Object*& ExitFrame::code_slot() const {
Code* ExitFrame::code() const {
Object* code = code_slot();
if (code->IsSmi()) {
return Heap::debugger_statement_code();
} else {
return Code::cast(code);
}

6
deps/v8/src/frames.h

@ -589,6 +589,9 @@ class StackTraceFrameIterator: public JavaScriptFrameIterator {
public:
StackTraceFrameIterator();
void Advance();
private:
bool IsValidFrame();
}; };
@ -607,11 +610,12 @@ class SafeStackFrameIterator BASE_EMBEDDED {
void Advance();
void Reset();
static bool IsWithinBounds(
Address low_bound, Address high_bound, Address addr) {
return low_bound <= addr && addr <= high_bound;
}
private:
bool IsValidStackAddress(Address addr) const {
return IsWithinBounds(low_bound_, high_bound_, addr);
}

1155
deps/v8/src/full-codegen.cc

File diff suppressed because it is too large

452
deps/v8/src/full-codegen.h

@ -0,0 +1,452 @@
// Copyright 2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#ifndef V8_FULL_CODEGEN_H_
#define V8_FULL_CODEGEN_H_
#include "v8.h"
#include "ast.h"
namespace v8 {
namespace internal {
class FullCodeGenSyntaxChecker: public AstVisitor {
public:
FullCodeGenSyntaxChecker() : has_supported_syntax_(true) {}
void Check(FunctionLiteral* fun);
bool has_supported_syntax() { return has_supported_syntax_; }
private:
void VisitDeclarations(ZoneList<Declaration*>* decls);
void VisitStatements(ZoneList<Statement*>* stmts);
// AST node visit functions.
#define DECLARE_VISIT(type) virtual void Visit##type(type* node);
AST_NODE_LIST(DECLARE_VISIT)
#undef DECLARE_VISIT
bool has_supported_syntax_;
DISALLOW_COPY_AND_ASSIGN(FullCodeGenSyntaxChecker);
};
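The checker is a pre-pass: the compiler runs it over a function and picks the full code generator only when every construct is supported, falling back to the classic backend otherwise. A sketch of that selection step (illustrative; the real selection logic lives in compiler.cc):

// Sketch: use the syntax pre-pass to decide whether full-codegen applies.
static Handle<Code> TryFullCodegen(FunctionLiteral* fun,
                                   Handle<Script> script,
                                   bool is_eval) {
  FullCodeGenSyntaxChecker checker;
  checker.Check(fun);
  if (checker.has_supported_syntax()) {
    return FullCodeGenerator::MakeCode(fun, script, is_eval);
  }
  return Handle<Code>::null();  // caller falls back to the classic backend
}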
// -----------------------------------------------------------------------------
// Full code generator.
class FullCodeGenerator: public AstVisitor {
public:
enum Mode {
PRIMARY,
SECONDARY
};
FullCodeGenerator(MacroAssembler* masm, Handle<Script> script, bool is_eval)
: masm_(masm),
script_(script),
is_eval_(is_eval),
function_(NULL),
nesting_stack_(NULL),
loop_depth_(0),
location_(kStack),
true_label_(NULL),
false_label_(NULL) {
}
static Handle<Code> MakeCode(FunctionLiteral* fun,
Handle<Script> script,
bool is_eval);
void Generate(FunctionLiteral* fun, Mode mode);
private:
class Breakable;
class Iteration;
class TryCatch;
class TryFinally;
class Finally;
class ForIn;
class NestedStatement BASE_EMBEDDED {
public:
explicit NestedStatement(FullCodeGenerator* codegen) : codegen_(codegen) {
// Link into codegen's nesting stack.
previous_ = codegen->nesting_stack_;
codegen->nesting_stack_ = this;
}
virtual ~NestedStatement() {
// Unlink from codegen's nesting stack.
ASSERT_EQ(this, codegen_->nesting_stack_);
codegen_->nesting_stack_ = previous_;
}
virtual Breakable* AsBreakable() { return NULL; }
virtual Iteration* AsIteration() { return NULL; }
virtual TryCatch* AsTryCatch() { return NULL; }
virtual TryFinally* AsTryFinally() { return NULL; }
virtual Finally* AsFinally() { return NULL; }
virtual ForIn* AsForIn() { return NULL; }
virtual bool IsContinueTarget(Statement* target) { return false; }
virtual bool IsBreakTarget(Statement* target) { return false; }
// Generate code to leave the nested statement. This includes
// cleaning up any stack elements in use and restoring the
// stack to the expectations of the surrounding statements.
// Takes a number of stack elements currently on top of the
// nested statement's stack, and returns a number of stack
// elements left on top of the surrounding statement's stack.
// The generated code must preserve the result register (which
// contains the value in case of a return).
virtual int Exit(int stack_depth) {
// Default implementation for the case where there is
// nothing to clean up.
return stack_depth;
}
NestedStatement* outer() { return previous_; }
protected:
MacroAssembler* masm() { return codegen_->masm(); }
private:
FullCodeGenerator* codegen_;
NestedStatement* previous_;
DISALLOW_COPY_AND_ASSIGN(NestedStatement);
};
class Breakable : public NestedStatement {
public:
Breakable(FullCodeGenerator* codegen,
BreakableStatement* break_target)
: NestedStatement(codegen),
target_(break_target) {}
virtual ~Breakable() {}
virtual Breakable* AsBreakable() { return this; }
virtual bool IsBreakTarget(Statement* statement) {
return target_ == statement;
}
BreakableStatement* statement() { return target_; }
Label* break_target() { return &break_target_label_; }
private:
BreakableStatement* target_;
Label break_target_label_;
DISALLOW_COPY_AND_ASSIGN(Breakable);
};
class Iteration : public Breakable {
public:
Iteration(FullCodeGenerator* codegen,
IterationStatement* iteration_statement)
: Breakable(codegen, iteration_statement) {}
virtual ~Iteration() {}
virtual Iteration* AsIteration() { return this; }
virtual bool IsContinueTarget(Statement* statement) {
return this->statement() == statement;
}
Label* continue_target() { return &continue_target_label_; }
private:
Label continue_target_label_;
DISALLOW_COPY_AND_ASSIGN(Iteration);
};
// The environment inside the try block of a try/catch statement.
class TryCatch : public NestedStatement {
public:
explicit TryCatch(FullCodeGenerator* codegen, Label* catch_entry)
: NestedStatement(codegen), catch_entry_(catch_entry) { }
virtual ~TryCatch() {}
virtual TryCatch* AsTryCatch() { return this; }
Label* catch_entry() { return catch_entry_; }
virtual int Exit(int stack_depth);
private:
Label* catch_entry_;
DISALLOW_COPY_AND_ASSIGN(TryCatch);
};
// The environment inside the try block of a try/finally statement.
class TryFinally : public NestedStatement {
public:
explicit TryFinally(FullCodeGenerator* codegen, Label* finally_entry)
: NestedStatement(codegen), finally_entry_(finally_entry) { }
virtual ~TryFinally() {}
virtual TryFinally* AsTryFinally() { return this; }
Label* finally_entry() { return finally_entry_; }
virtual int Exit(int stack_depth);
private:
Label* finally_entry_;
DISALLOW_COPY_AND_ASSIGN(TryFinally);
};
// A Finally represents being inside a finally block.
// Abnormal termination of the finally block needs to clean up
// the block's parameters from the stack.
class Finally : public NestedStatement {
public:
explicit Finally(FullCodeGenerator* codegen) : NestedStatement(codegen) { }
virtual ~Finally() {}
virtual Finally* AsFinally() { return this; }
virtual int Exit(int stack_depth) {
return stack_depth + kFinallyStackElementCount;
}
private:
// Number of extra stack slots occupied during a finally block.
static const int kFinallyStackElementCount = 2;
DISALLOW_COPY_AND_ASSIGN(Finally);
};
// A ForIn represents being inside a for-in loop.
// Abnormal termination of the for-in block needs to clean up
// the block's temporary storage from the stack.
class ForIn : public Iteration {
public:
ForIn(FullCodeGenerator* codegen,
ForInStatement* statement)
: Iteration(codegen, statement) { }
virtual ~ForIn() {}
virtual ForIn* AsForIn() { return this; }
virtual int Exit(int stack_depth) {
return stack_depth + kForInStackElementCount;
}
private:
// TODO(lrn): Check that this value is correct when implementing
// for-in.
static const int kForInStackElementCount = 5;
DISALLOW_COPY_AND_ASSIGN(ForIn);
};
enum Location {
kAccumulator,
kStack
};
int SlotOffset(Slot* slot);
// Emit code to convert a pure value (in a register, slot, as a literal,
// or on top of the stack) into the result expected according to an
// expression context.
void Apply(Expression::Context context, Register reg);
// Slot cannot have type Slot::LOOKUP.
void Apply(Expression::Context context, Slot* slot);
void Apply(Expression::Context context, Literal* lit);
void ApplyTOS(Expression::Context context);
// Emit code to discard count elements from the top of stack, then convert
// a pure value into the result expected according to an expression
// context.
void DropAndApply(int count, Expression::Context context, Register reg);
// Emit code to convert pure control flow to a pair of labels into the
// result expected according to an expression context.
void Apply(Expression::Context context,
Label* materialize_true,
Label* materialize_false);
// Helper function to convert a pure value into a test context. The value
// is expected on the stack or the accumulator, depending on the platform.
// See the platform-specific implementation for details.
void DoTest(Expression::Context context);
void Move(Slot* dst, Register source, Register scratch1, Register scratch2);
void Move(Register dst, Slot* source);
// Return an operand used to read/write to a known (i.e., non-LOOKUP) slot.
// May emit code to traverse the context chain, destroying the scratch
// register.
MemOperand EmitSlotSearch(Slot* slot, Register scratch);
void VisitForEffect(Expression* expr) {
Expression::Context saved_context = context_;
context_ = Expression::kEffect;
Visit(expr);
context_ = saved_context;
}
void VisitForValue(Expression* expr, Location where) {
Expression::Context saved_context = context_;
Location saved_location = location_;
context_ = Expression::kValue;
location_ = where;
Visit(expr);
context_ = saved_context;
location_ = saved_location;
}
void VisitForControl(Expression* expr, Label* if_true, Label* if_false) {
Expression::Context saved_context = context_;
Label* saved_true = true_label_;
Label* saved_false = false_label_;
context_ = Expression::kTest;
true_label_ = if_true;
false_label_ = if_false;
Visit(expr);
context_ = saved_context;
true_label_ = saved_true;
false_label_ = saved_false;
}
void VisitForValueControl(Expression* expr,
Location where,
Label* if_true,
Label* if_false) {
Expression::Context saved_context = context_;
Location saved_location = location_;
Label* saved_true = true_label_;
Label* saved_false = false_label_;
context_ = Expression::kValueTest;
location_ = where;
true_label_ = if_true;
false_label_ = if_false;
Visit(expr);
context_ = saved_context;
location_ = saved_location;
true_label_ = saved_true;
false_label_ = saved_false;
}
void VisitForControlValue(Expression* expr,
Location where,
Label* if_true,
Label* if_false) {
Expression::Context saved_context = context_;
Location saved_location = location_;
Label* saved_true = true_label_;
Label* saved_false = false_label_;
context_ = Expression::kTestValue;
location_ = where;
true_label_ = if_true;
false_label_ = if_false;
Visit(expr);
context_ = saved_context;
location_ = saved_location;
true_label_ = saved_true;
false_label_ = saved_false;
}
void VisitDeclarations(ZoneList<Declaration*>* declarations);
void DeclareGlobals(Handle<FixedArray> pairs);
// Platform-specific return sequence
void EmitReturnSequence(int position);
// Platform-specific code sequences for calls
void EmitCallWithStub(Call* expr);
void EmitCallWithIC(Call* expr, Handle<Object> name, RelocInfo::Mode mode);
// Platform-specific code for loading variables.
void EmitVariableLoad(Variable* expr, Expression::Context context);
// Platform-specific support for compiling assignments.
// Load a value from a named property.
// The receiver is left on the stack by the IC.
void EmitNamedPropertyLoad(Property* expr);
// Load a value from a keyed property.
// The receiver and the key are left on the stack by the IC.
void EmitKeyedPropertyLoad(Property* expr);
// Apply the compound assignment operator. Expects the left operand on top
// of the stack and the right one in the accumulator.
void EmitBinaryOp(Token::Value op, Expression::Context context);
// Complete a variable assignment. The right-hand-side value is expected
// in the accumulator.
void EmitVariableAssignment(Variable* var, Expression::Context context);
// Complete a named property assignment. The receiver is expected on top
// of the stack and the right-hand-side value in the accumulator.
void EmitNamedPropertyAssignment(Assignment* expr);
// Complete a keyed property assignment. The receiver and key are
// expected on top of the stack and the right-hand-side value in the
// accumulator.
void EmitKeyedPropertyAssignment(Assignment* expr);
void SetFunctionPosition(FunctionLiteral* fun);
void SetReturnPosition(FunctionLiteral* fun);
void SetStatementPosition(Statement* stmt);
void SetStatementPosition(int pos);
void SetSourcePosition(int pos);
// Non-local control flow support.
void EnterFinallyBlock();
void ExitFinallyBlock();
// Loop nesting counter.
int loop_depth() { return loop_depth_; }
void increment_loop_depth() { loop_depth_++; }
void decrement_loop_depth() {
ASSERT(loop_depth_ > 0);
loop_depth_--;
}
MacroAssembler* masm() { return masm_; }
static Register result_register();
static Register context_register();
// Set fields in the stack frame. Offsets are the frame pointer relative
// offsets defined in, e.g., StandardFrameConstants.
void StoreToFrameField(int frame_offset, Register value);
// Load a value from the current context. Indices are defined as an enum
// in v8::internal::Context.
void LoadContextField(Register dst, int context_index);
// AST node visit functions.
#define DECLARE_VISIT(type) virtual void Visit##type(type* node);
AST_NODE_LIST(DECLARE_VISIT)
#undef DECLARE_VISIT
// Handles the short-circuited logical binary operations in VisitBinaryOperation.
void EmitLogicalOperation(BinaryOperation* expr);
MacroAssembler* masm_;
Handle<Script> script_;
bool is_eval_;
FunctionLiteral* function_;
Label return_label_;
NestedStatement* nesting_stack_;
int loop_depth_;
Expression::Context context_;
Location location_;
Label* true_label_;
Label* false_label_;
friend class NestedStatement;
DISALLOW_COPY_AND_ASSIGN(FullCodeGenerator);
};
} } // namespace v8::internal
#endif // V8_FULL_CODEGEN_H_
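The VisitForEffect/VisitForValue/VisitForControl helpers above all follow the same save/set/visit/restore discipline around the shared context_ field. A minimal standalone C++ sketch of that pattern follows; it is illustrative only and the names (SketchVisitor, Context) are invented, not the V8 classes.

#include <cassert>
#include <cstdio>

enum Context { kEffect, kValue, kTest };

class SketchVisitor {
 public:
  SketchVisitor() : context_(kEffect) {}

  // Visit a subexpression in kValue context, then restore the caller's
  // context, mirroring FullCodeGenerator::VisitForValue above.
  void VisitForValue(int expr) {
    Context saved_context = context_;   // save
    context_ = kValue;                  // set
    Visit(expr);                        // visit
    context_ = saved_context;           // restore
  }

  void Visit(int expr) {
    std::printf("visiting %d in context %d\n", expr, context_);
  }

 private:
  Context context_;
};

int main() {
  SketchVisitor v;
  v.VisitForValue(42);  // prints "visiting 42 in context 1"
  return 0;
}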

7
deps/v8/src/globals.h

@@ -204,6 +204,7 @@ class AccessorInfo;
 class Allocation;
 class Arguments;
 class Assembler;
+class AssertNoAllocation;
 class BreakableStatement;
 class Code;
 class CodeGenerator;
@@ -379,6 +380,12 @@ enum InLoopFlag {
 };
 
+enum CallFunctionFlags {
+  NO_CALL_FUNCTION_FLAGS = 0,
+  RECEIVER_MIGHT_BE_VALUE = 1 << 0  // Receiver might not be a JSObject.
+};
+
 // Type of properties.
 // Order of properties is significant.
 // Must fit in the BitField PropertyDetails::TypeField.

44
deps/v8/src/handles.cc

@@ -31,6 +31,7 @@
 #include "api.h"
 #include "arguments.h"
 #include "bootstrapper.h"
+#include "codegen.h"
 #include "compiler.h"
 #include "debug.h"
 #include "execution.h"
@@ -666,31 +667,52 @@ Handle<FixedArray> GetEnumPropertyKeys(Handle<JSObject> object,
 }
 
-bool CompileLazyShared(Handle<SharedFunctionInfo> shared,
-                       ClearExceptionFlag flag,
-                       int loop_nesting) {
+bool EnsureCompiled(Handle<SharedFunctionInfo> shared,
+                    ClearExceptionFlag flag) {
+  return shared->is_compiled() || CompileLazyShared(shared, flag);
+}
+
+static bool CompileLazyHelper(CompilationInfo* info,
+                              ClearExceptionFlag flag) {
   // Compile the source information to a code object.
-  ASSERT(!shared->is_compiled());
-  bool result = Compiler::CompileLazy(shared, loop_nesting);
+  ASSERT(!info->shared_info()->is_compiled());
+  bool result = Compiler::CompileLazy(info);
   ASSERT(result != Top::has_pending_exception());
   if (!result && flag == CLEAR_EXCEPTION) Top::clear_pending_exception();
   return result;
 }
 
-bool CompileLazy(Handle<JSFunction> function, ClearExceptionFlag flag) {
-  // Compile the source information to a code object.
+bool CompileLazyShared(Handle<SharedFunctionInfo> shared,
+                       ClearExceptionFlag flag) {
+  CompilationInfo info(shared, Handle<Object>::null(), 0);
+  return CompileLazyHelper(&info, flag);
+}
+
+bool CompileLazy(Handle<JSFunction> function,
+                 Handle<Object> receiver,
+                 ClearExceptionFlag flag) {
   Handle<SharedFunctionInfo> shared(function->shared());
-  return CompileLazyShared(shared, flag, 0);
+  CompilationInfo info(shared, receiver, 0);
+  bool result = CompileLazyHelper(&info, flag);
+  LOG(FunctionCreateEvent(*function));
+  return result;
 }
 
-bool CompileLazyInLoop(Handle<JSFunction> function, ClearExceptionFlag flag) {
-  // Compile the source information to a code object.
+bool CompileLazyInLoop(Handle<JSFunction> function,
+                       Handle<Object> receiver,
+                       ClearExceptionFlag flag) {
   Handle<SharedFunctionInfo> shared(function->shared());
-  return CompileLazyShared(shared, flag, 1);
+  CompilationInfo info(shared, receiver, 1);
+  bool result = CompileLazyHelper(&info, flag);
+  LOG(FunctionCreateEvent(*function));
+  return result;
 }
 
 OptimizedObjectForAddingMultipleProperties::
 OptimizedObjectForAddingMultipleProperties(Handle<JSObject> object,
                                            int expected_additional_properties,

15
deps/v8/src/handles.h

@@ -313,12 +313,19 @@ Handle<Object> SetPrototype(Handle<JSFunction> function,
 // false if the compilation resulted in a stack overflow.
 enum ClearExceptionFlag { KEEP_EXCEPTION, CLEAR_EXCEPTION };
 
+bool EnsureCompiled(Handle<SharedFunctionInfo> shared,
+                    ClearExceptionFlag flag);
+
 bool CompileLazyShared(Handle<SharedFunctionInfo> shared,
-                       ClearExceptionFlag flag,
-                       int loop_nesting);
+                       ClearExceptionFlag flag);
 
-bool CompileLazy(Handle<JSFunction> function, ClearExceptionFlag flag);
-bool CompileLazyInLoop(Handle<JSFunction> function, ClearExceptionFlag flag);
+bool CompileLazy(Handle<JSFunction> function,
+                 Handle<Object> receiver,
+                 ClearExceptionFlag flag);
+
+bool CompileLazyInLoop(Handle<JSFunction> function,
+                       Handle<Object> receiver,
+                       ClearExceptionFlag flag);
 
 // Returns the lazy compilation stub for argc arguments.
 Handle<Code> ComputeLazyCompile(int argc);

6
deps/v8/src/heap-inl.h

@@ -152,7 +152,11 @@ Object* Heap::AllocateRawCell() {
 bool Heap::InNewSpace(Object* object) {
-  return new_space_.Contains(object);
+  bool result = new_space_.Contains(object);
+  ASSERT(!result ||                 // Either not in new space
+         gc_state_ != NOT_IN_GC ||  // ... or in the middle of GC
+         InToSpace(object));        // ... or in to-space (where we allocate).
+  return result;
 }
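The new assertion encodes an implication: being in new space outside of GC must mean being in to-space. A trivial standalone C++ restatement of that invariant, with plain booleans standing in for the real heap queries (not V8 code):

#include <cassert>

bool CheckNewSpaceInvariant(bool in_new_space, bool in_gc, bool in_to_space) {
  // "A implies B" written as "!A || B", matching the ASSERT above.
  return !in_new_space || in_gc || in_to_space;
}

int main() {
  assert(CheckNewSpaceInvariant(true, false, true));    // normal allocation
  assert(CheckNewSpaceInvariant(false, false, false));  // old-space object
  assert(!CheckNewSpaceInvariant(true, false, false));  // would trip the ASSERT
  return 0;
}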

3
deps/v8/src/heap-profiler.cc

@@ -625,8 +625,7 @@ void HeapProfiler::WriteSample() {
   ConstructorHeapProfile js_cons_profile;
   RetainerHeapProfile js_retainer_profile;
   HeapIterator iterator;
-  while (iterator.has_next()) {
-    HeapObject* obj = iterator.next();
+  for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
     CollectStats(obj, info);
     js_cons_profile.CollectStats(obj);
     js_retainer_profile.CollectStats(obj);
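This and the heap.cc changes below all switch from a has_next()/next() pair to a single next() that returns NULL when exhausted, which callers consume with a for-loop. A minimal standalone C++ sketch of that consumer idiom (the iterator here is invented for illustration and uses 0 where the heap code uses NULL):

#include <cstdio>

class SketchIterator {
 public:
  explicit SketchIterator(int count) : remaining_(count) {}
  // Returns the next item, or 0 when the iteration is done, mirroring
  // HeapIterator::next() returning NULL at the end.
  int next() { return remaining_ > 0 ? remaining_-- : 0; }

 private:
  int remaining_;
};

int main() {
  SketchIterator it(3);
  for (int obj = it.next(); obj != 0; obj = it.next()) {
    std::printf("got %d\n", obj);  // prints 3, 2, 1
  }
  return 0;
}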

112
deps/v8/src/heap.cc

@@ -76,8 +76,8 @@ int Heap::amount_of_external_allocated_memory_at_last_global_gc_ = 0;
 // semispace_size_ should be a power of 2 and old_generation_size_ should be
 // a multiple of Page::kPageSize.
 #if defined(ANDROID)
-int Heap::max_semispace_size_ = 512*KB;
-int Heap::max_old_generation_size_ = 128*MB;
+int Heap::max_semispace_size_ = 2*MB;
+int Heap::max_old_generation_size_ = 192*MB;
 int Heap::initial_semispace_size_ = 128*KB;
 size_t Heap::code_range_size_ = 0;
 #elif defined(V8_TARGET_ARCH_X64)
@@ -327,7 +327,7 @@ void Heap::GarbageCollectionPrologue() {
 int Heap::SizeOfObjects() {
   int total = 0;
   AllSpaces spaces;
-  while (Space* space = spaces.next()) {
+  for (Space* space = spaces.next(); space != NULL; space = spaces.next()) {
     total += space->Size();
   }
   return total;
@@ -732,13 +732,14 @@ static void VerifyNonPointerSpacePointers() {
   // do not expect them.
   VerifyNonPointerSpacePointersVisitor v;
   HeapObjectIterator code_it(Heap::code_space());
-  while (code_it.has_next()) {
-    HeapObject* object = code_it.next();
+  for (HeapObject* object = code_it.next();
+       object != NULL; object = code_it.next())
     object->Iterate(&v);
-  }
 
   HeapObjectIterator data_it(Heap::old_data_space());
-  while (data_it.has_next()) data_it.next()->Iterate(&v);
+  for (HeapObject* object = data_it.next();
+       object != NULL; object = data_it.next())
+    object->Iterate(&v);
 }
@@ -804,8 +805,8 @@ void Heap::Scavenge() {
   // Copy objects reachable from cells by scavenging cell values directly.
   HeapObjectIterator cell_iterator(cell_space_);
-  while (cell_iterator.has_next()) {
-    HeapObject* cell = cell_iterator.next();
+  for (HeapObject* cell = cell_iterator.next();
+       cell != NULL; cell = cell_iterator.next()) {
     if (cell->IsJSGlobalPropertyCell()) {
       Address value_address =
           reinterpret_cast<Address>(cell) +
@@ -1013,13 +1014,15 @@ void Heap::RebuildRSets() {
 void Heap::RebuildRSets(PagedSpace* space) {
   HeapObjectIterator it(space);
-  while (it.has_next()) Heap::UpdateRSet(it.next());
+  for (HeapObject* obj = it.next(); obj != NULL; obj = it.next())
+    Heap::UpdateRSet(obj);
 }
 
 void Heap::RebuildRSets(LargeObjectSpace* space) {
   LargeObjectIterator it(space);
-  while (it.has_next()) Heap::UpdateRSet(it.next());
+  for (HeapObject* obj = it.next(); obj != NULL; obj = it.next())
+    Heap::UpdateRSet(obj);
 }
@@ -1182,7 +1185,10 @@ Object* Heap::AllocatePartialMap(InstanceType instance_type,
   reinterpret_cast<Map*>(result)->set_instance_type(instance_type);
   reinterpret_cast<Map*>(result)->set_instance_size(instance_size);
   reinterpret_cast<Map*>(result)->set_inobject_properties(0);
+  reinterpret_cast<Map*>(result)->set_pre_allocated_property_fields(0);
   reinterpret_cast<Map*>(result)->set_unused_property_fields(0);
+  reinterpret_cast<Map*>(result)->set_bit_field(0);
+  reinterpret_cast<Map*>(result)->set_bit_field2(0);
   return result;
 }
@@ -1203,7 +1209,7 @@ Object* Heap::AllocateMap(InstanceType instance_type, int instance_size) {
   map->set_code_cache(empty_fixed_array());
   map->set_unused_property_fields(0);
   map->set_bit_field(0);
-  map->set_bit_field2(0);
+  map->set_bit_field2(1 << Map::kIsExtensible);
 
   // If the map object is aligned fill the padding area with Smi 0 objects.
   if (Map::kPadStart < Map::kSize) {
@@ -1493,8 +1499,8 @@ void Heap::CreateRegExpCEntryStub() {
 void Heap::CreateCEntryDebugBreakStub() {
-  CEntryDebugBreakStub stub;
-  set_c_entry_debug_break_code(*stub.GetCode());
+  DebuggerStatementStub stub;
+  set_debugger_statement_code(*stub.GetCode());
 }
@@ -1520,8 +1526,8 @@ void Heap::CreateFixedStubs() {
   //  { CEntryStub stub;
   //    c_entry_code_ = *stub.GetCode();
   //  }
-  //  { CEntryDebugBreakStub stub;
-  //    c_entry_debug_break_code_ = *stub.GetCode();
+  //  { DebuggerStatementStub stub;
+  //    debugger_statement_code_ = *stub.GetCode();
   //  }
   // To workaround the problem, make separate functions without inlining.
   Heap::CreateCEntryStub();
@@ -1723,7 +1729,7 @@ void Heap::SetNumberStringCache(Object* number, String* string) {
   int mask = (number_string_cache()->length() >> 1) - 1;
   if (number->IsSmi()) {
     hash = smi_get_hash(Smi::cast(number)) & mask;
-    number_string_cache()->set(hash * 2, number, SKIP_WRITE_BARRIER);
+    number_string_cache()->set(hash * 2, Smi::cast(number));
   } else {
     hash = double_get_hash(number->Number()) & mask;
     number_string_cache()->set(hash * 2, number);
@@ -1980,8 +1986,10 @@ Object* Heap::AllocateConsString(String* first, String* second) {
   Object* result = Allocate(map, NEW_SPACE);
   if (result->IsFailure()) return result;
+
+  AssertNoAllocation no_gc;
   ConsString* cons_string = ConsString::cast(result);
-  WriteBarrierMode mode = cons_string->GetWriteBarrierMode();
+  WriteBarrierMode mode = cons_string->GetWriteBarrierMode(no_gc);
   cons_string->set_length(length);
   cons_string->set_hash_field(String::kEmptyHashField);
   cons_string->set_first(first, mode);
@@ -2279,7 +2287,7 @@ Object* Heap::InitializeFunction(JSFunction* function,
   function->set_shared(shared);
   function->set_prototype_or_initial_map(prototype);
   function->set_context(undefined_value());
-  function->set_literals(empty_fixed_array(), SKIP_WRITE_BARRIER);
+  function->set_literals(empty_fixed_array());
   return function;
 }
@@ -2398,8 +2406,10 @@ Object* Heap::AllocateInitialMap(JSFunction* fun) {
       String* name = fun->shared()->GetThisPropertyAssignmentName(i);
       ASSERT(name->IsSymbol());
       FieldDescriptor field(name, i, NONE);
+      field.SetEnumerationIndex(i);
       descriptors->Set(i, &field);
     }
+    descriptors->SetNextEnumerationIndex(count);
     descriptors->Sort();
     map->set_instance_descriptors(descriptors);
     map->set_pre_allocated_property_fields(count);
@@ -2880,8 +2890,10 @@ Object* Heap::CopyFixedArray(FixedArray* src) {
   HeapObject::cast(obj)->set_map(src->map());
   FixedArray* result = FixedArray::cast(obj);
   result->set_length(len);
+
   // Copy the content
-  WriteBarrierMode mode = result->GetWriteBarrierMode();
+  AssertNoAllocation no_gc;
+  WriteBarrierMode mode = result->GetWriteBarrierMode(no_gc);
   for (int i = 0; i < len; i++) result->set(i, src->get(i), mode);
   return result;
 }
@@ -2899,6 +2911,7 @@ Object* Heap::AllocateFixedArray(int length) {
     Object* value = undefined_value();
     // Initialize body.
     for (int index = 0; index < length; index++) {
+      ASSERT(!Heap::InNewSpace(value));  // value = undefined
      array->set(index, value, SKIP_WRITE_BARRIER);
     }
   }
@@ -2954,6 +2967,7 @@ Object* Heap::AllocateFixedArray(int length, PretenureFlag pretenure) {
   array->set_length(length);
   Object* value = undefined_value();
   for (int index = 0; index < length; index++) {
+    ASSERT(!Heap::InNewSpace(value));  // value = undefined
     array->set(index, value, SKIP_WRITE_BARRIER);
   }
   return array;
@@ -2971,6 +2985,7 @@ Object* Heap::AllocateFixedArrayWithHoles(int length) {
     // Initialize body.
     Object* value = the_hole_value();
     for (int index = 0; index < length; index++) {
+      ASSERT(!Heap::InNewSpace(value));  // value = the hole
      array->set(index, value, SKIP_WRITE_BARRIER);
     }
   }
@@ -3106,7 +3121,8 @@ void Heap::Print() {
   if (!HasBeenSetup()) return;
   Top::PrintStack();
   AllSpaces spaces;
-  while (Space* space = spaces.next()) space->Print();
+  for (Space* space = spaces.next(); space != NULL; space = spaces.next())
+    space->Print();
 }
@@ -3340,6 +3356,11 @@ void Heap::IterateRSet(PagedSpace* space, ObjectSlotCallback copy_object_func) {
 void Heap::IterateRoots(ObjectVisitor* v, VisitMode mode) {
   IterateStrongRoots(v, mode);
+  IterateWeakRoots(v, mode);
+}
+
+
+void Heap::IterateWeakRoots(ObjectVisitor* v, VisitMode mode) {
   v->VisitPointer(reinterpret_cast<Object**>(&roots_[kSymbolTableRootIndex]));
   v->Synchronize("symbol_table");
   if (mode != VISIT_ALL_IN_SCAVENGE) {
@@ -3394,6 +3415,20 @@ void Heap::IterateStrongRoots(ObjectVisitor* v, VisitMode mode) {
   // Iterate over pointers being held by inactive threads.
   ThreadManager::Iterate(v);
   v->Synchronize("threadmanager");
+
+  // Iterate over the pointers the Serialization/Deserialization code is
+  // holding.
+  // During garbage collection this keeps the partial snapshot cache alive.
+  // During deserialization of the startup snapshot this creates the partial
+  // snapshot cache and deserializes the objects it refers to. During
+  // serialization this does nothing, since the partial snapshot cache is
+  // empty. However the next thing we do is create the partial snapshot,
+  // filling up the partial snapshot cache with objects it needs as we go.
+  SerializerDeserializer::Iterate(v);
+  // We don't do a v->Synchronize call here, because in debug mode that will
+  // output a flag to the snapshot. However at this point the serializer and
+  // deserializer are deliberately a little unsynchronized (see above) so the
+  // checking of the sync flag in the snapshot would fail.
 }
@@ -3544,7 +3579,8 @@ bool Heap::Setup(bool create_heap_objects) {
   // Initialize map space.
   map_space_ = new MapSpace(FLAG_use_big_map_space
       ? max_old_generation_size_
-      : (MapSpace::kMaxMapPageIndex + 1) * Page::kPageSize,
+      : MapSpace::kMaxMapPageIndex * Page::kPageSize,
+      FLAG_max_map_space_pages,
       MAP_SPACE);
   if (map_space_ == NULL) return false;
   if (!map_space_->Setup(NULL, 0)) return false;
@@ -3647,7 +3683,8 @@ void Heap::TearDown() {
 void Heap::Shrink() {
   // Try to shrink all paged spaces.
   PagedSpaces spaces;
-  while (PagedSpace* space = spaces.next()) space->Shrink();
+  for (PagedSpace* space = spaces.next(); space != NULL; space = spaces.next())
+    space->Shrink();
 }
@@ -3656,7 +3693,8 @@ void Heap::Shrink() {
 void Heap::Protect() {
   if (HasBeenSetup()) {
     AllSpaces spaces;
-    while (Space* space = spaces.next()) space->Protect();
+    for (Space* space = spaces.next(); space != NULL; space = spaces.next())
+      space->Protect();
   }
 }
@@ -3664,7 +3702,8 @@ void Heap::Protect() {
 void Heap::Unprotect() {
   if (HasBeenSetup()) {
     AllSpaces spaces;
-    while (Space* space = spaces.next()) space->Unprotect();
+    for (Space* space = spaces.next(); space != NULL; space = spaces.next())
+      space->Unprotect();
   }
 }
@@ -3836,34 +3875,25 @@ void HeapIterator::Shutdown() {
 }
 
-bool HeapIterator::has_next() {
+HeapObject* HeapIterator::next() {
   // No iterator means we are done.
-  if (object_iterator_ == NULL) return false;
+  if (object_iterator_ == NULL) return NULL;
 
-  if (object_iterator_->has_next_object()) {
+  if (HeapObject* obj = object_iterator_->next_object()) {
     // If the current iterator has more objects we are fine.
-    return true;
+    return obj;
   } else {
     // Go though the spaces looking for one that has objects.
     while (space_iterator_->has_next()) {
      object_iterator_ = space_iterator_->next();
-      if (object_iterator_->has_next_object()) {
-        return true;
+      if (HeapObject* obj = object_iterator_->next_object()) {
+        return obj;
      }
    }
   }
   // Done with the last space.
   object_iterator_ = NULL;
-  return false;
-}
-
-
-HeapObject* HeapIterator::next() {
-  if (has_next()) {
-    return object_iterator_->next_object();
-  } else {
-    return NULL;
-  }
+  return NULL;
 }
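The SetNumberStringCache hunk above relies on a fixed pair layout in the backing array: entry i keeps its key at index 2*i and its value at 2*i + 1, with the mask computed from half the array length. A small standalone C++ sketch of that layout, using hypothetical types rather than V8's FixedArray:

#include <cstdio>
#include <string>
#include <vector>

int main() {
  const int kEntries = 8;                        // must be a power of two
  std::vector<std::string> cache(kEntries * 2);  // key slot + value slot per entry
  int mask = (static_cast<int>(cache.size()) >> 1) - 1;

  int number = 42;
  int hash = number & mask;                  // stand-in for smi_get_hash & mask
  cache[hash * 2] = std::to_string(number);  // key slot
  cache[hash * 2 + 1] = "forty-two";         // value slot

  std::printf("%s -> %s\n", cache[hash * 2].c_str(),
              cache[hash * 2 + 1].c_str());
  return 0;
}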

5
deps/v8/src/heap.h

@@ -101,7 +101,7 @@ namespace internal {
   V(Code, js_entry_code, JsEntryCode) \
   V(Code, js_construct_entry_code, JsConstructEntryCode) \
   V(Code, c_entry_code, CEntryCode) \
-  V(Code, c_entry_debug_break_code, CEntryDebugBreakCode) \
+  V(Code, debugger_statement_code, DebuggerStatementCode) \
   V(FixedArray, number_string_cache, NumberStringCache) \
   V(FixedArray, single_character_string_cache, SingleCharacterStringCache) \
   V(FixedArray, natives_source_cache, NativesSourceCache) \
@@ -690,6 +690,8 @@ class Heap : public AllStatic {
   static void IterateRoots(ObjectVisitor* v, VisitMode mode);
   // Iterates over all strong roots in the heap.
   static void IterateStrongRoots(ObjectVisitor* v, VisitMode mode);
+  // Iterates over all the other roots in the heap.
+  static void IterateWeakRoots(ObjectVisitor* v, VisitMode mode);
 
   // Iterates remembered set of an old space.
   static void IterateRSet(PagedSpace* space, ObjectSlotCallback callback);
@@ -1290,7 +1292,6 @@ class HeapIterator BASE_EMBEDDED {
   explicit HeapIterator();
   virtual ~HeapIterator();
 
-  bool has_next();
   HeapObject* next();
   void reset();

26
deps/v8/src/ia32/assembler-ia32.cc

@@ -860,6 +860,24 @@ void Assembler::cmpb(const Operand& op, int8_t imm8) {
 }
 
+void Assembler::cmpb(const Operand& dst, Register src) {
+  ASSERT(src.is_byte_register());
+  EnsureSpace ensure_space(this);
+  last_pc_ = pc_;
+  EMIT(0x38);
+  emit_operand(src, dst);
+}
+
+
+void Assembler::cmpb(Register dst, const Operand& src) {
+  ASSERT(dst.is_byte_register());
+  EnsureSpace ensure_space(this);
+  last_pc_ = pc_;
+  EMIT(0x3A);
+  emit_operand(dst, src);
+}
+
 void Assembler::cmpw(const Operand& op, Immediate imm16) {
   ASSERT(imm16.is_int16());
   EnsureSpace ensure_space(this);
@@ -1261,6 +1279,14 @@ void Assembler::test(Register reg, const Operand& op) {
 }
 
+void Assembler::test_b(Register reg, const Operand& op) {
+  EnsureSpace ensure_space(this);
+  last_pc_ = pc_;
+  EMIT(0x84);
+  emit_operand(reg, op);
+}
+
 void Assembler::test(const Operand& op, const Immediate& imm) {
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;

8
deps/v8/src/ia32/assembler-ia32.h

@@ -229,8 +229,9 @@ enum ScaleFactor {
   times_2 = 1,
   times_4 = 2,
   times_8 = 3,
-  times_pointer_size = times_4,
-  times_half_pointer_size = times_2
+  times_int_size = times_4,
+  times_half_pointer_size = times_2,
+  times_pointer_size = times_4
 };
@@ -559,6 +560,8 @@ class Assembler : public Malloced {
   void and_(const Operand& dst, const Immediate& x);
 
   void cmpb(const Operand& op, int8_t imm8);
+  void cmpb(Register src, const Operand& dst);
+  void cmpb(const Operand& dst, Register src);
   void cmpb_al(const Operand& op);
   void cmpw_ax(const Operand& op);
   void cmpw(const Operand& op, Immediate imm16);
@@ -624,6 +627,7 @@ class Assembler : public Malloced {
   void test(Register reg, const Immediate& imm);
   void test(Register reg, const Operand& op);
+  void test_b(Register reg, const Operand& op);
   void test(const Operand& op, const Immediate& imm);
 
   void xor_(Register dst, int32_t imm32);

65
deps/v8/src/ia32/builtins-ia32.cc

@@ -36,15 +36,36 @@ namespace internal {
 #define __ ACCESS_MASM(masm)
 
-void Builtins::Generate_Adaptor(MacroAssembler* masm, CFunctionId id) {
-  // TODO(428): Don't pass the function in a static variable.
-  ExternalReference passed = ExternalReference::builtin_passed_function();
-  __ mov(Operand::StaticVariable(passed), edi);
-
-  // The actual argument count has already been loaded into register
-  // eax, but JumpToRuntime expects eax to contain the number of
-  // arguments including the receiver.
-  __ inc(eax);
+void Builtins::Generate_Adaptor(MacroAssembler* masm,
+                                CFunctionId id,
+                                BuiltinExtraArguments extra_args) {
+  // ----------- S t a t e -------------
+  //  -- eax : number of arguments excluding receiver
+  //  -- edi : called function (only guaranteed when
+  //           extra_args requires it)
+  //  -- esi : context
+  //  -- esp[0] : return address
+  //  -- esp[4] : last argument
+  //  -- ...
+  //  -- esp[4 * argc] : first argument (argc == eax)
+  //  -- esp[4 * (argc +1)] : receiver
+  // -----------------------------------
+
+  // Insert extra arguments.
+  int num_extra_args = 0;
+  if (extra_args == NEEDS_CALLED_FUNCTION) {
+    num_extra_args = 1;
+    Register scratch = ebx;
+    __ pop(scratch);  // Save return address.
+    __ push(edi);
+    __ push(scratch);  // Restore return address.
+  } else {
+    ASSERT(extra_args == NO_EXTRA_ARGUMENTS);
+  }
+
+  // JumpToRuntime expects eax to contain the number of arguments
+  // including the receiver and the extra arguments.
+  __ add(Operand(eax), Immediate(num_extra_args + 1));
   __ JumpToRuntime(ExternalReference(id));
 }
@@ -81,7 +102,8 @@ void Builtins::Generate_JSConstructCall(MacroAssembler* masm) {
 }
 
-void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
+static void Generate_JSConstructStubHelper(MacroAssembler* masm,
+                                           bool is_api_function) {
   // Enter a construct frame.
   __ EnterConstructFrame();
@@ -277,8 +299,17 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
   __ j(greater_equal, &loop);
 
   // Call the function.
-  ParameterCount actual(eax);
-  __ InvokeFunction(edi, actual, CALL_FUNCTION);
+  if (is_api_function) {
+    __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
+    Handle<Code> code = Handle<Code>(
+        Builtins::builtin(Builtins::HandleApiCallConstruct));
+    ParameterCount expected(0);
+    __ InvokeCode(code, expected, expected,
+                  RelocInfo::CODE_TARGET, CALL_FUNCTION);
+  } else {
+    ParameterCount actual(eax);
+    __ InvokeFunction(edi, actual, CALL_FUNCTION);
+  }
 
   // Restore context from the frame.
   __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
@@ -319,6 +350,16 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
 }
 
+void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
+  Generate_JSConstructStubHelper(masm, false);
+}
+
+
+void Builtins::Generate_JSConstructStubApi(MacroAssembler* masm) {
+  Generate_JSConstructStubHelper(masm, true);
+}
+
 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                              bool is_construct) {
   // Clear the context before we push it when entering the JS frame.
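The argument-count adjustment in the Generate_Adaptor hunk above is simple arithmetic: eax holds argc excluding the receiver, so the runtime must see argc plus any inserted extra arguments plus one for the receiver. A plain C++ restatement of that arithmetic, with no assembler involved (function name invented for illustration):

#include <cassert>

int RuntimeArgumentCount(int argc, bool needs_called_function) {
  int num_extra_args = needs_called_function ? 1 : 0;  // edi pushed as an extra arg
  return argc + num_extra_args + 1;                    // +1 for the receiver
}

int main() {
  assert(RuntimeArgumentCount(2, false) == 3);  // NO_EXTRA_ARGUMENTS
  assert(RuntimeArgumentCount(2, true) == 4);   // NEEDS_CALLED_FUNCTION
  return 0;
}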

2047
deps/v8/src/ia32/codegen-ia32.cc

File diff suppressed because it is too large

154
deps/v8/src/ia32/codegen-ia32.h

@@ -1,4 +1,4 @@
-// Copyright 2006-2008 the V8 project authors. All rights reserved.
+// Copyright 2010 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
@@ -32,6 +32,7 @@ namespace v8 {
 namespace internal {
 
 // Forward declarations
+class CompilationInfo;
 class DeferredCode;
 class RegisterAllocator;
 class RegisterFile;
@@ -43,57 +44,70 @@ enum TypeofState { INSIDE_TYPEOF, NOT_INSIDE_TYPEOF };
 // -------------------------------------------------------------------------
 // Reference support
 
-// A reference is a C++ stack-allocated object that keeps an ECMA
-// reference on the execution stack while in scope. For variables
-// the reference is empty, indicating that it isn't necessary to
-// store state on the stack for keeping track of references to those.
-// For properties, we keep either one (named) or two (indexed) values
-// on the execution stack to represent the reference.
+// A reference is a C++ stack-allocated object that puts a
+// reference on the virtual frame. The reference may be consumed
+// by GetValue, TakeValue, SetValue, and Codegen::UnloadReference.
+// When the lifetime (scope) of a valid reference ends, it must have
+// been consumed, and be in state UNLOADED.
 class Reference BASE_EMBEDDED {
  public:
   // The values of the types is important, see size().
-  enum Type { ILLEGAL = -1, SLOT = 0, NAMED = 1, KEYED = 2 };
-  Reference(CodeGenerator* cgen, Expression* expression);
+  enum Type { UNLOADED = -2, ILLEGAL = -1, SLOT = 0, NAMED = 1, KEYED = 2 };
+  Reference(CodeGenerator* cgen,
+            Expression* expression,
+            bool persist_after_get = false);
   ~Reference();
 
   Expression* expression() const { return expression_; }
   Type type() const { return type_; }
   void set_type(Type value) {
-    ASSERT(type_ == ILLEGAL);
+    ASSERT_EQ(ILLEGAL, type_);
     type_ = value;
   }
 
+  void set_unloaded() {
+    ASSERT_NE(ILLEGAL, type_);
+    ASSERT_NE(UNLOADED, type_);
+    type_ = UNLOADED;
+  }
+
   // The size the reference takes up on the stack.
-  int size() const { return (type_ == ILLEGAL) ? 0 : type_; }
+  int size() const {
+    return (type_ < SLOT) ? 0 : type_;
+  }
 
   bool is_illegal() const { return type_ == ILLEGAL; }
   bool is_slot() const { return type_ == SLOT; }
   bool is_property() const { return type_ == NAMED || type_ == KEYED; }
+  bool is_unloaded() const { return type_ == UNLOADED; }
 
   // Return the name. Only valid for named property references.
   Handle<String> GetName();
 
   // Generate code to push the value of the reference on top of the
   // expression stack. The reference is expected to be already on top of
-  // the expression stack, and it is left in place with its value above it.
+  // the expression stack, and it is consumed by the call unless the
+  // reference is for a compound assignment.
+  // If the reference is not consumed, it is left in place under its value.
   void GetValue();
 
   // Like GetValue except that the slot is expected to be written to before
-  // being read from again. Thae value of the reference may be invalidated,
+  // being read from again. The value of the reference may be invalidated,
   // causing subsequent attempts to read it to fail.
   void TakeValue();
 
   // Generate code to store the value on top of the expression stack in the
   // reference. The reference is expected to be immediately below the value
-  // on the expression stack. The stored value is left in place (with the
-  // reference intact below it) to support chained assignments.
+  // on the expression stack. The value is stored in the location specified
+  // by the reference, and is left on top of the stack, after the reference
+  // is popped from beneath it (unloaded).
   void SetValue(InitState init_state);
 
  private:
   CodeGenerator* cgen_;
   Expression* expression_;
   Type type_;
+  // Keep the reference on the stack after get, so it can be used by set later.
+  bool persist_after_get_;
 };
@@ -266,7 +280,7 @@ class CodeGenState BASE_EMBEDDED {
 // -------------------------------------------------------------------------
-// Arguments allocation mode
+// Arguments allocation mode.
 
 enum ArgumentsAllocationMode {
   NO_ARGUMENTS_ALLOCATION,
@@ -280,11 +294,21 @@ enum ArgumentsAllocationMode {
 class CodeGenerator: public AstVisitor {
  public:
+  // Compilation mode. Either the compiler is used as the primary
+  // compiler and needs to setup everything or the compiler is used as
+  // the secondary compiler for split compilation and has to handle
+  // bailouts.
+  enum Mode {
+    PRIMARY,
+    SECONDARY
+  };
+
   // Takes a function literal, generates code for it. This function should only
   // be called by compiler.cc.
   static Handle<Code> MakeCode(FunctionLiteral* fun,
                                Handle<Script> script,
-                               bool is_eval);
+                               bool is_eval,
+                               CompilationInfo* info);
 
   // Printing of AST, etc. as requested by flags.
   static void MakeCodePrologue(FunctionLiteral* fun);
@@ -328,8 +352,7 @@ class CodeGenerator: public AstVisitor {
  private:
   // Construction/Destruction
-  CodeGenerator(int buffer_size, Handle<Script> script, bool is_eval);
-  virtual ~CodeGenerator() { delete masm_; }
+  CodeGenerator(MacroAssembler* masm, Handle<Script> script, bool is_eval);
 
   // Accessors
   Scope* scope() const { return scope_; }
@@ -365,7 +388,7 @@ class CodeGenerator: public AstVisitor {
   void VisitStatementsAndSpill(ZoneList<Statement*>* statements);
 
   // Main code generation function
-  void GenCode(FunctionLiteral* fun);
+  void Generate(FunctionLiteral* fun, Mode mode, CompilationInfo* info);
 
   // Generate the return sequence code. Should be called no more than
   // once per compiled function, immediately after binding the return
@@ -420,6 +443,11 @@ class CodeGenerator: public AstVisitor {
   // value in place.
   void StoreToSlot(Slot* slot, InitState init_state);
 
+  // Load a property of an object, returning it in a Result.
+  // The object and the property name are passed on the stack, and
+  // not changed.
+  Result EmitKeyedLoad(bool is_global);
+
   // Special code for typeof expressions: Unfortunately, we must
   // be careful when loading the expression in 'typeof'
   // expressions. We are not allowed to throw reference errors for
@@ -444,20 +472,20 @@ class CodeGenerator: public AstVisitor {
   // Emit code to perform a binary operation on a constant
   // smi and a likely smi. Consumes the Result *operand.
-  void ConstantSmiBinaryOperation(Token::Value op,
-                                  Result* operand,
-                                  Handle<Object> constant_operand,
-                                  StaticType* type,
-                                  bool reversed,
-                                  OverwriteMode overwrite_mode);
+  Result ConstantSmiBinaryOperation(Token::Value op,
+                                    Result* operand,
+                                    Handle<Object> constant_operand,
+                                    StaticType* type,
+                                    bool reversed,
+                                    OverwriteMode overwrite_mode);
 
   // Emit code to perform a binary operation on two likely smis.
   // The code to handle smi arguments is produced inline.
   // Consumes the Results *left and *right.
-  void LikelySmiBinaryOperation(Token::Value op,
-                                Result* left,
-                                Result* right,
-                                OverwriteMode overwrite_mode);
+  Result LikelySmiBinaryOperation(Token::Value op,
+                                  Result* left,
+                                  Result* right,
+                                  OverwriteMode overwrite_mode);
 
   void Comparison(AstNode* node,
                   Condition cc,
@@ -475,12 +503,14 @@ class CodeGenerator: public AstVisitor {
   void StoreUnsafeSmiToLocal(int offset, Handle<Object> value);
   void PushUnsafeSmi(Handle<Object> value);
 
-  void CallWithArguments(ZoneList<Expression*>* arguments, int position);
+  void CallWithArguments(ZoneList<Expression*>* arguments,
+                         CallFunctionFlags flags,
+                         int position);
 
-  // Use an optimized version of Function.prototype.apply that avoid
-  // allocating the arguments object and just copies the arguments
-  // from the stack.
-  void CallApplyLazy(Property* apply,
+  // An optimized implementation of expressions of the form
+  // x.apply(y, arguments). We call x the applicand and y the receiver.
+  // The optimization avoids allocating an arguments object if possible.
+  void CallApplyLazy(Expression* applicand,
                      Expression* receiver,
                      VariableProxy* arguments,
                      int position);
@@ -515,6 +545,7 @@ class CodeGenerator: public AstVisitor {
   void GenerateIsArray(ZoneList<Expression*>* args);
   void GenerateIsObject(ZoneList<Expression*>* args);
   void GenerateIsFunction(ZoneList<Expression*>* args);
+  void GenerateIsUndetectableObject(ZoneList<Expression*>* args);
 
   // Support for construct call checks.
   void GenerateIsConstructCall(ZoneList<Expression*>* args);
@@ -612,7 +643,8 @@ class CodeGenerator: public AstVisitor {
   friend class Reference;
   friend class Result;
   friend class FastCodeGenerator;
-  friend class CodeGenSelector;
+  friend class FullCodeGenerator;
+  friend class FullCodeGenSyntaxChecker;
 
   friend class CodeGeneratorPatcher;  // Used in test-log-stack-tracer.cc
@@ -620,39 +652,6 @@ class CodeGenerator: public AstVisitor {
 };
 
-class CallFunctionStub: public CodeStub {
- public:
-  CallFunctionStub(int argc, InLoopFlag in_loop)
-      : argc_(argc), in_loop_(in_loop) { }
-
-  void Generate(MacroAssembler* masm);
-
- private:
-  int argc_;
-  InLoopFlag in_loop_;
-
-#ifdef DEBUG
-  void Print() { PrintF("CallFunctionStub (args %d)\n", argc_); }
-#endif
-
-  Major MajorKey() { return CallFunction; }
-  int MinorKey() { return argc_; }
-
-  InLoopFlag InLoop() { return in_loop_; }
-};
-
-
-class ToBooleanStub: public CodeStub {
- public:
-  ToBooleanStub() { }
-
-  void Generate(MacroAssembler* masm);
-
- private:
-  Major MajorKey() { return ToBoolean; }
-  int MinorKey() { return 0; }
-};
-
 // Flag that indicates how to generate code for the stub GenericBinaryOpStub.
 enum GenericBinaryFlags {
   NO_GENERIC_BINARY_FLAGS = 0,
@@ -682,6 +681,11 @@ class GenericBinaryOpStub: public CodeStub {
   void GenerateCall(MacroAssembler* masm, Register left, Smi* right);
   void GenerateCall(MacroAssembler* masm, Smi* left, Register right);
 
+  Result GenerateCall(MacroAssembler* masm,
+                      VirtualFrame* frame,
+                      Result* left,
+                      Result* right);
+
  private:
   Token::Value op_;
   OverwriteMode mode_;
@@ -728,11 +732,11 @@ class GenericBinaryOpStub: public CodeStub {
   void GenerateSmiCode(MacroAssembler* masm, Label* slow);
   void GenerateLoadArguments(MacroAssembler* masm);
   void GenerateReturn(MacroAssembler* masm);
+  void GenerateHeapResultAllocation(MacroAssembler* masm, Label* alloc_failure);
 
   bool ArgsInRegistersSupported() {
-    return ((op_ == Token::ADD) || (op_ == Token::SUB)
-        || (op_ == Token::MUL) || (op_ == Token::DIV))
-        && flags_ != NO_SMI_CODE_IN_STUB;
+    return op_ == Token::ADD || op_ == Token::SUB
+        || op_ == Token::MUL || op_ == Token::DIV;
   }
   bool IsOperationCommutative() {
     return (op_ == Token::ADD) || (op_ == Token::MUL);
@@ -741,8 +745,8 @@ class GenericBinaryOpStub: public CodeStub {
   void SetArgsInRegisters() { args_in_registers_ = true; }
   void SetArgsReversed() { args_reversed_ = true; }
   bool HasSmiCodeInStub() { return (flags_ & NO_SMI_CODE_IN_STUB) == 0; }
-  bool HasArgumentsInRegisters() { return args_in_registers_; }
-  bool HasArgumentsReversed() { return args_reversed_; }
+  bool HasArgsInRegisters() { return args_in_registers_; }
+  bool HasArgsReversed() { return args_reversed_; }
 };
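The Reference::size() change above works because the new UNLOADED state sits below ILLEGAL in the enum, so a single comparison against SLOT covers both zero-size cases. A standalone C++ sketch of just that arithmetic, using an illustrative copy of the enum rather than the real class:

#include <cassert>

enum Type { UNLOADED = -2, ILLEGAL = -1, SLOT = 0, NAMED = 1, KEYED = 2 };

int SizeOf(Type type) {
  return (type < SLOT) ? 0 : type;  // UNLOADED and ILLEGAL occupy no stack slots
}

int main() {
  assert(SizeOf(UNLOADED) == 0);
  assert(SizeOf(ILLEGAL) == 0);
  assert(SizeOf(NAMED) == 1);   // one value on the stack (the receiver)
  assert(SizeOf(KEYED) == 2);   // two values (receiver and key)
  return 0;
}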

9
deps/v8/src/ia32/debug-ia32.cc

@@ -1,4 +1,4 @@
-// Copyright 2006-2008 the V8 project authors. All rights reserved.
+// Copyright 2010 the V8 project authors. All rights reserved.
 // Redistribution and use in source and binary forms, with or without
 // modification, are permitted provided that the following conditions are
 // met:
@@ -94,7 +94,7 @@ static void Generate_DebugBreakCallHelper(MacroAssembler* masm,
   __ Set(eax, Immediate(0));  // no arguments
   __ mov(ebx, Immediate(ExternalReference::debug_break()));
 
-  CEntryDebugBreakStub ceb;
+  CEntryStub ceb(1, ExitFrame::MODE_DEBUG);
   __ CallStub(&ceb);
 
   // Restore the register values containing object pointers from the expression
@@ -132,12 +132,13 @@ void Debug::GenerateLoadICDebugBreak(MacroAssembler* masm) {
 void Debug::GenerateStoreICDebugBreak(MacroAssembler* masm) {
-  // REgister state for IC store call (from ic-ia32.cc).
+  // Register state for IC store call (from ic-ia32.cc).
   // ----------- S t a t e -------------
   //  -- eax : value
   //  -- ecx : name
+  //  -- edx : receiver
   // -----------------------------------
-  Generate_DebugBreakCallHelper(masm, eax.bit() | ecx.bit(), false);
+  Generate_DebugBreakCallHelper(masm, eax.bit() | ecx.bit() | edx.bit(), false);
 }

13
deps/v8/src/ia32/disasm-ia32.cc

@@ -53,22 +53,25 @@ struct ByteMnemonic {
 static ByteMnemonic two_operands_instr[] = {
   {0x03, "add", REG_OPER_OP_ORDER},
-  {0x21, "and", OPER_REG_OP_ORDER},
-  {0x23, "and", REG_OPER_OP_ORDER},
-  {0x3B, "cmp", REG_OPER_OP_ORDER},
-  {0x8D, "lea", REG_OPER_OP_ORDER},
   {0x09, "or", OPER_REG_OP_ORDER},
   {0x0B, "or", REG_OPER_OP_ORDER},
   {0x1B, "sbb", REG_OPER_OP_ORDER},
+  {0x21, "and", OPER_REG_OP_ORDER},
+  {0x23, "and", REG_OPER_OP_ORDER},
   {0x29, "sub", OPER_REG_OP_ORDER},
   {0x2A, "subb", REG_OPER_OP_ORDER},
   {0x2B, "sub", REG_OPER_OP_ORDER},
-  {0x85, "test", REG_OPER_OP_ORDER},
   {0x31, "xor", OPER_REG_OP_ORDER},
   {0x33, "xor", REG_OPER_OP_ORDER},
+  {0x38, "cmpb", OPER_REG_OP_ORDER},
+  {0x3A, "cmpb", REG_OPER_OP_ORDER},
+  {0x3B, "cmp", REG_OPER_OP_ORDER},
+  {0x84, "test_b", REG_OPER_OP_ORDER},
+  {0x85, "test", REG_OPER_OP_ORDER},
   {0x87, "xchg", REG_OPER_OP_ORDER},
   {0x8A, "mov_b", REG_OPER_OP_ORDER},
   {0x8B, "mov", REG_OPER_OP_ORDER},
+  {0x8D, "lea", REG_OPER_OP_ORDER},
   {-1, "", UNSET_OP_ORDER}
 };

1714
deps/v8/src/ia32/fast-codegen-ia32.cc

File diff suppressed because it is too large

1900
deps/v8/src/ia32/full-codegen-ia32.cc

File diff suppressed because it is too large

33
deps/v8/src/ia32/ic-ia32.cc

@@ -180,7 +180,6 @@ void LoadIC::GenerateArrayLength(MacroAssembler* masm) {
   //  -- esp[0] : return address
   //  -- esp[4] : receiver
   // -----------------------------------
-
   Label miss;
 
   __ mov(eax, Operand(esp, kPointerSize));
@@ -197,7 +196,6 @@ void LoadIC::GenerateStringLength(MacroAssembler* masm) {
   //  -- esp[0] : return address
   //  -- esp[4] : receiver
   // -----------------------------------
-
   Label miss;
 
   __ mov(eax, Operand(esp, kPointerSize));
@@ -214,7 +212,6 @@ void LoadIC::GenerateFunctionPrototype(MacroAssembler* masm) {
   //  -- esp[0] : return address
   //  -- esp[4] : receiver
   // -----------------------------------
-
   Label miss;
 
   __ mov(eax, Operand(esp, kPointerSize));
@@ -244,11 +241,10 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
   // Get the map of the receiver.
   __ mov(edx, FieldOperand(ecx, HeapObject::kMapOffset));
-  // Check that the receiver does not require access checks. We need
-  // to check this explicitly since this generic stub does not perform
-  // map checks.
+
+  // Check bit field.
   __ movzx_b(ebx, FieldOperand(edx, Map::kBitFieldOffset));
-  __ test(ebx, Immediate(1 << Map::kIsAccessCheckNeeded));
+  __ test(ebx, Immediate(kSlowCaseBitFieldMask));
   __ j(not_zero, &slow, not_taken);
   // Check that the object is some kind of JS object EXCEPT JS Value type.
   // In the case that the object is a value-wrapper object,
@@ -1040,7 +1036,6 @@ void CallIC::GenerateNormal(MacroAssembler* masm, int argc) {
   //  -- ...
   //  -- esp[(argc + 1) * 4] : receiver
   // -----------------------------------
-
   Label miss, global_object, non_global_object;
 
   // Get the receiver of the function from the stack; 1 ~ return address.
@@ -1179,7 +1174,6 @@ void LoadIC::GenerateNormal(MacroAssembler* masm) {
   //  -- esp[0] : return address
   //  -- esp[4] : receiver
   // -----------------------------------
-
   Label miss, probe, global;
 
   __ mov(eax, Operand(esp, kPointerSize));
@@ -1385,19 +1379,17 @@ void StoreIC::GenerateMegamorphic(MacroAssembler* masm) {
   // ----------- S t a t e -------------
   //  -- eax : value
   //  -- ecx : name
+  //  -- edx : receiver
   //  -- esp[0] : return address
-  //  -- esp[4] : receiver
   // -----------------------------------
 
-  // Get the receiver from the stack and probe the stub cache.
-  __ mov(edx, Operand(esp, 4));
   Code::Flags flags = Code::ComputeFlags(Code::STORE_IC,
                                          NOT_IN_LOOP,
                                          MONOMORPHIC);
   StubCache::GenerateProbe(masm, flags, edx, ecx, ebx, no_reg);
 
   // Cache miss: Jump to runtime.
-  Generate(masm, ExternalReference(IC_Utility(kStoreIC_Miss)));
+  GenerateMiss(masm);
 }
@@ -1405,12 +1397,12 @@ void StoreIC::GenerateExtendStorage(MacroAssembler* masm) {
@ -1405,12 +1397,12 @@ void StoreIC::GenerateExtendStorage(MacroAssembler* masm) {
// ----------- S t a t e ------------- // ----------- S t a t e -------------
// -- eax : value // -- eax : value
// -- ecx : transition map // -- ecx : transition map
// -- edx : receiver
// -- esp[0] : return address // -- esp[0] : return address
// -- esp[4] : receiver
// ----------------------------------- // -----------------------------------
__ pop(ebx); __ pop(ebx);
__ push(Operand(esp, 0)); // receiver __ push(edx); // receiver
__ push(ecx); // transition map __ push(ecx); // transition map
__ push(eax); // value __ push(eax); // value
__ push(ebx); // return address __ push(ebx); // return address
@ -1421,23 +1413,22 @@ void StoreIC::GenerateExtendStorage(MacroAssembler* masm) {
} }
void StoreIC::Generate(MacroAssembler* masm, const ExternalReference& f) { void StoreIC::GenerateMiss(MacroAssembler* masm) {
// ----------- S t a t e ------------- // ----------- S t a t e -------------
// -- eax : value // -- eax : value
// -- ecx : name // -- ecx : name
// -- edx : receiver
// -- esp[0] : return address // -- esp[0] : return address
// -- esp[4] : receiver
// ----------------------------------- // -----------------------------------
// Move the return address below the arguments.
__ pop(ebx); __ pop(ebx);
__ push(Operand(esp, 0)); __ push(edx);
__ push(ecx); __ push(ecx);
__ push(eax); __ push(eax);
__ push(ebx); __ push(ebx);
// Perform tail call to the entry. // Perform tail call to the entry.
__ TailCallRuntime(f, 3, 1); __ TailCallRuntime(ExternalReference(IC_Utility(kStoreIC_Miss)), 3, 1);
} }
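With the receiver now arriving in edx rather than on the stack, the miss stub only rotates the return address below the three operands before tail-calling the kStoreIC_Miss runtime entry. A small sketch of that shuffle with the stack modelled as a vector (names are illustrative, not V8 code):

#include <cstdio>
#include <vector>

int main() {
  // back() is the top of stack; the registers hold the three IC operands.
  std::vector<const char*> stack = {"return address"};
  const char* eax = "value";
  const char* ecx = "name";
  const char* edx = "receiver";

  const char* ret = stack.back(); stack.pop_back();  // __ pop(ebx)
  stack.push_back(edx);                              // __ push(edx)  receiver
  stack.push_back(ecx);                              // __ push(ecx)  name
  stack.push_back(eax);                              // __ push(eax)  value
  stack.push_back(ret);                              // __ push(ebx)  return address back on top

  // The runtime entry now finds receiver, name, value beneath the return address.
  for (const char* slot : stack) std::printf("%s\n", slot);
  return 0;
}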
@ -1452,7 +1443,6 @@ void KeyedStoreIC::Generate(MacroAssembler* masm, const ExternalReference& f) {
// -- esp[8] : receiver // -- esp[8] : receiver
// ----------------------------------- // -----------------------------------
// Move the return address below the arguments.
__ pop(ecx); __ pop(ecx);
__ push(Operand(esp, 1 * kPointerSize)); __ push(Operand(esp, 1 * kPointerSize));
__ push(Operand(esp, 1 * kPointerSize)); __ push(Operand(esp, 1 * kPointerSize));
@ -1473,7 +1463,6 @@ void KeyedStoreIC::GenerateExtendStorage(MacroAssembler* masm) {
// -- esp[8] : receiver // -- esp[8] : receiver
// ----------------------------------- // -----------------------------------
// Move the return address below the arguments.
__ pop(ebx); __ pop(ebx);
__ push(Operand(esp, 1 * kPointerSize)); __ push(Operand(esp, 1 * kPointerSize));
__ push(ecx); __ push(ecx);

99  deps/v8/src/ia32/macro-assembler-ia32.cc

@ -147,6 +147,11 @@ void RecordWriteStub::Generate(MacroAssembler* masm) {
// All registers are clobbered by the operation. // All registers are clobbered by the operation.
void MacroAssembler::RecordWrite(Register object, int offset, void MacroAssembler::RecordWrite(Register object, int offset,
Register value, Register scratch) { Register value, Register scratch) {
// The compiled code assumes that record write doesn't change the
// context register, so we check that none of the clobbered
// registers are esi.
ASSERT(!object.is(esi) && !value.is(esi) && !scratch.is(esi));
// First, check if a remembered set write is even needed. The tests below // First, check if a remembered set write is even needed. The tests below
// catch stores of Smis and stores into young gen (which does not have space // catch stores of Smis and stores into young gen (which does not have space
// for the remembered set bits. // for the remembered set bits.
@ -210,6 +215,14 @@ void MacroAssembler::RecordWrite(Register object, int offset,
} }
bind(&done); bind(&done);
// Clobber all input registers when running with the debug-code flag
// turned on to provoke errors.
if (FLAG_debug_code) {
mov(object, Immediate(bit_cast<int32_t>(kZapValue)));
mov(value, Immediate(bit_cast<int32_t>(kZapValue)));
mov(scratch, Immediate(bit_cast<int32_t>(kZapValue)));
}
} }
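Zapping the inputs when --debug-code is on makes any later, accidental use of those registers fail loudly instead of silently corrupting state. A minimal C++ rendering of the idea (the kZapValue constant below is a placeholder marker, not V8's actual value):

#include <cassert>
#include <cstdint>

const bool FLAG_debug_code = true;
const uint32_t kZapValue = 0xdeadbeef;  // assumed marker value for this sketch

void RecordWriteSketch(uint32_t& object, uint32_t& value, uint32_t& scratch) {
  // ... the remembered-set update itself would happen here ...
  if (FLAG_debug_code) {
    // Poison the inputs so reuse after the call is caught in debug builds.
    object = kZapValue;
    value = kZapValue;
    scratch = kZapValue;
  }
}

int main() {
  uint32_t obj = 1, val = 2, scr = 3;
  RecordWriteSketch(obj, val, scr);
  assert(obj == kZapValue && val == kZapValue && scr == kZapValue);
  return 0;
}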
@ -1098,10 +1111,14 @@ void MacroAssembler::CallRuntime(Runtime::Function* f, int num_arguments) {
return; return;
} }
Runtime::FunctionId function_id = // TODO(1236192): Most runtime routines don't need the number of
static_cast<Runtime::FunctionId>(f->stub_id); // arguments passed in because it is constant. At some point we
RuntimeStub stub(function_id, num_arguments); // should remove this need and make the runtime routine entry code
CallStub(&stub); // smarter.
Set(eax, Immediate(num_arguments));
mov(ebx, Immediate(ExternalReference(f)));
CEntryStub ces(1);
CallStub(&ces);
} }
@ -1114,10 +1131,14 @@ Object* MacroAssembler::TryCallRuntime(Runtime::Function* f,
return Heap::undefined_value(); return Heap::undefined_value();
} }
Runtime::FunctionId function_id = // TODO(1236192): Most runtime routines don't need the number of
static_cast<Runtime::FunctionId>(f->stub_id); // arguments passed in because it is constant. At some point we
RuntimeStub stub(function_id, num_arguments); // should remove this need and make the runtime routine entry code
return TryCallStub(&stub); // smarter.
Set(eax, Immediate(num_arguments));
mov(ebx, Immediate(ExternalReference(f)));
CEntryStub ces(1);
return TryCallStub(&ces);
} }
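Runtime calls now funnel through the generic CEntryStub: the argument count goes in eax, the runtime function's external reference in ebx, and the stub handles the actual transition. A hedged sketch of that hand-off as plain data flow (the FakeCpu struct and the runtime name are invented for illustration):

#include <cstdio>

struct FakeCpu { int eax; const char* ebx; };  // stand-ins for the two registers

static int RuntimeAdd(int a, int b) { return a + b; }

void CEntry(const FakeCpu& cpu, int arg0, int arg1) {
  // The real stub reads the target from ebx and the argument count from eax.
  std::printf("calling %s with %d argument(s)\n", cpu.ebx, cpu.eax);
  std::printf("result: %d\n", RuntimeAdd(arg0, arg1));
}

int main() {
  FakeCpu cpu;
  cpu.eax = 2;              // Set(eax, Immediate(num_arguments))
  cpu.ebx = "Runtime_Add";  // mov(ebx, Immediate(ExternalReference(f))), name invented
  CEntry(cpu, 30, 12);      // CallStub(&ces)
  return 0;
}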
@ -1454,6 +1475,36 @@ void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
} }
void MacroAssembler::IncrementCounter(Condition cc,
StatsCounter* counter,
int value) {
ASSERT(value > 0);
if (FLAG_native_code_counters && counter->Enabled()) {
Label skip;
j(NegateCondition(cc), &skip);
pushfd();
IncrementCounter(counter, value);
popfd();
bind(&skip);
}
}
void MacroAssembler::DecrementCounter(Condition cc,
StatsCounter* counter,
int value) {
ASSERT(value > 0);
if (FLAG_native_code_counters && counter->Enabled()) {
Label skip;
j(NegateCondition(cc), &skip);
pushfd();
DecrementCounter(counter, value);
popfd();
bind(&skip);
}
}
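The new conditional counter helpers bump a counter only when the given condition holds, and bracket the update with pushfd/popfd because the add or sub would otherwise clobber the CPU flags the caller still depends on. A small C++ analogue of the guarded update (the flag preservation is only described in a comment, since plain C++ has no flags register):

#include <cstdio>

struct StatsCounter { const char* name; int value = 0; bool enabled = true; };

// Only bump the counter when `condition` holds; the assembler version also
// saves and restores EFLAGS around the update (pushfd/popfd).
void IncrementCounterIf(bool condition, StatsCounter& counter, int value) {
  if (counter.enabled && condition) counter.value += value;
}

int main() {
  StatsCounter c{"some_counter"};  // counter name chosen for illustration
  IncrementCounterIf(true, c, 1);
  IncrementCounterIf(false, c, 1);
  std::printf("%s = %d\n", c.name, c.value);
  return 0;
}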
void MacroAssembler::Assert(Condition cc, const char* msg) { void MacroAssembler::Assert(Condition cc, const char* msg) {
if (FLAG_debug_code) Check(cc, msg); if (FLAG_debug_code) Check(cc, msg);
} }
@ -1495,6 +1546,38 @@ void MacroAssembler::Abort(const char* msg) {
} }
void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register object1,
Register object2,
Register scratch1,
Register scratch2,
Label* failure) {
// Check that both objects are not smis.
ASSERT_EQ(0, kSmiTag);
mov(scratch1, Operand(object1));
and_(scratch1, Operand(object2));
test(scratch1, Immediate(kSmiTagMask));
j(zero, failure);
// Load instance type for both strings.
mov(scratch1, FieldOperand(object1, HeapObject::kMapOffset));
mov(scratch2, FieldOperand(object2, HeapObject::kMapOffset));
movzx_b(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
movzx_b(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));
// Check that both are flat ascii strings.
const int kFlatAsciiStringMask =
kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
const int kFlatAsciiStringTag = ASCII_STRING_TYPE;
// Interleave bits from both instance types and compare them in one check.
ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
and_(scratch1, kFlatAsciiStringMask);
and_(scratch2, kFlatAsciiStringMask);
lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
cmp(scratch1, kFlatAsciiStringTag | (kFlatAsciiStringTag << 3));
j(not_equal, failure);
}
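The sequential-ASCII check masks both instance types down to the representation and encoding bits, packs them into one word with lea (scratch1 + scratch2 * 8), and compares the packed value against the expected tag pattern, so both strings are validated by a single compare. A plain-integer sketch of the same pack-and-compare trick (mask and tag values below are illustrative, not V8's real constants):

#include <cstdio>

const int kFlatAsciiStringMask = 0x7;  // pretend the low 3 bits describe the string kind
const int kFlatAsciiStringTag  = 0x4;  // pretend this tag means "flat ASCII"

bool BothFlatAscii(int instance_type1, int instance_type2) {
  int a = instance_type1 & kFlatAsciiStringMask;
  int b = instance_type2 & kFlatAsciiStringMask;
  // lea scratch1, [scratch1 + scratch2*8]: pack both masked types into one word.
  int packed = a + b * 8;
  // One compare checks both strings at once.
  return packed == (kFlatAsciiStringTag | (kFlatAsciiStringTag << 3));
}

int main() {
  std::printf("%d %d\n", BothFlatAscii(0x4, 0x4), BothFlatAscii(0x4, 0x5));
  return 0;
}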
CodePatcher::CodePatcher(byte* address, int size) CodePatcher::CodePatcher(byte* address, int size)
: address_(address), size_(size), masm_(address, size + Assembler::kGap) { : address_(address), size_(size), masm_(address, size + Assembler::kGap) {
// Create a new macro assembler pointing to the address of the code to patch. // Create a new macro assembler pointing to the address of the code to patch.

15  deps/v8/src/ia32/macro-assembler-ia32.h

@ -331,7 +331,7 @@ class MacroAssembler: public Assembler {
// Eventually this should be used for all C calls. // Eventually this should be used for all C calls.
void CallRuntime(Runtime::Function* f, int num_arguments); void CallRuntime(Runtime::Function* f, int num_arguments);
// Call a runtime function, returning the RuntimeStub object called. // Call a runtime function, returning the CodeStub object called.
// Try to generate the stub code if necessary. Do not perform a GC // Try to generate the stub code if necessary. Do not perform a GC
// but instead return a retry after GC failure. // but instead return a retry after GC failure.
Object* TryCallRuntime(Runtime::Function* f, int num_arguments); Object* TryCallRuntime(Runtime::Function* f, int num_arguments);
@ -392,6 +392,8 @@ class MacroAssembler: public Assembler {
void SetCounter(StatsCounter* counter, int value); void SetCounter(StatsCounter* counter, int value);
void IncrementCounter(StatsCounter* counter, int value); void IncrementCounter(StatsCounter* counter, int value);
void DecrementCounter(StatsCounter* counter, int value); void DecrementCounter(StatsCounter* counter, int value);
void IncrementCounter(Condition cc, StatsCounter* counter, int value);
void DecrementCounter(Condition cc, StatsCounter* counter, int value);
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
@ -413,6 +415,17 @@ class MacroAssembler: public Assembler {
void set_allow_stub_calls(bool value) { allow_stub_calls_ = value; } void set_allow_stub_calls(bool value) { allow_stub_calls_ = value; }
bool allow_stub_calls() { return allow_stub_calls_; } bool allow_stub_calls() { return allow_stub_calls_; }
// ---------------------------------------------------------------------------
// String utilities.
// Checks if both objects are sequential ASCII strings, and jumps to label
// if either is not.
void JumpIfNotBothSequentialAsciiStrings(Register object1,
Register object2,
Register scratch1,
Register scratch2,
Label *on_not_flat_ascii_strings);
private: private:
List<Unresolved> unresolved_; List<Unresolved> unresolved_;
bool generating_stub_; bool generating_stub_;

75  deps/v8/src/ia32/regexp-macro-assembler-ia32.cc

@ -59,8 +59,6 @@ namespace internal {
* call through the runtime system) * call through the runtime system)
* - stack_area_base (High end of the memory area to use as * - stack_area_base (High end of the memory area to use as
* backtracking stack) * backtracking stack)
* - at_start (if 1, we are starting at the start of the
* string, otherwise 0)
* - int* capture_array (int[num_saved_registers_], for output). * - int* capture_array (int[num_saved_registers_], for output).
* - end of input (Address of end of string) * - end of input (Address of end of string)
* - start of input (Address of first character in string) * - start of input (Address of first character in string)
@ -74,6 +72,8 @@ namespace internal {
* - backup of caller ebx * - backup of caller ebx
* - Offset of location before start of input (effectively character * - Offset of location before start of input (effectively character
* position -1). Used to initialize capture registers to a non-position. * position -1). Used to initialize capture registers to a non-position.
* - Boolean at start (if 1, we are starting at the start of the string,
* otherwise 0)
* - register 0 ebp[-4] (Only positions must be stored in the first * - register 0 ebp[-4] (Only positions must be stored in the first
* - register 1 ebp[-8] num_saved_registers_ registers) * - register 1 ebp[-8] num_saved_registers_ registers)
* - ... * - ...
@ -539,46 +539,33 @@ bool RegExpMacroAssemblerIA32::CheckSpecialCharacterClass(uc16 type,
return true; return true;
} }
case 'w': { case 'w': {
Label done, check_digits; if (mode_ != ASCII) {
__ cmp(Operand(current_character()), Immediate('9')); // Table is 128 entries, so all ASCII characters can be tested.
__ j(less_equal, &check_digits); __ cmp(Operand(current_character()), Immediate('z'));
__ cmp(Operand(current_character()), Immediate('_')); BranchOrBacktrack(above, on_no_match);
__ j(equal, &done); }
// Convert to lower case if letter. ASSERT_EQ(0, word_character_map[0]); // Character '\0' is not a word char.
__ mov(Operand(eax), current_character()); ExternalReference word_map = ExternalReference::re_word_character_map();
__ or_(eax, 0x20); __ test_b(current_character(),
// check current character in range ['a'..'z'], nondestructively. Operand::StaticArray(current_character(), times_1, word_map));
__ sub(Operand(eax), Immediate('a')); BranchOrBacktrack(zero, on_no_match);
__ cmp(Operand(eax), Immediate('z' - 'a'));
BranchOrBacktrack(above, on_no_match);
__ jmp(&done);
__ bind(&check_digits);
// Check current character in range ['0'..'9'].
__ cmp(Operand(current_character()), Immediate('0'));
BranchOrBacktrack(below, on_no_match);
__ bind(&done);
return true; return true;
} }
case 'W': { case 'W': {
Label done, check_digits; Label done;
__ cmp(Operand(current_character()), Immediate('9')); if (mode_ != ASCII) {
__ j(less_equal, &check_digits); // Table is 128 entries, so all ASCII characters can be tested.
__ cmp(Operand(current_character()), Immediate('_')); __ cmp(Operand(current_character()), Immediate('z'));
BranchOrBacktrack(equal, on_no_match); __ j(above, &done);
// Convert to lower case if letter. }
__ mov(Operand(eax), current_character()); ASSERT_EQ(0, word_character_map[0]); // Character '\0' is not a word char.
__ or_(eax, 0x20); ExternalReference word_map = ExternalReference::re_word_character_map();
// check current character in range ['a'..'z'], nondestructively. __ test_b(current_character(),
__ sub(Operand(eax), Immediate('a')); Operand::StaticArray(current_character(), times_1, word_map));
__ cmp(Operand(eax), Immediate('z' - 'a')); BranchOrBacktrack(not_zero, on_no_match);
BranchOrBacktrack(below_equal, on_no_match); if (mode_ != ASCII) {
__ jmp(&done); __ bind(&done);
__ bind(&check_digits); }
// Check current character in range ['0'..'9'].
__ cmp(Operand(current_character()), Immediate('0'));
BranchOrBacktrack(above_equal, on_no_match);
__ bind(&done);
return true; return true;
} }
// Non-standard classes (with no syntactic shorthand) used internally. // Non-standard classes (with no syntactic shorthand) used internally.
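Instead of range checks on letters, digits and '_', the '\w' and '\W' classes are now answered by indexing a 128-entry word-character table: two-byte subjects first reject anything above 'z', then a single test_b against the table decides. A C++ sketch of that lookup with a hand-built table (the table contents follow the usual \w definition and are not copied from V8):

#include <cstdio>

static unsigned char word_character_map[128];  // non-zero means "word character"

void InitWordMap() {
  for (int c = '0'; c <= '9'; ++c) word_character_map[c] = 1;
  for (int c = 'A'; c <= 'Z'; ++c) word_character_map[c] = 1;
  for (int c = 'a'; c <= 'z'; ++c) word_character_map[c] = 1;
  word_character_map['_'] = 1;
}

bool IsWordChar(unsigned current_character) {
  // In two-byte mode anything above 'z' cannot be in the 128-entry table.
  if (current_character > static_cast<unsigned>('z')) return false;
  return word_character_map[current_character] != 0;  // the test_b against the table
}

int main() {
  InitWordMap();
  std::printf("%d %d %d\n", IsWordChar('_'), IsWordChar('7'), IsWordChar('-'));
  return 0;
}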
@ -638,6 +625,7 @@ Handle<Object> RegExpMacroAssemblerIA32::GetCode(Handle<String> source) {
__ push(edi); __ push(edi);
__ push(ebx); // Callee-save on MacOS. __ push(ebx); // Callee-save on MacOS.
__ push(Immediate(0)); // Make room for "input start - 1" constant. __ push(Immediate(0)); // Make room for "input start - 1" constant.
__ push(Immediate(0)); // Make room for "at start" constant.
// Check if we have space on the stack for registers. // Check if we have space on the stack for registers.
Label stack_limit_hit; Label stack_limit_hit;
@ -680,6 +668,15 @@ Handle<Object> RegExpMacroAssemblerIA32::GetCode(Handle<String> source) {
// Store this value in a local variable, for use when clearing // Store this value in a local variable, for use when clearing
// position registers. // position registers.
__ mov(Operand(ebp, kInputStartMinusOne), eax); __ mov(Operand(ebp, kInputStartMinusOne), eax);
// Determine whether the start index is zero, that is at the start of the
// string, and store that value in a local variable.
__ mov(ebx, Operand(ebp, kStartIndex));
__ xor_(Operand(ecx), ecx); // setcc only operates on cl (lower byte of ecx).
__ test(ebx, Operand(ebx));
__ setcc(zero, ecx); // 1 if 0 (start of string), 0 if positive.
__ mov(Operand(ebp, kAtStart), ecx);
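The "at start" flag used to be passed in as an extra argument; it is now computed once in the prologue and stored in a new local slot next to the "input start - 1" constant. The xor/test/setcc sequence produces the 0-or-1 value without a branch. A branch-free C++ rendering of just that computation:

#include <cstdio>

// xor_(ecx, ecx); test(ebx, ebx); setcc(zero, ecx):
// yields 1 exactly when the start index is zero, with no branch.
int ComputeAtStart(int start_index) {
  int at_start = (start_index == 0);
  return at_start;  // the generated code stores this at ebp[kAtStart]
}

int main() {
  std::printf("%d %d\n", ComputeAtStart(0), ComputeAtStart(5));
  return 0;
}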
if (num_saved_registers_ > 0) { // Always is, if generated from a regexp. if (num_saved_registers_ > 0) { // Always is, if generated from a regexp.
// Fill saved registers with initial value = start offset - 1 // Fill saved registers with initial value = start offset - 1
// Fill in stack push order, to avoid accessing across an unwritten // Fill in stack push order, to avoid accessing across an unwritten

6  deps/v8/src/ia32/regexp-macro-assembler-ia32.h

@ -123,8 +123,7 @@ class RegExpMacroAssemblerIA32: public NativeRegExpMacroAssembler {
static const int kInputStart = kStartIndex + kPointerSize; static const int kInputStart = kStartIndex + kPointerSize;
static const int kInputEnd = kInputStart + kPointerSize; static const int kInputEnd = kInputStart + kPointerSize;
static const int kRegisterOutput = kInputEnd + kPointerSize; static const int kRegisterOutput = kInputEnd + kPointerSize;
static const int kAtStart = kRegisterOutput + kPointerSize; static const int kStackHighEnd = kRegisterOutput + kPointerSize;
static const int kStackHighEnd = kAtStart + kPointerSize;
static const int kDirectCall = kStackHighEnd + kPointerSize; static const int kDirectCall = kStackHighEnd + kPointerSize;
// Below the frame pointer - local stack variables. // Below the frame pointer - local stack variables.
// When adding local variables remember to push space for them in // When adding local variables remember to push space for them in
@ -133,8 +132,9 @@ class RegExpMacroAssemblerIA32: public NativeRegExpMacroAssembler {
static const int kBackup_edi = kBackup_esi - kPointerSize; static const int kBackup_edi = kBackup_esi - kPointerSize;
static const int kBackup_ebx = kBackup_edi - kPointerSize; static const int kBackup_ebx = kBackup_edi - kPointerSize;
static const int kInputStartMinusOne = kBackup_ebx - kPointerSize; static const int kInputStartMinusOne = kBackup_ebx - kPointerSize;
static const int kAtStart = kInputStartMinusOne - kPointerSize;
// First register address. Following registers are below it on the stack. // First register address. Following registers are below it on the stack.
static const int kRegisterZero = kInputStartMinusOne - kPointerSize; static const int kRegisterZero = kAtStart - kPointerSize;
// Initial size of code buffer. // Initial size of code buffer.
static const size_t kRegExpCodeSize = 1024; static const size_t kRegExpCodeSize = 1024;

4  deps/v8/src/ia32/simulator-ia32.h

@ -53,8 +53,8 @@ class SimulatorStack : public v8::internal::AllStatic {
// Call the generated regexp code directly. The entry function pointer should // Call the generated regexp code directly. The entry function pointer should
// expect eight int/pointer sized arguments and return an int. // expect eight int/pointer sized arguments and return an int.
#define CALL_GENERATED_REGEXP_CODE(entry, p0, p1, p2, p3, p4, p5, p6, p7) \ #define CALL_GENERATED_REGEXP_CODE(entry, p0, p1, p2, p3, p4, p5, p6) \
entry(p0, p1, p2, p3, p4, p5, p6, p7) entry(p0, p1, p2, p3, p4, p5, p6)
#define TRY_CATCH_FROM_ADDRESS(try_catch_address) \ #define TRY_CATCH_FROM_ADDRESS(try_catch_address) \
reinterpret_cast<TryCatch*>(try_catch_address) reinterpret_cast<TryCatch*>(try_catch_address)

144  deps/v8/src/ia32/stub-cache-ia32.cc

@ -161,6 +161,7 @@ static void PushInterceptorArguments(MacroAssembler* masm,
__ push(holder); __ push(holder);
__ push(name); __ push(name);
InterceptorInfo* interceptor = holder_obj->GetNamedInterceptor(); InterceptorInfo* interceptor = holder_obj->GetNamedInterceptor();
ASSERT(!Heap::InNewSpace(interceptor));
__ mov(receiver, Immediate(Handle<Object>(interceptor))); __ mov(receiver, Immediate(Handle<Object>(interceptor)));
__ push(receiver); __ push(receiver);
__ push(FieldOperand(receiver, InterceptorInfo::kDataOffset)); __ push(FieldOperand(receiver, InterceptorInfo::kDataOffset));
@ -347,19 +348,6 @@ static void CompileLoadInterceptor(Compiler* compiler,
} }
static void LookupPostInterceptor(JSObject* holder,
String* name,
LookupResult* lookup) {
holder->LocalLookupRealNamedProperty(name, lookup);
if (lookup->IsNotFound()) {
Object* proto = holder->GetPrototype();
if (proto != Heap::null_value()) {
proto->Lookup(name, lookup);
}
}
}
class LoadInterceptorCompiler BASE_EMBEDDED { class LoadInterceptorCompiler BASE_EMBEDDED {
public: public:
explicit LoadInterceptorCompiler(Register name) : name_(name) {} explicit LoadInterceptorCompiler(Register name) : name_(name) {}
@ -559,7 +547,6 @@ class CallInterceptorCompiler BASE_EMBEDDED {
__ mov(esi, FieldOperand(edi, JSFunction::kContextOffset)); __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
// Jump to the cached code (tail call). // Jump to the cached code (tail call).
ASSERT(function->is_compiled());
Handle<Code> code(function->code()); Handle<Code> code(function->code());
ParameterCount expected(function->shared()->formal_parameter_count()); ParameterCount expected(function->shared()->formal_parameter_count());
__ InvokeCode(code, expected, arguments_, __ InvokeCode(code, expected, arguments_,
@ -997,50 +984,65 @@ Object* CallStubCompiler::CompileCallConstant(Object* object,
break; break;
case STRING_CHECK: case STRING_CHECK:
// Check that the object is a two-byte string or a symbol. if (!function->IsBuiltin()) {
__ mov(eax, FieldOperand(edx, HeapObject::kMapOffset)); // Calling non-builtins with a value as receiver requires boxing.
__ movzx_b(eax, FieldOperand(eax, Map::kInstanceTypeOffset)); __ jmp(&miss);
__ cmp(eax, FIRST_NONSTRING_TYPE); } else {
__ j(above_equal, &miss, not_taken); // Check that the object is a string or a symbol.
// Check that the maps starting from the prototype haven't changed. __ mov(eax, FieldOperand(edx, HeapObject::kMapOffset));
GenerateLoadGlobalFunctionPrototype(masm(), __ movzx_b(eax, FieldOperand(eax, Map::kInstanceTypeOffset));
Context::STRING_FUNCTION_INDEX, __ cmp(eax, FIRST_NONSTRING_TYPE);
eax); __ j(above_equal, &miss, not_taken);
CheckPrototypes(JSObject::cast(object->GetPrototype()), eax, holder, // Check that the maps starting from the prototype haven't changed.
ebx, edx, name, &miss); GenerateLoadGlobalFunctionPrototype(masm(),
Context::STRING_FUNCTION_INDEX,
eax);
CheckPrototypes(JSObject::cast(object->GetPrototype()), eax, holder,
ebx, edx, name, &miss);
}
break; break;
case NUMBER_CHECK: { case NUMBER_CHECK: {
Label fast; if (!function->IsBuiltin()) {
// Check that the object is a smi or a heap number. // Calling non-builtins with a value as receiver requires boxing.
__ test(edx, Immediate(kSmiTagMask)); __ jmp(&miss);
__ j(zero, &fast, taken); } else {
__ CmpObjectType(edx, HEAP_NUMBER_TYPE, eax); Label fast;
__ j(not_equal, &miss, not_taken); // Check that the object is a smi or a heap number.
__ bind(&fast); __ test(edx, Immediate(kSmiTagMask));
// Check that the maps starting from the prototype haven't changed. __ j(zero, &fast, taken);
GenerateLoadGlobalFunctionPrototype(masm(), __ CmpObjectType(edx, HEAP_NUMBER_TYPE, eax);
Context::NUMBER_FUNCTION_INDEX, __ j(not_equal, &miss, not_taken);
eax); __ bind(&fast);
CheckPrototypes(JSObject::cast(object->GetPrototype()), eax, holder, // Check that the maps starting from the prototype haven't changed.
ebx, edx, name, &miss); GenerateLoadGlobalFunctionPrototype(masm(),
Context::NUMBER_FUNCTION_INDEX,
eax);
CheckPrototypes(JSObject::cast(object->GetPrototype()), eax, holder,
ebx, edx, name, &miss);
}
break; break;
} }
case BOOLEAN_CHECK: { case BOOLEAN_CHECK: {
Label fast; if (!function->IsBuiltin()) {
// Check that the object is a boolean. // Calling non-builtins with a value as receiver requires boxing.
__ cmp(edx, Factory::true_value()); __ jmp(&miss);
__ j(equal, &fast, taken); } else {
__ cmp(edx, Factory::false_value()); Label fast;
__ j(not_equal, &miss, not_taken); // Check that the object is a boolean.
__ bind(&fast); __ cmp(edx, Factory::true_value());
// Check that the maps starting from the prototype haven't changed. __ j(equal, &fast, taken);
GenerateLoadGlobalFunctionPrototype(masm(), __ cmp(edx, Factory::false_value());
Context::BOOLEAN_FUNCTION_INDEX, __ j(not_equal, &miss, not_taken);
eax); __ bind(&fast);
CheckPrototypes(JSObject::cast(object->GetPrototype()), eax, holder, // Check that the maps starting from the prototype haven't changed.
ebx, edx, name, &miss); GenerateLoadGlobalFunctionPrototype(masm(),
Context::BOOLEAN_FUNCTION_INDEX,
eax);
CheckPrototypes(JSObject::cast(object->GetPrototype()), eax, holder,
ebx, edx, name, &miss);
}
break; break;
} }
@ -1240,21 +1242,18 @@ Object* StoreStubCompiler::CompileStoreField(JSObject* object,
// ----------- S t a t e ------------- // ----------- S t a t e -------------
// -- eax : value // -- eax : value
// -- ecx : name // -- ecx : name
// -- edx : receiver
// -- esp[0] : return address // -- esp[0] : return address
// -- esp[4] : receiver
// ----------------------------------- // -----------------------------------
Label miss; Label miss;
// Get the object from the stack.
__ mov(ebx, Operand(esp, 1 * kPointerSize));
// Generate store field code. Trashes the name register. // Generate store field code. Trashes the name register.
GenerateStoreField(masm(), GenerateStoreField(masm(),
Builtins::StoreIC_ExtendStorage, Builtins::StoreIC_ExtendStorage,
object, object,
index, index,
transition, transition,
ebx, ecx, edx, edx, ecx, ebx,
&miss); &miss);
// Handle store cache miss. // Handle store cache miss.
@ -1274,26 +1273,23 @@ Object* StoreStubCompiler::CompileStoreCallback(JSObject* object,
// ----------- S t a t e ------------- // ----------- S t a t e -------------
// -- eax : value // -- eax : value
// -- ecx : name // -- ecx : name
// -- edx : receiver
// -- esp[0] : return address // -- esp[0] : return address
// -- esp[4] : receiver
// ----------------------------------- // -----------------------------------
Label miss; Label miss;
// Get the object from the stack.
__ mov(ebx, Operand(esp, 1 * kPointerSize));
// Check that the object isn't a smi. // Check that the object isn't a smi.
__ test(ebx, Immediate(kSmiTagMask)); __ test(edx, Immediate(kSmiTagMask));
__ j(zero, &miss, not_taken); __ j(zero, &miss, not_taken);
// Check that the map of the object hasn't changed. // Check that the map of the object hasn't changed.
__ cmp(FieldOperand(ebx, HeapObject::kMapOffset), __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
Immediate(Handle<Map>(object->map()))); Immediate(Handle<Map>(object->map())));
__ j(not_equal, &miss, not_taken); __ j(not_equal, &miss, not_taken);
// Perform global security token check if needed. // Perform global security token check if needed.
if (object->IsJSGlobalProxy()) { if (object->IsJSGlobalProxy()) {
__ CheckAccessGlobalProxy(ebx, edx, &miss); __ CheckAccessGlobalProxy(edx, ebx, &miss);
} }
// Stub never generated for non-global objects that require access // Stub never generated for non-global objects that require access
@ -1301,7 +1297,7 @@ Object* StoreStubCompiler::CompileStoreCallback(JSObject* object,
ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded()); ASSERT(object->IsJSGlobalProxy() || !object->IsAccessCheckNeeded());
__ pop(ebx); // remove the return address __ pop(ebx); // remove the return address
__ push(Operand(esp, 0)); // receiver __ push(edx); // receiver
__ push(Immediate(Handle<AccessorInfo>(callback))); // callback info __ push(Immediate(Handle<AccessorInfo>(callback))); // callback info
__ push(ecx); // name __ push(ecx); // name
__ push(eax); // value __ push(eax); // value
@ -1314,7 +1310,6 @@ Object* StoreStubCompiler::CompileStoreCallback(JSObject* object,
// Handle store cache miss. // Handle store cache miss.
__ bind(&miss); __ bind(&miss);
__ mov(ecx, Immediate(Handle<String>(name))); // restore name
Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss)); Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
__ jmp(ic, RelocInfo::CODE_TARGET); __ jmp(ic, RelocInfo::CODE_TARGET);
@ -1328,26 +1323,23 @@ Object* StoreStubCompiler::CompileStoreInterceptor(JSObject* receiver,
// ----------- S t a t e ------------- // ----------- S t a t e -------------
// -- eax : value // -- eax : value
// -- ecx : name // -- ecx : name
// -- edx : receiver
// -- esp[0] : return address // -- esp[0] : return address
// -- esp[4] : receiver
// ----------------------------------- // -----------------------------------
Label miss; Label miss;
// Get the object from the stack.
__ mov(ebx, Operand(esp, 1 * kPointerSize));
// Check that the object isn't a smi. // Check that the object isn't a smi.
__ test(ebx, Immediate(kSmiTagMask)); __ test(edx, Immediate(kSmiTagMask));
__ j(zero, &miss, not_taken); __ j(zero, &miss, not_taken);
// Check that the map of the object hasn't changed. // Check that the map of the object hasn't changed.
__ cmp(FieldOperand(ebx, HeapObject::kMapOffset), __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
Immediate(Handle<Map>(receiver->map()))); Immediate(Handle<Map>(receiver->map())));
__ j(not_equal, &miss, not_taken); __ j(not_equal, &miss, not_taken);
// Perform global security token check if needed. // Perform global security token check if needed.
if (receiver->IsJSGlobalProxy()) { if (receiver->IsJSGlobalProxy()) {
__ CheckAccessGlobalProxy(ebx, edx, &miss); __ CheckAccessGlobalProxy(edx, ebx, &miss);
} }
// Stub never generated for non-global objects that require access // Stub never generated for non-global objects that require access
@ -1355,7 +1347,7 @@ Object* StoreStubCompiler::CompileStoreInterceptor(JSObject* receiver,
ASSERT(receiver->IsJSGlobalProxy() || !receiver->IsAccessCheckNeeded()); ASSERT(receiver->IsJSGlobalProxy() || !receiver->IsAccessCheckNeeded());
__ pop(ebx); // remove the return address __ pop(ebx); // remove the return address
__ push(Operand(esp, 0)); // receiver __ push(edx); // receiver
__ push(ecx); // name __ push(ecx); // name
__ push(eax); // value __ push(eax); // value
__ push(ebx); // restore return address __ push(ebx); // restore return address
@ -1367,7 +1359,6 @@ Object* StoreStubCompiler::CompileStoreInterceptor(JSObject* receiver,
// Handle store cache miss. // Handle store cache miss.
__ bind(&miss); __ bind(&miss);
__ mov(ecx, Immediate(Handle<String>(name))); // restore name
Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss)); Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Miss));
__ jmp(ic, RelocInfo::CODE_TARGET); __ jmp(ic, RelocInfo::CODE_TARGET);
@ -1382,14 +1373,13 @@ Object* StoreStubCompiler::CompileStoreGlobal(GlobalObject* object,
// ----------- S t a t e ------------- // ----------- S t a t e -------------
// -- eax : value // -- eax : value
// -- ecx : name // -- ecx : name
// -- edx : receiver
// -- esp[0] : return address // -- esp[0] : return address
// -- esp[4] : receiver
// ----------------------------------- // -----------------------------------
Label miss; Label miss;
// Check that the map of the global has not changed. // Check that the map of the global has not changed.
__ mov(ebx, Operand(esp, kPointerSize)); __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
__ cmp(FieldOperand(ebx, HeapObject::kMapOffset),
Immediate(Handle<Map>(object->map()))); Immediate(Handle<Map>(object->map())));
__ j(not_equal, &miss, not_taken); __ j(not_equal, &miss, not_taken);

36  deps/v8/src/ia32/virtual-frame-ia32.cc

@ -899,31 +899,45 @@ Result VirtualFrame::CallKeyedLoadIC(RelocInfo::Mode mode) {
Result VirtualFrame::CallStoreIC() { Result VirtualFrame::CallStoreIC() {
// Name, value, and receiver are on top of the frame. The IC // Name, value, and receiver are on top of the frame. The IC
// expects name in ecx, value in eax, and receiver on the stack. It // expects name in ecx, value in eax, and receiver in edx.
// does not drop the receiver.
Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize)); Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
Result name = Pop(); Result name = Pop();
Result value = Pop(); Result value = Pop();
PrepareForCall(1, 0); // One stack arg, not callee-dropped. Result receiver = Pop();
PrepareForCall(0, 0);
if (value.is_register() && value.reg().is(ecx)) { // Optimized for case in which name is a constant value.
if (name.is_register() && name.reg().is(eax)) { if (name.is_register() && (name.reg().is(edx) || name.reg().is(eax))) {
if (!is_used(ecx)) {
name.ToRegister(ecx);
} else if (!is_used(ebx)) {
name.ToRegister(ebx);
} else {
ASSERT(!is_used(edi)); // Only three results are live, so edi is free.
name.ToRegister(edi);
}
}
// Now name is not in edx or eax, so we can fix them, then move name to ecx.
if (value.is_register() && value.reg().is(edx)) {
if (receiver.is_register() && receiver.reg().is(eax)) {
// Wrong registers. // Wrong registers.
__ xchg(eax, ecx); __ xchg(eax, edx);
} else { } else {
// Register eax is free for value, which frees ecx for name. // Register eax is free for value, which frees edx for receiver.
value.ToRegister(eax); value.ToRegister(eax);
name.ToRegister(ecx); receiver.ToRegister(edx);
} }
} else { } else {
// Register ecx is free for name, which guarantees eax is free for // Register edx is free for receiver, which guarantees eax is free for
// value. // value.
name.ToRegister(ecx); receiver.ToRegister(edx);
value.ToRegister(eax); value.ToRegister(eax);
} }
// Receiver and value are in the right place, so ecx is free for name.
name.ToRegister(ecx);
name.Unuse(); name.Unuse();
value.Unuse(); value.Unuse();
receiver.Unuse();
return RawCallCodeObject(ic, RelocInfo::CODE_TARGET); return RawCallCodeObject(ic, RelocInfo::CODE_TARGET);
} }

30  deps/v8/src/ic.cc

@ -378,6 +378,18 @@ Object* CallIC::TryCallAsFunction(Object* object) {
return *delegate; return *delegate;
} }
void CallIC::ReceiverToObject(Handle<Object> object) {
HandleScope scope;
Handle<Object> receiver(object);
// Change the receiver to the result of calling ToObject on it.
const int argc = this->target()->arguments_count();
StackFrameLocator locator;
JavaScriptFrame* frame = locator.FindJavaScriptFrame(0);
int index = frame->ComputeExpressionsCount() - (argc + 1);
frame->SetExpression(index, *Factory::ToObject(object));
}
Object* CallIC::LoadFunction(State state, Object* CallIC::LoadFunction(State state,
Handle<Object> object, Handle<Object> object,
@ -388,6 +400,10 @@ Object* CallIC::LoadFunction(State state,
return TypeError("non_object_property_call", object, name); return TypeError("non_object_property_call", object, name);
} }
if (object->IsString() || object->IsNumber() || object->IsBoolean()) {
ReceiverToObject(object);
}
// Check if the name is trivially convertible to an index and get // Check if the name is trivially convertible to an index and get
// the element if so. // the element if so.
uint32_t index; uint32_t index;
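When a call IC sees a primitive receiver (string, number or boolean), it now boxes it up front: ReceiverToObject runs ToObject and writes the wrapper back into the receiver slot of the calling frame, which sits argc + 1 expressions below the top. A toy sketch of that slot arithmetic (the frame and value types below are stand-ins, not V8's):

#include <cstdio>
#include <string>
#include <variant>
#include <vector>

using Value = std::variant<int, std::string>;

// A toy expression stack for one frame: receiver first, then argc arguments.
struct ToyFrame { std::vector<Value> expressions; };

Value ToObject(const Value& v) {
  // Stand-in for boxing a primitive into its wrapper object.
  if (std::holds_alternative<int>(v))
    return "Number(" + std::to_string(std::get<int>(v)) + ")";
  return v;
}

void ReceiverToObject(ToyFrame& frame, int argc) {
  int index = static_cast<int>(frame.expressions.size()) - (argc + 1);
  frame.expressions[index] = ToObject(frame.expressions[index]);
}

int main() {
  ToyFrame frame;
  frame.expressions = {Value{5}, Value{std::string("arg0")}, Value{std::string("arg1")}};
  ReceiverToObject(frame, 2);
  std::printf("%s\n", std::get<std::string>(frame.expressions[0]).c_str());
  return 0;
}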
@ -1286,9 +1302,9 @@ Object* CallIC_Miss(Arguments args) {
Handle<JSFunction> function = Handle<JSFunction>(JSFunction::cast(result)); Handle<JSFunction> function = Handle<JSFunction>(JSFunction::cast(result));
InLoopFlag in_loop = ic.target()->ic_in_loop(); InLoopFlag in_loop = ic.target()->ic_in_loop();
if (in_loop == IN_LOOP) { if (in_loop == IN_LOOP) {
CompileLazyInLoop(function, CLEAR_EXCEPTION); CompileLazyInLoop(function, args.at<Object>(0), CLEAR_EXCEPTION);
} else { } else {
CompileLazy(function, CLEAR_EXCEPTION); CompileLazy(function, args.at<Object>(0), CLEAR_EXCEPTION);
} }
return *function; return *function;
} }
@ -1379,16 +1395,6 @@ Object* SharedStoreIC_ExtendStorage(Arguments args) {
} }
void StoreIC::GenerateInitialize(MacroAssembler* masm) {
Generate(masm, ExternalReference(IC_Utility(kStoreIC_Miss)));
}
void StoreIC::GenerateMiss(MacroAssembler* masm) {
Generate(masm, ExternalReference(IC_Utility(kStoreIC_Miss)));
}
// Used from ic_<arch>.cc. // Used from ic_<arch>.cc.
Object* KeyedStoreIC_Miss(Arguments args) { Object* KeyedStoreIC_Miss(Arguments args) {
NoHandleAllocation na; NoHandleAllocation na;

13  deps/v8/src/ic.h

@ -209,6 +209,8 @@ class CallIC: public IC {
// Otherwise, it returns the undefined value. // Otherwise, it returns the undefined value.
Object* TryCallAsFunction(Object* object); Object* TryCallAsFunction(Object* object);
void ReceiverToObject(Handle<Object> object);
static void Clear(Address address, Code* target); static void Clear(Address address, Code* target);
friend class IC; friend class IC;
}; };
@ -293,6 +295,13 @@ class KeyedLoadIC: public IC {
static void ClearInlinedVersion(Address address); static void ClearInlinedVersion(Address address);
private: private:
// Bit mask to be tested against bit field for the cases when
// generic stub should go into slow case.
// Access check is necessary explicitly since generic stub does not perform
// map checks.
static const int kSlowCaseBitFieldMask =
(1 << Map::kIsAccessCheckNeeded) | (1 << Map::kHasIndexedInterceptor);
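kSlowCaseBitFieldMask folds the access-check bit and the indexed-interceptor bit into one constant, so the generic keyed-load stub shown earlier can cover both slow cases with a single test and branch. A plain-C++ sketch of the same trick, with bit positions made up for illustration:

#include <cstdint>
#include <cstdio>

// Hypothetical bit positions standing in for Map::kIsAccessCheckNeeded and
// Map::kHasIndexedInterceptor.
const int kIsAccessCheckNeeded = 3;
const int kHasIndexedInterceptor = 5;
const uint32_t kSlowCaseBitFieldMask =
    (1u << kIsAccessCheckNeeded) | (1u << kHasIndexedInterceptor);

bool NeedsSlowCase(uint32_t map_bit_field) {
  // One AND plus one compare instead of two separate bit tests.
  return (map_bit_field & kSlowCaseBitFieldMask) != 0;
}

int main() {
  std::printf("%d %d\n", NeedsSlowCase(1u << kHasIndexedInterceptor), NeedsSlowCase(0));
  return 0;
}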
static void Generate(MacroAssembler* masm, const ExternalReference& f); static void Generate(MacroAssembler* masm, const ExternalReference& f);
// Update the inline cache. // Update the inline cache.
@ -339,14 +348,12 @@ class StoreIC: public IC {
Handle<Object> value); Handle<Object> value);
// Code generators for stub routines. Only called once at startup. // Code generators for stub routines. Only called once at startup.
static void GenerateInitialize(MacroAssembler* masm); static void GenerateInitialize(MacroAssembler* masm) { GenerateMiss(masm); }
static void GenerateMiss(MacroAssembler* masm); static void GenerateMiss(MacroAssembler* masm);
static void GenerateMegamorphic(MacroAssembler* masm); static void GenerateMegamorphic(MacroAssembler* masm);
static void GenerateExtendStorage(MacroAssembler* masm); static void GenerateExtendStorage(MacroAssembler* masm);
private: private:
static void Generate(MacroAssembler* masm, const ExternalReference& f);
// Update the inline cache and the global stub cache based on the // Update the inline cache and the global stub cache based on the
// lookup result. // lookup result.
void UpdateCaches(LookupResult* lookup, void UpdateCaches(LookupResult* lookup,

2  deps/v8/src/json-delay.js

@ -29,7 +29,7 @@ var $JSON = global.JSON;
function ParseJSONUnfiltered(text) { function ParseJSONUnfiltered(text) {
var s = $String(text); var s = $String(text);
var f = %CompileString("(" + text + ")", true); var f = %CompileString(text, true);
return f(); return f();
} }

5  deps/v8/src/jsregexp.cc

@ -4462,10 +4462,13 @@ void CharacterRange::Merge(ZoneList<CharacterRange>* first_set,
while (i1 < n1 || i2 < n2) { while (i1 < n1 || i2 < n2) {
CharacterRange next_range; CharacterRange next_range;
int range_source; int range_source;
if (i2 == n2 || first_set->at(i1).from() < second_set->at(i2).from()) { if (i2 == n2 ||
(i1 < n1 && first_set->at(i1).from() < second_set->at(i2).from())) {
// Next smallest element is in first set.
next_range = first_set->at(i1++); next_range = first_set->at(i1++);
range_source = kInsideFirst; range_source = kInsideFirst;
} else { } else {
// Next smallest element is in second set.
next_range = second_set->at(i2++); next_range = second_set->at(i2++);
range_source = kInsideSecond; range_source = kInsideSecond;
} }
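The merge fix adds the `i1 < n1` guard so the first list is only dereferenced while it still has elements; previously, once the first list was exhausted but the second still had ranges, the old condition read past the end of the first list. A minimal sketch of the corrected two-list merge step over plain integers:

#include <cstdio>
#include <vector>

// Mirrors the guarded condition above: only look at first[i1] while i1 is in range.
std::vector<int> Merge(const std::vector<int>& first, const std::vector<int>& second) {
  size_t i1 = 0, i2 = 0;
  std::vector<int> out;
  while (i1 < first.size() || i2 < second.size()) {
    if (i2 == second.size() ||
        (i1 < first.size() && first[i1] < second[i2])) {
      out.push_back(first[i1++]);   // next smallest element is in the first set
    } else {
      out.push_back(second[i2++]);  // next smallest element is in the second set
    }
  }
  return out;
}

int main() {
  for (int v : Merge({1, 4}, {2, 3, 5})) std::printf("%d ", v);
  std::printf("\n");
  return 0;
}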

3  deps/v8/src/list.h

@ -68,7 +68,8 @@ class List {
// not safe to use after operations that can change the list's // not safe to use after operations that can change the list's
// backing store (eg, Add). // backing store (eg, Add).
inline T& operator[](int i) const { inline T& operator[](int i) const {
ASSERT(0 <= i && i < length_); ASSERT(0 <= i);
ASSERT(i < length_);
return data_[i]; return data_[i];
} }
inline T& at(int i) const { return operator[](i); } inline T& at(int i) const { return operator[](i); }

122  deps/v8/src/log.cc

@ -155,6 +155,13 @@ void StackTracer::Trace(TickSample* sample) {
return; return;
} }
const Address functionAddr =
sample->fp + JavaScriptFrameConstants::kFunctionOffset;
if (SafeStackFrameIterator::IsWithinBounds(sample->sp, js_entry_sp,
functionAddr)) {
sample->function = Memory::Address_at(functionAddr) - kHeapObjectTag;
}
int i = 0; int i = 0;
const Address callback = Logger::current_state_ != NULL ? const Address callback = Logger::current_state_ != NULL ?
Logger::current_state_->external_callback() : NULL; Logger::current_state_->external_callback() : NULL;
@ -162,11 +169,8 @@ void StackTracer::Trace(TickSample* sample) {
sample->stack[i++] = callback; sample->stack[i++] = callback;
} }
SafeStackTraceFrameIterator it( SafeStackTraceFrameIterator it(sample->fp, sample->sp,
reinterpret_cast<Address>(sample->fp), sample->sp, js_entry_sp);
reinterpret_cast<Address>(sample->sp),
reinterpret_cast<Address>(sample->sp),
js_entry_sp);
while (!it.done() && i < TickSample::kMaxFramesCount) { while (!it.done() && i < TickSample::kMaxFramesCount) {
sample->stack[i++] = it.frame()->pc(); sample->stack[i++] = it.frame()->pc();
it.Advance(); it.Advance();
@ -837,10 +841,77 @@ void Logger::RegExpCodeCreateEvent(Code* code, String* source) {
void Logger::CodeMoveEvent(Address from, Address to) { void Logger::CodeMoveEvent(Address from, Address to) {
#ifdef ENABLE_LOGGING_AND_PROFILING #ifdef ENABLE_LOGGING_AND_PROFILING
MoveEventInternal(CODE_MOVE_EVENT, from, to);
#endif
}
void Logger::CodeDeleteEvent(Address from) {
#ifdef ENABLE_LOGGING_AND_PROFILING
DeleteEventInternal(CODE_DELETE_EVENT, from);
#endif
}
void Logger::SnapshotPositionEvent(Address addr, int pos) {
#ifdef ENABLE_LOGGING_AND_PROFILING
if (!Log::IsEnabled() || !FLAG_log_snapshot_positions) return;
LogMessageBuilder msg;
msg.Append("%s,", log_events_[SNAPSHOT_POSITION_EVENT]);
msg.AppendAddress(addr);
msg.Append(",%d", pos);
if (FLAG_compress_log) {
ASSERT(compression_helper_ != NULL);
if (!compression_helper_->HandleMessage(&msg)) return;
}
msg.Append('\n');
msg.WriteToLogFile();
#endif
}
void Logger::FunctionCreateEvent(JSFunction* function) {
#ifdef ENABLE_LOGGING_AND_PROFILING
static Address prev_code = NULL;
if (!Log::IsEnabled() || !FLAG_log_code) return;
LogMessageBuilder msg;
msg.Append("%s,", log_events_[FUNCTION_CREATION_EVENT]);
msg.AppendAddress(function->address());
msg.Append(',');
msg.AppendAddress(function->code()->address(), prev_code);
prev_code = function->code()->address();
if (FLAG_compress_log) {
ASSERT(compression_helper_ != NULL);
if (!compression_helper_->HandleMessage(&msg)) return;
}
msg.Append('\n');
msg.WriteToLogFile();
#endif
}
void Logger::FunctionMoveEvent(Address from, Address to) {
#ifdef ENABLE_LOGGING_AND_PROFILING
MoveEventInternal(FUNCTION_MOVE_EVENT, from, to);
#endif
}
void Logger::FunctionDeleteEvent(Address from) {
#ifdef ENABLE_LOGGING_AND_PROFILING
DeleteEventInternal(FUNCTION_DELETE_EVENT, from);
#endif
}
#ifdef ENABLE_LOGGING_AND_PROFILING
void Logger::MoveEventInternal(LogEventsAndTags event,
Address from,
Address to) {
static Address prev_to_ = NULL; static Address prev_to_ = NULL;
if (!Log::IsEnabled() || !FLAG_log_code) return; if (!Log::IsEnabled() || !FLAG_log_code) return;
LogMessageBuilder msg; LogMessageBuilder msg;
msg.Append("%s,", log_events_[CODE_MOVE_EVENT]); msg.Append("%s,", log_events_[event]);
msg.AppendAddress(from); msg.AppendAddress(from);
msg.Append(','); msg.Append(',');
msg.AppendAddress(to, prev_to_); msg.AppendAddress(to, prev_to_);
@ -851,15 +922,15 @@ void Logger::CodeMoveEvent(Address from, Address to) {
} }
msg.Append('\n'); msg.Append('\n');
msg.WriteToLogFile(); msg.WriteToLogFile();
#endif
} }
#endif
void Logger::CodeDeleteEvent(Address from) {
#ifdef ENABLE_LOGGING_AND_PROFILING #ifdef ENABLE_LOGGING_AND_PROFILING
void Logger::DeleteEventInternal(LogEventsAndTags event, Address from) {
if (!Log::IsEnabled() || !FLAG_log_code) return; if (!Log::IsEnabled() || !FLAG_log_code) return;
LogMessageBuilder msg; LogMessageBuilder msg;
msg.Append("%s,", log_events_[CODE_DELETE_EVENT]); msg.Append("%s,", log_events_[event]);
msg.AppendAddress(from); msg.AppendAddress(from);
if (FLAG_compress_log) { if (FLAG_compress_log) {
ASSERT(compression_helper_ != NULL); ASSERT(compression_helper_ != NULL);
@ -867,8 +938,8 @@ void Logger::CodeDeleteEvent(Address from) {
} }
msg.Append('\n'); msg.Append('\n');
msg.WriteToLogFile(); msg.WriteToLogFile();
#endif
} }
#endif
void Logger::ResourceEvent(const char* name, const char* tag) { void Logger::ResourceEvent(const char* name, const char* tag) {
@ -1052,13 +1123,17 @@ void Logger::DebugEvent(const char* event_type, Vector<uint16_t> parameter) {
void Logger::TickEvent(TickSample* sample, bool overflow) { void Logger::TickEvent(TickSample* sample, bool overflow) {
if (!Log::IsEnabled() || !FLAG_prof) return; if (!Log::IsEnabled() || !FLAG_prof) return;
static Address prev_sp = NULL; static Address prev_sp = NULL;
static Address prev_function = NULL;
LogMessageBuilder msg; LogMessageBuilder msg;
msg.Append("%s,", log_events_[TICK_EVENT]); msg.Append("%s,", log_events_[TICK_EVENT]);
Address prev_addr = reinterpret_cast<Address>(sample->pc); Address prev_addr = sample->pc;
msg.AppendAddress(prev_addr); msg.AppendAddress(prev_addr);
msg.Append(','); msg.Append(',');
msg.AppendAddress(reinterpret_cast<Address>(sample->sp), prev_sp); msg.AppendAddress(sample->sp, prev_sp);
prev_sp = reinterpret_cast<Address>(sample->sp); prev_sp = sample->sp;
msg.Append(',');
msg.AppendAddress(sample->function, prev_function);
prev_function = sample->function;
msg.Append(",%d", static_cast<int>(sample->state)); msg.Append(",%d", static_cast<int>(sample->state));
if (overflow) { if (overflow) {
msg.Append(",overflow"); msg.Append(",overflow");
@ -1127,6 +1202,7 @@ void Logger::ResumeProfiler(int flags) {
LOG(UncheckedStringEvent("profiler", "resume")); LOG(UncheckedStringEvent("profiler", "resume"));
FLAG_log_code = true; FLAG_log_code = true;
LogCompiledFunctions(); LogCompiledFunctions();
LogFunctionObjects();
LogAccessorCallbacks(); LogAccessorCallbacks();
if (!FLAG_sliding_state_window) ticker_->Start(); if (!FLAG_sliding_state_window) ticker_->Start();
} }
@ -1161,9 +1237,7 @@ static int EnumerateCompiledFunctions(Handle<SharedFunctionInfo>* sfis) {
AssertNoAllocation no_alloc; AssertNoAllocation no_alloc;
int compiled_funcs_count = 0; int compiled_funcs_count = 0;
HeapIterator iterator; HeapIterator iterator;
while (iterator.has_next()) { for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
HeapObject* obj = iterator.next();
ASSERT(obj != NULL);
if (!obj->IsSharedFunctionInfo()) continue; if (!obj->IsSharedFunctionInfo()) continue;
SharedFunctionInfo* sfi = SharedFunctionInfo::cast(obj); SharedFunctionInfo* sfi = SharedFunctionInfo::cast(obj);
if (sfi->is_compiled() if (sfi->is_compiled()
@ -1273,12 +1347,22 @@ void Logger::LogCompiledFunctions() {
} }
void Logger::LogFunctionObjects() {
AssertNoAllocation no_alloc;
HeapIterator iterator;
for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
if (!obj->IsJSFunction()) continue;
JSFunction* jsf = JSFunction::cast(obj);
if (!jsf->is_compiled()) continue;
LOG(FunctionCreateEvent(jsf));
}
}
void Logger::LogAccessorCallbacks() { void Logger::LogAccessorCallbacks() {
AssertNoAllocation no_alloc; AssertNoAllocation no_alloc;
HeapIterator iterator; HeapIterator iterator;
while (iterator.has_next()) { for (HeapObject* obj = iterator.next(); obj != NULL; obj = iterator.next()) {
HeapObject* obj = iterator.next();
ASSERT(obj != NULL);
if (!obj->IsAccessorInfo()) continue; if (!obj->IsAccessorInfo()) continue;
AccessorInfo* ai = AccessorInfo::cast(obj); AccessorInfo* ai = AccessorInfo::cast(obj);
if (!ai->name()->IsString()) continue; if (!ai->name()->IsString()) continue;

23  deps/v8/src/log.h

@ -116,6 +116,10 @@ class VMState BASE_EMBEDDED {
V(CODE_CREATION_EVENT, "code-creation", "cc") \ V(CODE_CREATION_EVENT, "code-creation", "cc") \
V(CODE_MOVE_EVENT, "code-move", "cm") \ V(CODE_MOVE_EVENT, "code-move", "cm") \
V(CODE_DELETE_EVENT, "code-delete", "cd") \ V(CODE_DELETE_EVENT, "code-delete", "cd") \
V(FUNCTION_CREATION_EVENT, "function-creation", "fc") \
V(FUNCTION_MOVE_EVENT, "function-move", "fm") \
V(FUNCTION_DELETE_EVENT, "function-delete", "fd") \
V(SNAPSHOT_POSITION_EVENT, "snapshot-pos", "sp") \
V(TICK_EVENT, "tick", "t") \ V(TICK_EVENT, "tick", "t") \
V(REPEAT_META_EVENT, "repeat", "r") \ V(REPEAT_META_EVENT, "repeat", "r") \
V(BUILTIN_TAG, "Builtin", "bi") \ V(BUILTIN_TAG, "Builtin", "bi") \
@ -223,6 +227,14 @@ class Logger {
static void CodeMoveEvent(Address from, Address to); static void CodeMoveEvent(Address from, Address to);
// Emits a code delete event. // Emits a code delete event.
static void CodeDeleteEvent(Address from); static void CodeDeleteEvent(Address from);
// Emits a function object create event.
static void FunctionCreateEvent(JSFunction* function);
// Emits a function move event.
static void FunctionMoveEvent(Address from, Address to);
// Emits a function delete event.
static void FunctionDeleteEvent(Address from);
static void SnapshotPositionEvent(Address addr, int pos);
// ==== Events logged by --log-gc. ==== // ==== Events logged by --log-gc. ====
// Heap sampling events: start, end, and individual types. // Heap sampling events: start, end, and individual types.
@ -275,6 +287,8 @@ class Logger {
// Logs all compiled functions found in the heap. // Logs all compiled functions found in the heap.
static void LogCompiledFunctions(); static void LogCompiledFunctions();
// Logs all compiled JSFunction objects found in the heap.
static void LogFunctionObjects();
// Logs all accessor callbacks found in the heap. // Logs all accessor callbacks found in the heap.
static void LogAccessorCallbacks(); static void LogAccessorCallbacks();
// Used for logging stubs found in the snapshot. // Used for logging stubs found in the snapshot.
@ -296,6 +310,15 @@ class Logger {
const char* name, const char* name,
Address entry_point); Address entry_point);
// Internal configurable move event.
static void MoveEventInternal(LogEventsAndTags event,
Address from,
Address to);
// Internal configurable move event.
static void DeleteEventInternal(LogEventsAndTags event,
Address from);
// Emits aliases for compressed messages. // Emits aliases for compressed messages.
static void LogAliases(); static void LogAliases();

1  deps/v8/src/macros.py

@ -92,6 +92,7 @@ macro IS_ERROR(arg) = (%_ClassOf(arg) === 'Error');
macro IS_SCRIPT(arg) = (%_ClassOf(arg) === 'Script'); macro IS_SCRIPT(arg) = (%_ClassOf(arg) === 'Script');
macro IS_ARGUMENTS(arg) = (%_ClassOf(arg) === 'Arguments'); macro IS_ARGUMENTS(arg) = (%_ClassOf(arg) === 'Arguments');
macro IS_GLOBAL(arg) = (%_ClassOf(arg) === 'global'); macro IS_GLOBAL(arg) = (%_ClassOf(arg) === 'global');
macro IS_UNDETECTABLE(arg) = (%_IsUndetectableObject(arg));
macro FLOOR(arg) = $floor(arg); macro FLOOR(arg) = $floor(arg);
# Inline macros. Use %IS_VAR to make sure arg is evaluated only once. # Inline macros. Use %IS_VAR to make sure arg is evaluated only once.

76  deps/v8/src/mark-compact.cc

@ -129,7 +129,8 @@ void MarkCompactCollector::Prepare(GCTracer* tracer) {
#endif #endif
PagedSpaces spaces; PagedSpaces spaces;
while (PagedSpace* space = spaces.next()) { for (PagedSpace* space = spaces.next();
space != NULL; space = spaces.next()) {
space->PrepareForMarkCompact(compacting_collection_); space->PrepareForMarkCompact(compacting_collection_);
} }
@ -172,7 +173,7 @@ void MarkCompactCollector::Finish() {
int old_gen_used = 0; int old_gen_used = 0;
OldSpaces spaces; OldSpaces spaces;
while (OldSpace* space = spaces.next()) { for (OldSpace* space = spaces.next(); space != NULL; space = spaces.next()) {
old_gen_recoverable += space->Waste() + space->AvailableFree(); old_gen_recoverable += space->Waste() + space->AvailableFree();
old_gen_used += space->Size(); old_gen_used += space->Size();
} }
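Several iterators in this change drop the has_next()/next() pair in favour of a next() that returns NULL when exhausted, so every loop collapses into a single for statement. A small illustration of that iterator shape (the ToyIterator class is a stand-in, not V8's HeapObjectIterator):

#include <cstdio>
#include <vector>

struct Thing { int id; };

// next() hands out the following element, or nullptr at the end - the shape
// the V8 heap and space iterators move to in this change.
class ToyIterator {
 public:
  explicit ToyIterator(const std::vector<Thing>* v) : v_(v) {}
  const Thing* next() { return index_ < v_->size() ? &(*v_)[index_++] : nullptr; }
 private:
  const std::vector<Thing>* v_;
  size_t index_ = 0;
};

int main() {
  std::vector<Thing> heap = {{1}, {2}, {3}};
  ToyIterator it(&heap);
  for (const Thing* t = it.next(); t != nullptr; t = it.next()) {
    std::printf("visiting %d\n", t->id);
  }
  return 0;
}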
@ -475,8 +476,8 @@ void MarkCompactCollector::MarkDescriptorArray(
void MarkCompactCollector::CreateBackPointers() { void MarkCompactCollector::CreateBackPointers() {
HeapObjectIterator iterator(Heap::map_space()); HeapObjectIterator iterator(Heap::map_space());
while (iterator.has_next()) { for (HeapObject* next_object = iterator.next();
Object* next_object = iterator.next(); next_object != NULL; next_object = iterator.next()) {
if (next_object->IsMap()) { // Could also be ByteArray on free list. if (next_object->IsMap()) { // Could also be ByteArray on free list.
Map* map = Map::cast(next_object); Map* map = Map::cast(next_object);
if (map->instance_type() >= FIRST_JS_OBJECT_TYPE && if (map->instance_type() >= FIRST_JS_OBJECT_TYPE &&
@ -509,8 +510,7 @@ static void ScanOverflowedObjects(T* it) {
// so that we don't waste effort pointlessly scanning for objects. // so that we don't waste effort pointlessly scanning for objects.
ASSERT(!marking_stack.is_full()); ASSERT(!marking_stack.is_full());
while (it->has_next()) { for (HeapObject* object = it->next(); object != NULL; object = it->next()) {
HeapObject* object = it->next();
if (object->IsOverflowed()) { if (object->IsOverflowed()) {
object->ClearOverflow(); object->ClearOverflow();
ASSERT(object->IsMarked()); ASSERT(object->IsMarked());
@ -793,8 +793,9 @@ void MarkCompactCollector::ClearNonLiveTransitions() {
// scan the descriptor arrays of those maps, not all maps. // scan the descriptor arrays of those maps, not all maps.
// All of these actions are carried out only on maps of JSObjects // All of these actions are carried out only on maps of JSObjects
// and related subtypes. // and related subtypes.
while (map_iterator.has_next()) { for (HeapObject* obj = map_iterator.next();
Map* map = reinterpret_cast<Map*>(map_iterator.next()); obj != NULL; obj = map_iterator.next()) {
Map* map = reinterpret_cast<Map*>(obj);
if (!map->IsMarked() && map->IsByteArray()) continue; if (!map->IsMarked() && map->IsByteArray()) continue;
ASSERT(SafeIsMap(map)); ASSERT(SafeIsMap(map));
@@ -969,12 +970,6 @@ inline void EncodeForwardingAddressInPagedSpace(HeapObject* old_object,
inline void IgnoreNonLiveObject(HeapObject* object) {}

-// A code deletion event is logged for non-live code objects.
-inline void LogNonLiveCodeObject(HeapObject* object) {
-  if (object->IsCode()) LOG(CodeDeleteEvent(object->address()));
-}
-
-
// Function template that, given a range of addresses (eg, a semispace or a
// paged space page), iterates through the objects in the range to clear
// mark bits and compute and encode forwarding addresses.  As a side effect,
@@ -1122,10 +1117,7 @@ static void SweepSpace(PagedSpace* space, DeallocateFunction dealloc) {
        is_previous_alive = true;
      }
    } else {
-      if (object->IsCode()) {
-        // Notify the logger that compiled code has been collected.
-        LOG(CodeDeleteEvent(Code::cast(object)->address()));
-      }
+      MarkCompactCollector::ReportDeleteIfNeeded(object);
      if (is_previous_alive) {  // Transition from live to free.
        free_start = current;
        is_previous_alive = false;
@@ -1204,7 +1196,7 @@ void MarkCompactCollector::EncodeForwardingAddresses() {
  // Compute the forwarding pointers in each space.
  EncodeForwardingAddressesInPagedSpace<MCAllocateFromOldPointerSpace,
-                                        IgnoreNonLiveObject>(
+                                        ReportDeleteIfNeeded>(
      Heap::old_pointer_space());

  EncodeForwardingAddressesInPagedSpace<MCAllocateFromOldDataSpace,
@@ -1212,7 +1204,7 @@ void MarkCompactCollector::EncodeForwardingAddresses() {
      Heap::old_data_space());

  EncodeForwardingAddressesInPagedSpace<MCAllocateFromCodeSpace,
-                                        LogNonLiveCodeObject>(
+                                        ReportDeleteIfNeeded>(
      Heap::code_space());

  EncodeForwardingAddressesInPagedSpace<MCAllocateFromCellSpace,
@@ -1291,6 +1283,7 @@ class MapCompact {
    MapIterator it;
    HeapObject* o = it.next();
    for (; o != first_map_to_evacuate_; o = it.next()) {
+      ASSERT(o != NULL);
      Map* map = reinterpret_cast<Map*>(o);
      ASSERT(!map->IsMarked());
      ASSERT(!map->IsOverflowed());
@@ -1316,10 +1309,8 @@ class MapCompact {
  void UpdateMapPointersInLargeObjectSpace() {
    LargeObjectIterator it(Heap::lo_space());
-    while (true) {
-      if (!it.has_next()) break;
-      UpdateMapPointersInObject(it.next());
-    }
+    for (HeapObject* obj = it.next(); obj != NULL; obj = it.next())
+      UpdateMapPointersInObject(obj);
  }

  void Finish() {
@@ -1362,8 +1353,8 @@ class MapCompact {
  static Map* NextMap(MapIterator* it, HeapObject* last, bool live) {
    while (true) {
-      ASSERT(it->has_next());
      HeapObject* next = it->next();
+      ASSERT(next != NULL);
      if (next == last)
        return NULL;
      ASSERT(!next->IsOverflowed());
@@ -1452,8 +1443,9 @@ class MapCompact {
    if (!FLAG_enable_slow_asserts)
      return;
-    while (map_to_evacuate_it_.has_next())
-      ASSERT(FreeListNode::IsFreeListNode(map_to_evacuate_it_.next()));
+    for (HeapObject* obj = map_to_evacuate_it_.next();
+         obj != NULL; obj = map_to_evacuate_it_.next())
+      ASSERT(FreeListNode::IsFreeListNode(obj));
  }
#endif
};
@@ -1486,7 +1478,8 @@ void MarkCompactCollector::SweepSpaces() {
  map_compact.FinishMapSpace();
  PagedSpaces spaces;
-  while (PagedSpace* space = spaces.next()) {
+  for (PagedSpace* space = spaces.next();
+       space != NULL; space = spaces.next()) {
    if (space == Heap::map_space()) continue;
    map_compact.UpdateMapPointersInPagedSpace(space);
  }
@@ -1661,7 +1654,8 @@ void MarkCompactCollector::UpdatePointers() {
  // Large objects do not move, the map word can be updated directly.
  LargeObjectIterator it(Heap::lo_space());
-  while (it.has_next()) UpdatePointersInNewObject(it.next());
+  for (HeapObject* obj = it.next(); obj != NULL; obj = it.next())
+    UpdatePointersInNewObject(obj);

  USE(live_maps);
  USE(live_pointer_olds);
@@ -1825,7 +1819,8 @@ void MarkCompactCollector::RelocateObjects() {
  Page::set_rset_state(Page::IN_USE);
#endif
  PagedSpaces spaces;
-  while (PagedSpace* space = spaces.next()) space->MCCommitRelocationInfo();
+  for (PagedSpace* space = spaces.next(); space != NULL; space = spaces.next())
+    space->MCCommitRelocationInfo();
}
@@ -1906,6 +1901,11 @@ int MarkCompactCollector::RelocateOldNonCodeObject(HeapObject* obj,

  ASSERT(!HeapObject::FromAddress(new_addr)->IsCode());

+  HeapObject* copied_to = HeapObject::FromAddress(new_addr);
+  if (copied_to->IsJSFunction()) {
+    LOG(FunctionMoveEvent(old_addr, new_addr));
+  }
+
  return obj_size;
}
@@ -1986,6 +1986,11 @@ int MarkCompactCollector::RelocateNewObject(HeapObject* obj) {
  }
#endif

+  HeapObject* copied_to = HeapObject::FromAddress(new_addr);
+  if (copied_to->IsJSFunction()) {
+    LOG(FunctionMoveEvent(old_addr, new_addr));
+  }
+
  return obj_size;
}
@@ -2001,4 +2006,15 @@ void MarkCompactCollector::RebuildRSets() {
  Heap::RebuildRSets();
}

+
+void MarkCompactCollector::ReportDeleteIfNeeded(HeapObject* obj) {
+#ifdef ENABLE_LOGGING_AND_PROFILING
+  if (obj->IsCode()) {
+    LOG(CodeDeleteEvent(obj->address()));
+  } else if (obj->IsJSFunction()) {
+    LOG(FunctionDeleteEvent(obj->address()));
+  }
+#endif
+}
+
} }  // namespace v8::internal
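The loop rewrites scattered through this file all follow the same new iterator contract: instead of pairing has_next() with next(), next() now returns NULL once the space is exhausted, and the loops test for that sentinel directly. A minimal standalone sketch of the same pattern, using a hypothetical iterator type rather than V8's real space iterators (illustration only):

#include <cstddef>
#include <iostream>
#include <vector>

// Hypothetical stand-in for V8's space iterators: next() yields nullptr
// when nothing is left, so no separate has_next() check is needed.
class IntIterator {
 public:
  explicit IntIterator(std::vector<int>* items) : items_(items), pos_(0) {}
  int* next() {
    if (pos_ >= items_->size()) return nullptr;  // End-of-iteration sentinel.
    return &(*items_)[pos_++];
  }
 private:
  std::vector<int>* items_;
  size_t pos_;
};

int main() {
  std::vector<int> spaces = {1, 2, 3};
  IntIterator it(&spaces);
  // Same shape as the rewritten loops: initialize, test against the sentinel, advance.
  for (int* p = it.next(); p != nullptr; p = it.next()) {
    std::cout << *p << "\n";
  }
  return 0;
}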

3
deps/v8/src/mark-compact.h

@@ -115,6 +115,9 @@ class MarkCompactCollector: public AllStatic {
  static bool in_use() { return state_ > PREPARE_GC; }
#endif

+  // Determine type of object and emit deletion log event.
+  static void ReportDeleteIfNeeded(HeapObject* obj);
+
 private:
#ifdef DEBUG
  enum CollectorState {
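The new ReportDeleteIfNeeded hook declared here (and defined in mark-compact.cc above) folds the old code-only deletion logging and the new JSFunction deletion logging into one dispatch on the object's type. A rough sketch of that dispatch shape with hypothetical logger and object types, not V8's API:

#include <cstdio>

// Hypothetical object kinds standing in for HeapObject::IsCode()/IsJSFunction().
enum class Kind { kCode, kFunction, kOther };

struct FakeHeapObject {
  Kind kind;
  void* address;
};

// One helper decides which deletion event (if any) to emit, mirroring the
// shape of MarkCompactCollector::ReportDeleteIfNeeded in the diff.
void ReportDeleteIfNeeded(const FakeHeapObject& obj) {
  switch (obj.kind) {
    case Kind::kCode:
      std::printf("code-delete %p\n", obj.address);
      break;
    case Kind::kFunction:
      std::printf("function-delete %p\n", obj.address);
      break;
    default:
      break;  // Nothing to report for other objects.
  }
}

int main() {
  int dummy = 0;
  ReportDeleteIfNeeded({Kind::kCode, &dummy});
  ReportDeleteIfNeeded({Kind::kOther, &dummy});
  return 0;
}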

3
deps/v8/src/messages.js

@@ -178,8 +178,7 @@ function FormatMessage(message) {
      result_not_primitive: "Result of %0 must be a primitive, was %1",
      invalid_json: "String '%0' is not valid JSON",
      circular_structure: "Converting circular structure to JSON",
-      object_keys_non_object: "Object.keys called on non-object",
-      object_get_prototype_non_object: "Object.getPrototypeOf called on non-object",
+      obj_ctor_property_non_object: "Object.%0 called on non-object",
      array_indexof_not_defined: "Array.getIndexOf: Argument undefined"
    };
  }

18
deps/v8/src/mirror-delay.js

@@ -600,14 +600,14 @@ ObjectMirror.prototype.protoObject = function() {
ObjectMirror.prototype.hasNamedInterceptor = function() {
  // Get information on interceptors for this object.
-  var x = %DebugInterceptorInfo(this.value_);
+  var x = %GetInterceptorInfo(this.value_);
  return (x & 2) != 0;
};


ObjectMirror.prototype.hasIndexedInterceptor = function() {
  // Get information on interceptors for this object.
-  var x = %DebugInterceptorInfo(this.value_);
+  var x = %GetInterceptorInfo(this.value_);
  return (x & 1) != 0;
};
@@ -631,13 +631,13 @@ ObjectMirror.prototype.propertyNames = function(kind, limit) {
  // Find all the named properties.
  if (kind & PropertyKind.Named) {
    // Get the local property names.
-    propertyNames = %DebugLocalPropertyNames(this.value_);
+    propertyNames = %GetLocalPropertyNames(this.value_);
    total += propertyNames.length;

    // Get names for named interceptor properties if any.
    if (this.hasNamedInterceptor() && (kind & PropertyKind.Named)) {
      var namedInterceptorNames =
-          %DebugNamedInterceptorPropertyNames(this.value_);
+          %GetNamedInterceptorPropertyNames(this.value_);
      if (namedInterceptorNames) {
        propertyNames = propertyNames.concat(namedInterceptorNames);
        total += namedInterceptorNames.length;
@@ -648,13 +648,13 @@ ObjectMirror.prototype.propertyNames = function(kind, limit) {
  // Find all the indexed properties.
  if (kind & PropertyKind.Indexed) {
    // Get the local element names.
-    elementNames = %DebugLocalElementNames(this.value_);
+    elementNames = %GetLocalElementNames(this.value_);
    total += elementNames.length;

    // Get names for indexed interceptor properties.
    if (this.hasIndexedInterceptor() && (kind & PropertyKind.Indexed)) {
      var indexedInterceptorNames =
-          %DebugIndexedInterceptorElementNames(this.value_);
+          %GetIndexedInterceptorElementNames(this.value_);
      if (indexedInterceptorNames) {
        elementNames = elementNames.concat(indexedInterceptorNames);
        total += indexedInterceptorNames.length;
@@ -2089,8 +2089,10 @@ JSONProtocolSerializer.prototype.serialize_ = function(mirror, reference,
      content.evalFromScript =
          this.serializeReference(mirror.evalFromScript());
      var evalFromLocation = mirror.evalFromLocation()
-      content.evalFromLocation = { line: evalFromLocation.line,
-                                   column: evalFromLocation.column}
+      if (evalFromLocation) {
+        content.evalFromLocation = { line: evalFromLocation.line,
+                                     column: evalFromLocation.column };
+      }
      if (mirror.evalFromFunctionName()) {
        content.evalFromFunctionName = mirror.evalFromFunctionName();
      }

6
deps/v8/src/mksnapshot.cc

@@ -130,6 +130,10 @@ class CppByteSink : public i::SnapshotByteSink {
    }
  }

+  virtual int Position() {
+    return bytes_written_;
+  }
+
 private:
  FILE* fp_;
  int bytes_written_;
@@ -160,10 +164,10 @@ int main(int argc, char** argv) {
  }
  context.Dispose();
  CppByteSink sink(argv[1]);
-  i::Serializer ser(&sink);
  // This results in a somewhat smaller snapshot, probably because it gets rid
  // of some things that are cached between garbage collections.
  i::Heap::CollectAllGarbage(true);
+  i::StartupSerializer ser(&sink);
  ser.Serialize();
  return 0;
}
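The mksnapshot change gives the byte sink a Position() accessor (the new StartupSerializer presumably needs to know how many bytes have been emitted so far) and constructs the serializer only after the pre-serialization GC. A toy sketch of a sink that tracks its write position, with hypothetical types rather than V8's SnapshotByteSink:

#include <cstdio>

// Hypothetical sink interface: Put() consumes one byte, Position() reports
// how many bytes have been written, as in the CppByteSink change above.
class ByteSink {
 public:
  virtual ~ByteSink() {}
  virtual void Put(int byte) = 0;
  virtual int Position() = 0;
};

class CountingFileSink : public ByteSink {
 public:
  explicit CountingFileSink(FILE* fp) : fp_(fp), bytes_written_(0) {}
  void Put(int byte) override {
    std::fputc(byte, fp_);
    bytes_written_++;
  }
  int Position() override { return bytes_written_; }
 private:
  FILE* fp_;
  int bytes_written_;
};

int main() {
  CountingFileSink sink(stdout);
  sink.Put('o');
  sink.Put('k');
  sink.Put('\n');
  std::fprintf(stderr, "wrote %d bytes\n", sink.Position());
  return 0;
}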

21
deps/v8/src/objects-inl.h

@@ -1349,7 +1349,7 @@ void FixedArray::set(int index, Object* value) {
}


-WriteBarrierMode HeapObject::GetWriteBarrierMode() {
+WriteBarrierMode HeapObject::GetWriteBarrierMode(const AssertNoAllocation&) {
  if (Heap::InNewSpace(this)) return SKIP_WRITE_BARRIER;
  return UPDATE_WRITE_BARRIER;
}
@@ -1367,6 +1367,7 @@ void FixedArray::set(int index,

void FixedArray::fast_set(FixedArray* array, int index, Object* value) {
  ASSERT(index >= 0 && index < array->length());
+  ASSERT(!Heap::InNewSpace(value));
  WRITE_FIELD(array, kHeaderSize + index * kPointerSize, value);
}
@@ -1547,9 +1548,7 @@ uint32_t NumberDictionary::max_number_key() {
}

void NumberDictionary::set_requires_slow_elements() {
-  set(kMaxNumberKeyIndex,
-      Smi::FromInt(kRequiresSlowElementsMask),
-      SKIP_WRITE_BARRIER);
+  set(kMaxNumberKeyIndex, Smi::FromInt(kRequiresSlowElementsMask));
}
@@ -2372,8 +2371,8 @@ BOOL_GETTER(SharedFunctionInfo, compiler_hints,
            kHasOnlySimpleThisPropertyAssignments)
BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
-               try_fast_codegen,
-               kTryFastCodegen)
+               try_full_codegen,
+               kTryFullCodegen)

INT_ACCESSORS(SharedFunctionInfo, length, kLengthOffset)
INT_ACCESSORS(SharedFunctionInfo, formal_parameter_count,
@@ -2972,7 +2971,8 @@ void Dictionary<Shape, Key>::SetEntry(int entry,
                                      PropertyDetails details) {
  ASSERT(!key->IsString() || details.IsDeleted() || details.index() > 0);
  int index = HashTable<Shape, Key>::EntryToIndex(entry);
-  WriteBarrierMode mode = FixedArray::GetWriteBarrierMode();
+  AssertNoAllocation no_gc;
+  WriteBarrierMode mode = FixedArray::GetWriteBarrierMode(no_gc);
  FixedArray::set(index, key, mode);
  FixedArray::set(index+1, value, mode);
  FixedArray::fast_set(this, index+2, details.AsSmi());
@@ -3006,8 +3006,13 @@ void JSArray::EnsureSize(int required_size) {
}


+void JSArray::set_length(Smi* length) {
+  set_length(static_cast<Object*>(length), SKIP_WRITE_BARRIER);
+}
+
+
void JSArray::SetContent(FixedArray* storage) {
-  set_length(Smi::FromInt(storage->length()), SKIP_WRITE_BARRIER);
+  set_length(Smi::FromInt(storage->length()));
  set_elements(storage);
}
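The new JSArray::set_length(Smi*) overload, and the SKIP_WRITE_BARRIER arguments dropped at call sites throughout objects.cc below, rest on the same observation: a smi is an immediate tagged value, not a heap pointer, so storing one cannot create an old-to-new pointer and never needs a write barrier. A simplified sketch of that tagging idea, assuming the classic low-bit smi tag; the constants and helpers here are illustrative, not V8's actual macros:

#include <cstdint>
#include <iostream>

// Assumed tagging scheme for illustration: smis carry a 0 in the low bit,
// heap object pointers carry a 1. (V8's real constants live in objects.h.)
const intptr_t kTagMask = 1;
const intptr_t kSmiTag = 0;

inline intptr_t MakeSmi(intptr_t value) { return value << 1; }
inline bool IsSmi(intptr_t tagged) { return (tagged & kTagMask) == kSmiTag; }

// Hypothetical store helper: only pointer stores would need a write barrier.
void StoreField(intptr_t* slot, intptr_t tagged_value) {
  *slot = tagged_value;
  if (!IsSmi(tagged_value)) {
    // A real collector would record this slot in the remembered set here.
    std::cout << "write barrier needed\n";
  }
}

int main() {
  intptr_t slot = 0;
  StoreField(&slot, MakeSmi(42));             // No barrier: immediate value.
  StoreField(&slot, (intptr_t)&slot | 1);     // Barrier: looks like a heap pointer.
  return 0;
}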

81
deps/v8/src/objects.cc

@@ -2839,7 +2839,11 @@ Object* JSObject::DefineGetterSetter(String* name,
      if (result.IsReadOnly()) return Heap::undefined_value();
      if (result.type() == CALLBACKS) {
        Object* obj = result.GetCallbackObject();
-        if (obj->IsFixedArray()) return obj;
+        if (obj->IsFixedArray()) {
+          PropertyDetails details = PropertyDetails(attributes, CALLBACKS);
+          SetNormalizedProperty(name, obj, details);
+          return obj;
+        }
      }
    }
  }
@@ -3196,8 +3200,9 @@ Object* FixedArray::UnionOfKeys(FixedArray* other) {
  Object* obj = Heap::AllocateFixedArray(len0 + extra);
  if (obj->IsFailure()) return obj;
  // Fill in the content
+  AssertNoAllocation no_gc;
  FixedArray* result = FixedArray::cast(obj);
-  WriteBarrierMode mode = result->GetWriteBarrierMode();
+  WriteBarrierMode mode = result->GetWriteBarrierMode(no_gc);
  for (int i = 0; i < len0; i++) {
    result->set(i, get(i), mode);
  }
@@ -3221,10 +3226,11 @@ Object* FixedArray::CopySize(int new_length) {
  if (obj->IsFailure()) return obj;
  FixedArray* result = FixedArray::cast(obj);
  // Copy the content
+  AssertNoAllocation no_gc;
  int len = length();
  if (new_length < len) len = new_length;
  result->set_map(map());
-  WriteBarrierMode mode = result->GetWriteBarrierMode();
+  WriteBarrierMode mode = result->GetWriteBarrierMode(no_gc);
  for (int i = 0; i < len; i++) {
    result->set(i, get(i), mode);
  }
@@ -3233,7 +3239,8 @@ Object* FixedArray::CopySize(int new_length) {

void FixedArray::CopyTo(int pos, FixedArray* dest, int dest_pos, int len) {
-  WriteBarrierMode mode = dest->GetWriteBarrierMode();
+  AssertNoAllocation no_gc;
+  WriteBarrierMode mode = dest->GetWriteBarrierMode(no_gc);
  for (int index = 0; index < len; index++) {
    dest->set(dest_pos+index, get(pos+index), mode);
  }
@@ -3267,8 +3274,7 @@ Object* DescriptorArray::Allocate(int number_of_descriptors) {
  if (array->IsFailure()) return array;
  result->set(kContentArrayIndex, array);
  result->set(kEnumerationIndexIndex,
-              Smi::FromInt(PropertyDetails::kInitialIndex),
-              SKIP_WRITE_BARRIER);
+              Smi::FromInt(PropertyDetails::kInitialIndex));
  return result;
}
@@ -4696,8 +4702,8 @@ void Map::ClearNonLiveTransitions(Object* real_prototype) {
    ASSERT(target->IsHeapObject());
    if (!target->IsMarked()) {
      ASSERT(target->IsMap());
-      contents->set(i + 1, NullDescriptorDetails, SKIP_WRITE_BARRIER);
-      contents->set(i, Heap::null_value(), SKIP_WRITE_BARRIER);
+      contents->set(i + 1, NullDescriptorDetails);
+      contents->set_null(i);
      ASSERT(target->prototype() == this ||
             target->prototype() == real_prototype);
      // Getter prototype() is read-only, set_prototype() has side effects.
@@ -5157,7 +5163,8 @@ void JSObject::SetFastElements(FixedArray* elems) {
  uint32_t len = static_cast<uint32_t>(elems->length());
  for (uint32_t i = 0; i < len; i++) ASSERT(elems->get(i)->IsTheHole());
#endif
-  WriteBarrierMode mode = elems->GetWriteBarrierMode();
+  AssertNoAllocation no_gc;
+  WriteBarrierMode mode = elems->GetWriteBarrierMode(no_gc);
  switch (GetElementsKind()) {
    case FAST_ELEMENTS: {
      FixedArray* old_elements = FixedArray::cast(elements());
@@ -5224,7 +5231,7 @@ Object* JSObject::SetSlowElements(Object* len) {

Object* JSArray::Initialize(int capacity) {
  ASSERT(capacity >= 0);
-  set_length(Smi::FromInt(0), SKIP_WRITE_BARRIER);
+  set_length(Smi::FromInt(0));
  FixedArray* new_elements;
  if (capacity == 0) {
    new_elements = Heap::empty_fixed_array();
@@ -5284,7 +5291,7 @@ Object* JSObject::SetElementsLength(Object* len) {
        for (int i = value; i < old_length; i++) {
          FixedArray::cast(elements())->set_the_hole(i);
        }
-        JSArray::cast(this)->set_length(smi_length, SKIP_WRITE_BARRIER);
+        JSArray::cast(this)->set_length(Smi::cast(smi_length));
      }
      return this;
    }
@@ -5294,8 +5301,9 @@ Object* JSObject::SetElementsLength(Object* len) {
          !ShouldConvertToSlowElements(new_capacity)) {
        Object* obj = Heap::AllocateFixedArrayWithHoles(new_capacity);
        if (obj->IsFailure()) return obj;
-        if (IsJSArray()) JSArray::cast(this)->set_length(smi_length,
-                                                         SKIP_WRITE_BARRIER);
+        if (IsJSArray()) {
+          JSArray::cast(this)->set_length(Smi::cast(smi_length));
+        }
        SetFastElements(FixedArray::cast(obj));
        return this;
      }
@@ -5314,7 +5322,7 @@ Object* JSObject::SetElementsLength(Object* len) {
            static_cast<uint32_t>(JSArray::cast(this)->length()->Number());
        element_dictionary()->RemoveNumberEntries(value, old_length);
      }
-      JSArray::cast(this)->set_length(smi_length, SKIP_WRITE_BARRIER);
+      JSArray::cast(this)->set_length(Smi::cast(smi_length));
    }
    return this;
  }
@@ -5339,8 +5347,7 @@ Object* JSObject::SetElementsLength(Object* len) {
  Object* obj = Heap::AllocateFixedArray(1);
  if (obj->IsFailure()) return obj;
  FixedArray::cast(obj)->set(0, len);
-  if (IsJSArray()) JSArray::cast(this)->set_length(Smi::FromInt(1),
-                                                   SKIP_WRITE_BARRIER);
+  if (IsJSArray()) JSArray::cast(this)->set_length(Smi::FromInt(1));
  set_elements(FixedArray::cast(obj));
  return this;
}
@@ -5610,8 +5617,7 @@ Object* JSObject::SetFastElement(uint32_t index, Object* value) {
      CHECK(Array::IndexFromObject(JSArray::cast(this)->length(),
                                   &array_length));
      if (index >= array_length) {
-        JSArray::cast(this)->set_length(Smi::FromInt(index + 1),
-                                        SKIP_WRITE_BARRIER);
+        JSArray::cast(this)->set_length(Smi::FromInt(index + 1));
      }
    }
    return value;
@@ -5627,8 +5633,9 @@ Object* JSObject::SetFastElement(uint32_t index, Object* value) {
    Object* obj = Heap::AllocateFixedArrayWithHoles(new_capacity);
    if (obj->IsFailure()) return obj;
    SetFastElements(FixedArray::cast(obj));
-    if (IsJSArray()) JSArray::cast(this)->set_length(Smi::FromInt(index + 1),
-                                                     SKIP_WRITE_BARRIER);
+    if (IsJSArray()) {
+      JSArray::cast(this)->set_length(Smi::FromInt(index + 1));
+    }
    FixedArray::cast(elements())->set(index, value);
    return value;
  }
@@ -6125,7 +6132,8 @@ template<typename Shape, typename Key>
void Dictionary<Shape, Key>::CopyValuesTo(FixedArray* elements) {
  int pos = 0;
  int capacity = HashTable<Shape, Key>::Capacity();
-  WriteBarrierMode mode = elements->GetWriteBarrierMode();
+  AssertNoAllocation no_gc;
+  WriteBarrierMode mode = elements->GetWriteBarrierMode(no_gc);
  for (int i = 0; i < capacity; i++) {
    Object* k = Dictionary<Shape, Key>::KeyAt(i);
    if (Dictionary<Shape, Key>::IsKey(k)) {
@@ -6496,7 +6504,7 @@ int JSObject::GetLocalElementKeys(FixedArray* storage,
      for (int i = 0; i < length; i++) {
        if (!FixedArray::cast(elements())->get(i)->IsTheHole()) {
          if (storage != NULL) {
-            storage->set(counter, Smi::FromInt(i), SKIP_WRITE_BARRIER);
+            storage->set(counter, Smi::FromInt(i));
          }
          counter++;
        }
@@ -6508,7 +6516,7 @@ int JSObject::GetLocalElementKeys(FixedArray* storage,
      int length = PixelArray::cast(elements())->length();
      while (counter < length) {
        if (storage != NULL) {
-          storage->set(counter, Smi::FromInt(counter), SKIP_WRITE_BARRIER);
+          storage->set(counter, Smi::FromInt(counter));
        }
        counter++;
      }
@@ -6525,7 +6533,7 @@ int JSObject::GetLocalElementKeys(FixedArray* storage,
      int length = ExternalArray::cast(elements())->length();
      while (counter < length) {
        if (storage != NULL) {
-          storage->set(counter, Smi::FromInt(counter), SKIP_WRITE_BARRIER);
+          storage->set(counter, Smi::FromInt(counter));
        }
        counter++;
      }
@@ -6550,7 +6558,7 @@ int JSObject::GetLocalElementKeys(FixedArray* storage,
    String* str = String::cast(val);
    if (storage) {
      for (int i = 0; i < str->length(); i++) {
-        storage->set(counter + i, Smi::FromInt(i), SKIP_WRITE_BARRIER);
+        storage->set(counter + i, Smi::FromInt(i));
      }
    }
    counter += str->length();
@@ -6882,8 +6890,10 @@ Object* HashTable<Shape, Key>::EnsureCapacity(int n, Key key) {
  Object* obj = Allocate(nof * 2);
  if (obj->IsFailure()) return obj;
+
+  AssertNoAllocation no_gc;
  HashTable* table = HashTable::cast(obj);
-  WriteBarrierMode mode = table->GetWriteBarrierMode();
+  WriteBarrierMode mode = table->GetWriteBarrierMode(no_gc);

  // Copy prefix to new array.
  for (int i = kPrefixStartIndex;
@@ -7130,7 +7140,7 @@ Object* JSObject::PrepareElementsForSort(uint32_t limit) {
  // Split elements into defined, undefined and the_hole, in that order.
  // Only count locations for undefined and the hole, and fill them afterwards.
-  WriteBarrierMode write_barrier = elements->GetWriteBarrierMode();
+  WriteBarrierMode write_barrier = elements->GetWriteBarrierMode(no_alloc);
  unsigned int undefs = limit;
  unsigned int holes = limit;
  // Assume most arrays contain no holes and undefined values, so minimize the
@@ -7625,7 +7635,7 @@ Object* Dictionary<Shape, Key>::GenerateNewEnumerationIndices() {
  if (obj->IsFailure()) return obj;
  FixedArray* iteration_order = FixedArray::cast(obj);
  for (int i = 0; i < length; i++) {
-    iteration_order->set(i, Smi::FromInt(i), SKIP_WRITE_BARRIER);
+    iteration_order->set(i, Smi::FromInt(i));
  }

  // Allocate array with enumeration order.
@@ -7638,9 +7648,7 @@ Object* Dictionary<Shape, Key>::GenerateNewEnumerationIndices() {
  int pos = 0;
  for (int i = 0; i < capacity; i++) {
    if (Dictionary<Shape, Key>::IsKey(Dictionary<Shape, Key>::KeyAt(i))) {
-      enumeration_order->set(pos++,
-                             Smi::FromInt(DetailsAt(i).index()),
-                             SKIP_WRITE_BARRIER);
+      enumeration_order->set(pos++, Smi::FromInt(DetailsAt(i).index()));
    }
  }
@@ -7651,9 +7659,7 @@ Object* Dictionary<Shape, Key>::GenerateNewEnumerationIndices() {
  for (int i = 0; i < length; i++) {
    int index = Smi::cast(iteration_order->get(i))->value();
    int enum_index = PropertyDetails::kInitialIndex + i;
-    enumeration_order->set(index,
-                           Smi::FromInt(enum_index),
-                           SKIP_WRITE_BARRIER);
+    enumeration_order->set(index, Smi::FromInt(enum_index));
  }

  // Update the dictionary with new indices.
@@ -7801,8 +7807,7 @@ void NumberDictionary::UpdateMaxNumberKey(uint32_t key) {
  Object* max_index_object = get(kMaxNumberKeyIndex);
  if (!max_index_object->IsSmi() || max_number_key() < key) {
    FixedArray::set(kMaxNumberKeyIndex,
-                    Smi::FromInt(key << kRequiresSlowElementsTagSize),
-                    SKIP_WRITE_BARRIER);
+                    Smi::FromInt(key << kRequiresSlowElementsTagSize));
  }
}
@@ -7893,9 +7898,7 @@ void StringDictionary::CopyEnumKeysTo(FixedArray* storage,
      PropertyDetails details = DetailsAt(i);
      if (details.IsDeleted() || details.IsDontEnum()) continue;
      storage->set(index, k);
-      sort_array->set(index,
-                      Smi::FromInt(details.index()),
-                      SKIP_WRITE_BARRIER);
+      sort_array->set(index, Smi::FromInt(details.index()));
      index++;
    }
  }

32
deps/v8/src/objects.h

@@ -1023,8 +1023,12 @@ class HeapObject: public Object {
  // Casting.
  static inline HeapObject* cast(Object* obj);

-  // Return the write barrier mode for this.
-  inline WriteBarrierMode GetWriteBarrierMode();
+  // Return the write barrier mode for this. Callers of this function
+  // must be able to present a reference to an AssertNoAllocation
+  // object as a sign that they are not going to use this function
+  // from code that allocates and thus invalidates the returned write
+  // barrier mode.
+  inline WriteBarrierMode GetWriteBarrierMode(const AssertNoAllocation&);

  // Dispatched behavior.
  void HeapObjectShortPrint(StringStream* accumulator);
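The new comment spells out the contract behind all the GetWriteBarrierMode call-site changes in this diff: the returned mode is only valid while no allocation (and hence no GC) can occur, so callers must hand in a reference to an AssertNoAllocation scope object as evidence. A bare-bones sketch of that witness-parameter idiom with hypothetical types, not V8's classes:

#include <iostream>

// Hypothetical scope type: constructing it documents (and, in a real system,
// would assert) that no allocation happens while it is live, like V8's
// AssertNoAllocation.
class NoAllocationScope {};

enum WriteBarrierMode { SKIP_WRITE_BARRIER, UPDATE_WRITE_BARRIER };

// The accessor demands a reference to the scope object. Callers cannot obtain
// a mode without first opening the scope, which discourages caching the mode
// across an allocation that could invalidate it.
WriteBarrierMode GetWriteBarrierMode(const NoAllocationScope&, bool in_new_space) {
  return in_new_space ? SKIP_WRITE_BARRIER : UPDATE_WRITE_BARRIER;
}

int main() {
  NoAllocationScope no_gc;
  WriteBarrierMode mode = GetWriteBarrierMode(no_gc, /*in_new_space=*/false);
  std::cout << (mode == UPDATE_WRITE_BARRIER ? "update" : "skip") << "\n";
  return 0;
}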
@@ -1669,7 +1673,8 @@ class FixedArray: public Array {
  void SortPairs(FixedArray* numbers, uint32_t len);

 protected:
-  // Set operation on FixedArray without using write barriers.
+  // Set operation on FixedArray without using write barriers. Can
+  // only be used for storing old space objects or smis.
  static inline void fast_set(FixedArray* array, int index, Object* value);

 private:
@@ -2889,6 +2894,14 @@ class Map: public HeapObject {
    return ((1 << kHasInstanceCallHandler) & bit_field()) != 0;
  }

+  inline void set_is_extensible() {
+    set_bit_field2(bit_field2() | (1 << kIsExtensible));
+  }
+
+  inline bool is_extensible() {
+    return ((1 << kIsExtensible) & bit_field2()) != 0;
+  }
+
  // Tells whether the instance needs security checks when accessing its
  // properties.
  inline void set_is_access_check_needed(bool access_check_needed);
@@ -3006,6 +3019,7 @@ class Map: public HeapObject {

  // Bit positions for bit field 2
  static const int kNeedsLoading = 0;
+  static const int kIsExtensible = 1;

 private:
  DISALLOW_IMPLICIT_CONSTRUCTORS(Map);
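The new extensibility flag is packed into bit_field2 next to kNeedsLoading, using the usual set-bit and test-bit accessors. A standalone sketch of the same packing, with hypothetical names standing in for Map's accessors:

#include <cstdint>
#include <iostream>

// Hypothetical bit positions within a one-byte flag field, mirroring the
// kNeedsLoading / kIsExtensible layout in bit_field2.
const int kNeedsLoading = 0;
const int kIsExtensible = 1;

struct Flags {
  uint8_t bit_field2 = 0;
  void set_is_extensible() { bit_field2 |= (1 << kIsExtensible); }
  bool is_extensible() const { return (bit_field2 & (1 << kIsExtensible)) != 0; }
};

int main() {
  Flags f;
  std::cout << f.is_extensible() << "\n";  // 0: flag starts cleared.
  f.set_is_extensible();
  std::cout << f.is_extensible() << "\n";  // 1: flag now set.
  return 0;
}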
@@ -3213,8 +3227,8 @@ class SharedFunctionInfo: public HeapObject {
  // this.x = y; where y is either a constant or refers to an argument.
  inline bool has_only_simple_this_property_assignments();

-  inline bool try_fast_codegen();
-  inline void set_try_fast_codegen(bool flag);
+  inline bool try_full_codegen();
+  inline void set_try_full_codegen(bool flag);

  // For functions which only contains this property assignments this provides
  // access to the names for the properties assigned.
@@ -3295,7 +3309,7 @@ class SharedFunctionInfo: public HeapObject {

  // Bit positions in compiler_hints.
  static const int kHasOnlySimpleThisPropertyAssignments = 0;
-  static const int kTryFastCodegen = 1;
+  static const int kTryFullCodegen = 1;

  DISALLOW_IMPLICIT_CONSTRUCTORS(SharedFunctionInfo);
};
@@ -3640,6 +3654,8 @@ class JSRegExp: public JSObject {
      FixedArray::kHeaderSize + kTagIndex * kPointerSize;
  static const int kDataAsciiCodeOffset =
      FixedArray::kHeaderSize + kIrregexpASCIICodeIndex * kPointerSize;
+  static const int kDataUC16CodeOffset =
+      FixedArray::kHeaderSize + kIrregexpUC16CodeIndex * kPointerSize;
  static const int kIrregexpCaptureCountOffset =
      FixedArray::kHeaderSize + kIrregexpCaptureCountIndex * kPointerSize;
};
@@ -4463,6 +4479,10 @@ class JSArray: public JSObject {
  // [length]: The length property.
  DECL_ACCESSORS(length, Object)

+  // Overload the length setter to skip write barrier when the length
+  // is set to a smi. This matches the set function on FixedArray.
+  inline void set_length(Smi* length);
+
  Object* JSArrayUpdateLengthFromIndex(uint32_t index, Object* value);

  // Initialize the array with the given capacity. The function may
Some files were not shown because too many files changed in this diff
