
Upgrade v8 to 1.3.2

Tag: v0.7.4-release
Author: Ryan (16 years ago)
Parent commit: 8bb8b4b570
53 changed files (lines changed in parentheses):

  deps/v8/ChangeLog (15)
  deps/v8/SConstruct (8)
  deps/v8/include/v8-debug.h (2)
  deps/v8/include/v8.h (14)
  deps/v8/src/SConscript (37)
  deps/v8/src/api.cc (40)
  deps/v8/src/arm/cfg-arm.cc (124)
  deps/v8/src/ast.h (3)
  deps/v8/src/cfg.cc (485)
  deps/v8/src/cfg.h (385)
  deps/v8/src/compiler.cc (17)
  deps/v8/src/date-delay.js (10)
  deps/v8/src/execution.h (2)
  deps/v8/src/flag-definitions.h (2)
  deps/v8/src/heap-inl.h (2)
  deps/v8/src/heap.cc (27)
  deps/v8/src/heap.h (4)
  deps/v8/src/ia32/assembler-ia32.h (4)
  deps/v8/src/ia32/cfg-ia32.cc (137)
  deps/v8/src/ia32/codegen-ia32.cc (7)
  deps/v8/src/ia32/codegen-ia32.h (2)
  deps/v8/src/ia32/macro-assembler-ia32.cc (43)
  deps/v8/src/log.cc (47)
  deps/v8/src/log.h (8)
  deps/v8/src/platform-nullos.cc (2)
  deps/v8/src/platform-posix.cc (8)
  deps/v8/src/platform-win32.cc (2)
  deps/v8/src/platform.h (2)
  deps/v8/src/runtime.cc (2)
  deps/v8/src/spaces-inl.h (6)
  deps/v8/src/spaces.h (23)
  deps/v8/src/string-stream.cc (9)
  deps/v8/src/string-stream.h (16)
  deps/v8/src/variables.h (4)
  deps/v8/src/version.cc (4)
  deps/v8/src/x64/assembler-x64.h (14)
  deps/v8/src/x64/cfg-x64.cc (146)
  deps/v8/src/x64/codegen-x64.cc (64)
  deps/v8/src/x64/codegen-x64.h (4)
  deps/v8/src/x64/disasm-x64.cc (40)
  deps/v8/src/x64/ic-x64.cc (38)
  deps/v8/src/x64/macro-assembler-x64.cc (156)
  deps/v8/test/cctest/SConscript (4)
  deps/v8/test/cctest/cctest.status (4)
  deps/v8/test/cctest/test-log-stack-tracer.cc (376)
  deps/v8/test/cctest/test-log.cc (4)
  deps/v8/test/mjsunit/mjsunit.status (2)
  deps/v8/test/mjsunit/regress/regress-416.js (38)
  deps/v8/test/mozilla/mozilla.status (27)
  deps/v8/tools/gyp/v8.gyp (5)
  deps/v8/tools/visual_studio/v8_base.vcproj (12)
  deps/v8/tools/visual_studio/v8_base_arm.vcproj (12)
  deps/v8/tools/visual_studio/v8_cctest.vcproj (2)

deps/v8/ChangeLog (15)

@@ -1,3 +1,18 @@
2009-08-05: Version 1.3.2
Started new compiler infrastructure for two-pass compilation using a
control flow graph constructed from the AST.
Profiler stack sampling for X64.
Safe handling of NaN to Posix platform-dependent time functions.
Added a new profiler control API to unify controlling various aspects
of profiling.
Fixed issue 392.
2009-07-30: Version 1.3.1
Speed improvements to accessors and interceptors.

deps/v8/SConstruct (8)

@@ -126,6 +126,7 @@ LIBRARY_FLAGS = {
'os:linux': {
'CCFLAGS': ['-ansi'] + GCC_EXTRA_CCFLAGS,
'library:shared': {
'CPPDEFINES': ['V8_SHARED'],
'LIBS': ['pthread']
}
},
@@ -218,8 +219,11 @@ V8_EXTRA_FLAGS = {
'gcc': {
'all': {
'CXXFLAGS': [], #['-fvisibility=hidden'],
'WARNINGFLAGS': ['-Wall', '-Werror', '-W',
'-Wno-unused-parameter']
'WARNINGFLAGS': ['-Wall',
'-Werror',
'-W',
'-Wno-unused-parameter',
'-Wnon-virtual-dtor']
},
'os:win32': {
'WARNINGFLAGS': ['-pedantic', '-Wno-long-long']

deps/v8/include/v8-debug.h (2)

@@ -55,7 +55,7 @@ typedef long long int64_t;  // NOLINT
// Setup for Linux shared library export. See v8.h in this directory for
// information on how to build/use V8 as shared library.
#if defined(__GNUC__) && (__GNUC__ >= 4)
#if defined(__GNUC__) && (__GNUC__ >= 4) && defined(V8_SHARED)
#define EXPORT __attribute__ ((visibility("default")))
#else // defined(__GNUC__) && (__GNUC__ >= 4)
#define EXPORT

deps/v8/include/v8.h (14)

@@ -85,11 +85,10 @@ typedef unsigned __int64 uint64_t;
#include <stdint.h>
// Setup for Linux shared library export. There is no need to destinguish
// neither between building or using the V8 shared library nor between using
// the shared or static V8 library as there is on Windows. Therefore there is
// no checking of BUILDING_V8_SHARED and USING_V8_SHARED.
#if defined(__GNUC__) && (__GNUC__ >= 4)
// Setup for Linux shared library export. There is no need to distinguish
// between building or using the V8 shared library, but we should not
// export symbols when we are building a static library.
#if defined(__GNUC__) && (__GNUC__ >= 4) && defined(V8_SHARED)
#define V8EXPORT __attribute__ ((visibility("default")))
#define V8EXPORT_INLINE __attribute__ ((visibility("default")))
#else // defined(__GNUC__) && (__GNUC__ >= 4)
@@ -1971,12 +1970,15 @@ typedef Persistent<Context> (*ContextGenerator)();
*
* In V8, profiler consists of several modules: CPU profiler, and different
* kinds of heap profiling. Each can be turned on / off independently.
* When PROFILER_MODULE_HEAP_SNAPSHOT flag is passed to ResumeProfilerEx,
* modules are enabled only temporarily for making a snapshot of the heap.
*/
enum ProfilerModules {
PROFILER_MODULE_NONE = 0,
PROFILER_MODULE_CPU = 1,
PROFILER_MODULE_HEAP_STATS = 1 << 1,
PROFILER_MODULE_JS_CONSTRUCTORS = 1 << 2
PROFILER_MODULE_JS_CONSTRUCTORS = 1 << 2,
PROFILER_MODULE_HEAP_SNAPSHOT = 1 << 16
};
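For illustration, a minimal embedder-side sketch of the unified control API added above. This is a sketch only: it assumes V8 was built with ENABLE_LOGGING_AND_PROFILING, and the script-running glue is elided.

#include <v8.h>

// Start and stop CPU sampling around a region of interest.
void ProfileCpu() {
  v8::V8::ResumeProfilerEx(v8::PROFILER_MODULE_CPU);
  // ... run scripts here (elided) ...
  v8::V8::PauseProfilerEx(v8::PROFILER_MODULE_CPU);
}

// One-shot heap snapshot: the HEAP_SNAPSHOT flag turns the heap modules
// on just long enough to log a full GC, then pauses whatever was not
// already running before the call.
void SnapshotHeap() {
  v8::V8::ResumeProfilerEx(v8::PROFILER_MODULE_HEAP_STATS |
                           v8::PROFILER_MODULE_JS_CONSTRUCTORS |
                           v8::PROFILER_MODULE_HEAP_SNAPSHOT);
}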

deps/v8/src/SConscript (37)

@@ -36,25 +36,26 @@ Import('context')
SOURCES = {
'all': [
'accessors.cc', 'allocation.cc', 'api.cc', 'assembler.cc', 'ast.cc',
'bootstrapper.cc', 'builtins.cc', 'checks.cc', 'code-stubs.cc',
'codegen.cc', 'compilation-cache.cc', 'compiler.cc', 'contexts.cc',
'conversions.cc', 'counters.cc', 'dateparser.cc', 'debug.cc',
'debug-agent.cc', 'disassembler.cc', 'execution.cc', 'factory.cc',
'flags.cc', 'frame-element.cc', 'frames.cc', 'func-name-inferrer.cc',
'global-handles.cc', 'handles.cc', 'hashmap.cc',
'heap.cc', 'ic.cc', 'interpreter-irregexp.cc', 'jsregexp.cc',
'jump-target.cc', 'log.cc', 'log-utils.cc', 'mark-compact.cc', 'messages.cc',
'objects.cc', 'oprofile-agent.cc', 'parser.cc', 'property.cc',
'regexp-macro-assembler.cc', 'regexp-macro-assembler-irregexp.cc',
'regexp-stack.cc', 'register-allocator.cc', 'rewriter.cc', 'runtime.cc',
'scanner.cc', 'scopeinfo.cc', 'scopes.cc', 'serialize.cc',
'snapshot-common.cc', 'spaces.cc', 'string-stream.cc', 'stub-cache.cc',
'token.cc', 'top.cc', 'unicode.cc', 'usage-analyzer.cc', 'utils.cc',
'v8-counters.cc', 'v8.cc', 'v8threads.cc', 'variables.cc', 'version.cc',
'bootstrapper.cc', 'builtins.cc', 'checks.cc', 'cfg.cc',
'code-stubs.cc', 'codegen.cc', 'compilation-cache.cc', 'compiler.cc',
'contexts.cc', 'conversions.cc', 'counters.cc', 'dateparser.cc',
'debug.cc', 'debug-agent.cc', 'disassembler.cc', 'execution.cc',
'factory.cc', 'flags.cc', 'frame-element.cc', 'frames.cc',
'func-name-inferrer.cc', 'global-handles.cc', 'handles.cc',
'hashmap.cc', 'heap.cc', 'ic.cc', 'interpreter-irregexp.cc',
'jsregexp.cc', 'jump-target.cc', 'log.cc', 'log-utils.cc',
'mark-compact.cc', 'messages.cc', 'objects.cc', 'oprofile-agent.cc',
'parser.cc', 'property.cc', 'regexp-macro-assembler.cc',
'regexp-macro-assembler-irregexp.cc', 'regexp-stack.cc',
'register-allocator.cc', 'rewriter.cc', 'runtime.cc', 'scanner.cc',
'scopeinfo.cc', 'scopes.cc', 'serialize.cc', 'snapshot-common.cc',
'spaces.cc', 'string-stream.cc', 'stub-cache.cc', 'token.cc', 'top.cc',
'unicode.cc', 'usage-analyzer.cc', 'utils.cc', 'v8-counters.cc',
'v8.cc', 'v8threads.cc', 'variables.cc', 'version.cc',
'virtual-frame.cc', 'zone.cc'
],
'arch:arm': [
'arm/assembler-arm.cc', 'arm/builtins-arm.cc',
'arm/assembler-arm.cc', 'arm/builtins-arm.cc', 'arm/cfg-arm.cc',
'arm/codegen-arm.cc', 'arm/cpu-arm.cc', 'arm/disasm-arm.cc',
'arm/debug-arm.cc', 'arm/frames-arm.cc', 'arm/ic-arm.cc',
'arm/jump-target-arm.cc', 'arm/macro-assembler-arm.cc',
@@ -63,7 +64,7 @@ SOURCES = {
'arm/virtual-frame-arm.cc'
],
'arch:ia32': [
'ia32/assembler-ia32.cc', 'ia32/builtins-ia32.cc',
'ia32/assembler-ia32.cc', 'ia32/builtins-ia32.cc', 'ia32/cfg-ia32.cc',
'ia32/codegen-ia32.cc', 'ia32/cpu-ia32.cc', 'ia32/disasm-ia32.cc',
'ia32/debug-ia32.cc', 'ia32/frames-ia32.cc', 'ia32/ic-ia32.cc',
'ia32/jump-target-ia32.cc', 'ia32/macro-assembler-ia32.cc',
@@ -72,7 +73,7 @@ SOURCES = {
'ia32/virtual-frame-ia32.cc'
],
'arch:x64': [
'x64/assembler-x64.cc', 'x64/builtins-x64.cc',
'x64/assembler-x64.cc', 'x64/builtins-x64.cc', 'x64/cfg-x64.cc',
'x64/codegen-x64.cc', 'x64/cpu-x64.cc', 'x64/disasm-x64.cc',
'x64/debug-x64.cc', 'x64/frames-x64.cc', 'x64/ic-x64.cc',
'x64/jump-target-x64.cc', 'x64/macro-assembler-x64.cc',

deps/v8/src/api.cc (40)

@@ -3214,21 +3214,21 @@ void V8::SetGlobalGCEpilogueCallback(GCCallback callback) {
void V8::PauseProfiler() {
#ifdef ENABLE_LOGGING_AND_PROFILING
i::Logger::PauseProfiler();
i::Logger::PauseProfiler(PROFILER_MODULE_CPU);
#endif
}
void V8::ResumeProfiler() {
#ifdef ENABLE_LOGGING_AND_PROFILING
i::Logger::ResumeProfiler();
i::Logger::ResumeProfiler(PROFILER_MODULE_CPU);
#endif
}
bool V8::IsProfilerPaused() {
#ifdef ENABLE_LOGGING_AND_PROFILING
return i::Logger::IsProfilerPaused();
return i::Logger::GetActiveProfilerModules() & PROFILER_MODULE_CPU;
#else
return true;
#endif
@@ -3237,11 +3237,19 @@ bool V8::IsProfilerPaused() {
void V8::ResumeProfilerEx(int flags) {
#ifdef ENABLE_LOGGING_AND_PROFILING
if (flags & PROFILER_MODULE_CPU) {
i::Logger::ResumeProfiler();
}
if (flags & (PROFILER_MODULE_HEAP_STATS | PROFILER_MODULE_JS_CONSTRUCTORS)) {
i::FLAG_log_gc = true;
if (flags & PROFILER_MODULE_HEAP_SNAPSHOT) {
// Snapshot mode: resume modules, perform GC, then pause only
// those modules which haven't been started prior to making a
// snapshot.
// Reset snapshot flag and CPU module flags.
flags &= ~(PROFILER_MODULE_HEAP_SNAPSHOT | PROFILER_MODULE_CPU);
const int current_flags = i::Logger::GetActiveProfilerModules();
i::Logger::ResumeProfiler(flags);
i::Heap::CollectAllGarbage();
i::Logger::PauseProfiler(~current_flags & flags);
} else {
i::Logger::ResumeProfiler(flags);
}
#endif
}
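A concrete trace of the snapshot path: suppose only the CPU module is running (current_flags == PROFILER_MODULE_CPU) and the caller passes PROFILER_MODULE_HEAP_STATS | PROFILER_MODULE_JS_CONSTRUCTORS | PROFILER_MODULE_HEAP_SNAPSHOT. After the mask, flags holds just the two heap modules; ResumeProfiler switches them on, CollectAllGarbage() walks and logs the whole heap, and ~current_flags & flags evaluates back to the same two heap modules, so PauseProfiler turns off exactly what the snapshot temporarily enabled while CPU sampling keeps running.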
@@ -3249,26 +3257,14 @@ void V8::ResumeProfilerEx(int flags) {
void V8::PauseProfilerEx(int flags) {
#ifdef ENABLE_LOGGING_AND_PROFILING
if (flags & PROFILER_MODULE_CPU) {
i::Logger::PauseProfiler();
}
if (flags & (PROFILER_MODULE_HEAP_STATS | PROFILER_MODULE_JS_CONSTRUCTORS)) {
i::FLAG_log_gc = false;
}
i::Logger::PauseProfiler(flags);
#endif
}
int V8::GetActiveProfilerModules() {
#ifdef ENABLE_LOGGING_AND_PROFILING
int result = PROFILER_MODULE_NONE;
if (!i::Logger::IsProfilerPaused()) {
result |= PROFILER_MODULE_CPU;
}
if (i::FLAG_log_gc) {
result |= PROFILER_MODULE_HEAP_STATS | PROFILER_MODULE_JS_CONSTRUCTORS;
}
return result;
return i::Logger::GetActiveProfilerModules();
#else
return PROFILER_MODULE_NONE;
#endif

deps/v8/src/arm/cfg-arm.cc (124)

@@ -0,0 +1,124 @@
// Copyright 2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include "v8.h"
#include "cfg.h"
#include "codegen-inl.h"
#include "macro-assembler-arm.h"
namespace v8 {
namespace internal {
#define __ ACCESS_MASM(masm)
void InstructionBlock::Compile(MacroAssembler* masm) {
ASSERT(!is_marked());
is_marked_ = true;
{
Comment cmt(masm, "[ InstructionBlock");
for (int i = 0, len = instructions_.length(); i < len; i++) {
instructions_[i]->Compile(masm);
}
}
successor_->Compile(masm);
}
void EntryNode::Compile(MacroAssembler* masm) {
ASSERT(!is_marked());
is_marked_ = true;
{
Comment cmnt(masm, "[ EntryNode");
__ stm(db_w, sp, r1.bit() | cp.bit() | fp.bit() | lr.bit());
__ add(fp, sp, Operand(2 * kPointerSize));
int count = CfgGlobals::current()->fun()->scope()->num_stack_slots();
if (count > 0) {
__ mov(ip, Operand(Factory::undefined_value()));
for (int i = 0; i < count; i++) {
__ push(ip);
}
}
if (FLAG_trace) {
__ CallRuntime(Runtime::kTraceEnter, 0);
}
if (FLAG_check_stack) {
StackCheckStub stub;
__ CallStub(&stub);
}
}
successor_->Compile(masm);
}
void ExitNode::Compile(MacroAssembler* masm) {
ASSERT(!is_marked());
is_marked_ = true;
Comment cmnt(masm, "[ ExitNode");
if (FLAG_trace) {
__ push(r0);
__ CallRuntime(Runtime::kTraceExit, 1);
}
__ mov(sp, fp);
__ ldm(ia_w, sp, fp.bit() | lr.bit());
int count = CfgGlobals::current()->fun()->scope()->num_parameters();
__ add(sp, sp, Operand((count + 1) * kPointerSize));
__ Jump(lr);
}
void ReturnInstr::Compile(MacroAssembler* masm) {
Comment cmnt(masm, "[ ReturnInstr");
value_->ToRegister(masm, r0);
}
void Constant::ToRegister(MacroAssembler* masm, Register reg) {
__ mov(reg, Operand(handle_));
}
void SlotLocation::ToRegister(MacroAssembler* masm, Register reg) {
switch (type_) {
case Slot::PARAMETER: {
int count = CfgGlobals::current()->fun()->scope()->num_parameters();
__ ldr(reg, MemOperand(fp, (1 + count - index_) * kPointerSize));
break;
}
case Slot::LOCAL: {
const int kOffset = JavaScriptFrameConstants::kLocal0Offset;
__ ldr(reg, MemOperand(fp, kOffset - index_ * kPointerSize));
break;
}
default:
UNREACHABLE();
}
}
#undef __
} } // namespace v8::internal

deps/v8/src/ast.h (3)

@@ -821,9 +821,6 @@ class VariableProxy: public Expression {
return (variable == NULL) ? false : variable->is_arguments();
}
// If this assertion fails it means that some code has tried to
// treat the special "this" variable as an ordinary variable with
// the name "this".
Handle<String> name() const { return name_; }
Variable* var() const { return var_; }
UseCount* var_uses() { return &var_uses_; }

deps/v8/src/cfg.cc (485)

@@ -0,0 +1,485 @@
// Copyright 2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include "v8.h"
#include "bootstrapper.h"
#include "cfg.h"
#include "scopeinfo.h"
#include "scopes.h"
namespace v8 {
namespace internal {
CfgGlobals* CfgGlobals::top_ = NULL;
CfgGlobals::CfgGlobals(FunctionLiteral* fun)
: global_fun_(fun),
global_exit_(new ExitNode()),
#ifdef DEBUG
node_counter_(0),
#endif
previous_(top_) {
top_ = this;
}
#define BAILOUT(reason) \
do { return NULL; } while (false)
Cfg* Cfg::Build() {
FunctionLiteral* fun = CfgGlobals::current()->fun();
if (fun->scope()->num_heap_slots() > 0) {
BAILOUT("function has context slots");
}
if (fun->scope()->arguments() != NULL) {
BAILOUT("function uses .arguments");
}
ZoneList<Statement*>* body = fun->body();
if (body->is_empty()) {
BAILOUT("empty function body");
}
StatementBuilder builder;
builder.VisitStatements(body);
Cfg* cfg = builder.cfg();
if (cfg == NULL) {
BAILOUT("unsupported statement type");
}
if (cfg->has_exit()) {
BAILOUT("control path without explicit return");
}
cfg->PrependEntryNode();
return cfg;
}
#undef BAILOUT
void Cfg::PrependEntryNode() {
ASSERT(!is_empty());
entry_ = new EntryNode(InstructionBlock::cast(entry()));
}
void Cfg::Append(Instruction* instr) {
ASSERT(has_exit());
ASSERT(!is_empty());
InstructionBlock::cast(exit_)->Append(instr);
}
void Cfg::AppendReturnInstruction(Value* value) {
Append(new ReturnInstr(value));
ExitNode* global_exit = CfgGlobals::current()->exit();
InstructionBlock::cast(exit_)->set_successor(global_exit);
exit_ = NULL;
}
void InstructionBlock::Unmark() {
if (is_marked_) {
is_marked_ = false;
successor_->Unmark();
}
}
void EntryNode::Unmark() {
if (is_marked_) {
is_marked_ = false;
successor_->Unmark();
}
}
void ExitNode::Unmark() {
is_marked_ = false;
}
Handle<Code> Cfg::Compile(Handle<Script> script) {
const int kInitialBufferSize = 4 * KB;
MacroAssembler* masm = new MacroAssembler(NULL, kInitialBufferSize);
entry()->Compile(masm);
entry()->Unmark();
CodeDesc desc;
masm->GetCode(&desc);
FunctionLiteral* fun = CfgGlobals::current()->fun();
ZoneScopeInfo info(fun->scope());
InLoopFlag in_loop = fun->loop_nesting() ? IN_LOOP : NOT_IN_LOOP;
Code::Flags flags = Code::ComputeFlags(Code::FUNCTION, in_loop);
Handle<Code> code = Factory::NewCode(desc, &info, flags, masm->CodeObject());
// Add unresolved entries in the code to the fixup list.
Bootstrapper::AddFixup(*code, masm);
#ifdef ENABLE_DISASSEMBLER
if (FLAG_print_code) {
// Print the source code if available.
if (!script->IsUndefined() && !script->source()->IsUndefined()) {
PrintF("--- Raw source ---\n");
StringInputBuffer stream(String::cast(script->source()));
stream.Seek(fun->start_position());
// fun->end_position() points to the last character in the
// stream. We need to compensate by adding one to calculate the
// length.
int source_len = fun->end_position() - fun->start_position() + 1;
for (int i = 0; i < source_len; i++) {
if (stream.has_more()) PrintF("%c", stream.GetNext());
}
PrintF("\n\n");
}
PrintF("--- Code ---\n");
code->Disassemble(*fun->name()->ToCString());
}
#endif
return code;
}
// The expression builder should not be used for declarations or statements.
void ExpressionBuilder::VisitDeclaration(Declaration* decl) { UNREACHABLE(); }
#define DEFINE_VISIT(type) \
void ExpressionBuilder::Visit##type(type* stmt) { UNREACHABLE(); }
STATEMENT_NODE_LIST(DEFINE_VISIT)
#undef DEFINE_VISIT
// Macros (temporarily) handling unsupported expression types.
#define BAILOUT(reason) \
do { \
value_ = NULL; \
return; \
} while (false)
#define CHECK_BAILOUT() \
if (value_ == NULL) { return; } else {}
void ExpressionBuilder::VisitFunctionLiteral(FunctionLiteral* expr) {
BAILOUT("FunctionLiteral");
}
void ExpressionBuilder::VisitFunctionBoilerplateLiteral(
FunctionBoilerplateLiteral* expr) {
BAILOUT("FunctionBoilerplateLiteral");
}
void ExpressionBuilder::VisitConditional(Conditional* expr) {
BAILOUT("Conditional");
}
void ExpressionBuilder::VisitSlot(Slot* expr) {
BAILOUT("Slot");
}
void ExpressionBuilder::VisitVariableProxy(VariableProxy* expr) {
Expression* rewrite = expr->var()->rewrite();
if (rewrite == NULL || rewrite->AsSlot() == NULL) {
BAILOUT("unsupported variable (not a slot)");
}
Slot* slot = rewrite->AsSlot();
if (slot->type() != Slot::PARAMETER && slot->type() != Slot::LOCAL) {
BAILOUT("unsupported slot type (not a parameter or local)");
}
value_ = new SlotLocation(slot->type(), slot->index());
}
void ExpressionBuilder::VisitLiteral(Literal* expr) {
value_ = new Constant(expr->handle());
}
void ExpressionBuilder::VisitRegExpLiteral(RegExpLiteral* expr) {
BAILOUT("RegExpLiteral");
}
void ExpressionBuilder::VisitObjectLiteral(ObjectLiteral* expr) {
BAILOUT("ObjectLiteral");
}
void ExpressionBuilder::VisitArrayLiteral(ArrayLiteral* expr) {
BAILOUT("ArrayLiteral");
}
void ExpressionBuilder::VisitCatchExtensionObject(CatchExtensionObject* expr) {
BAILOUT("CatchExtensionObject");
}
void ExpressionBuilder::VisitAssignment(Assignment* expr) {
BAILOUT("Assignment");
}
void ExpressionBuilder::VisitThrow(Throw* expr) {
BAILOUT("Throw");
}
void ExpressionBuilder::VisitProperty(Property* expr) {
BAILOUT("Property");
}
void ExpressionBuilder::VisitCall(Call* expr) {
BAILOUT("Call");
}
void ExpressionBuilder::VisitCallEval(CallEval* expr) {
BAILOUT("CallEval");
}
void ExpressionBuilder::VisitCallNew(CallNew* expr) {
BAILOUT("CallNew");
}
void ExpressionBuilder::VisitCallRuntime(CallRuntime* expr) {
BAILOUT("CallRuntime");
}
void ExpressionBuilder::VisitUnaryOperation(UnaryOperation* expr) {
BAILOUT("UnaryOperation");
}
void ExpressionBuilder::VisitCountOperation(CountOperation* expr) {
BAILOUT("CountOperation");
}
void ExpressionBuilder::VisitBinaryOperation(BinaryOperation* expr) {
BAILOUT("BinaryOperation");
}
void ExpressionBuilder::VisitCompareOperation(CompareOperation* expr) {
BAILOUT("CompareOperation");
}
void ExpressionBuilder::VisitThisFunction(ThisFunction* expr) {
BAILOUT("ThisFunction");
}
#undef BAILOUT
#undef CHECK_BAILOUT
// Macros (temporarily) handling unsupported statement types.
#define BAILOUT(reason) \
do { \
cfg_ = NULL; \
return; \
} while (false)
#define CHECK_BAILOUT() \
if (cfg_ == NULL) { return; } else {}
void StatementBuilder::VisitStatements(ZoneList<Statement*>* stmts) {
for (int i = 0, len = stmts->length(); i < len; i++) {
Visit(stmts->at(i));
CHECK_BAILOUT();
if (!cfg_->has_exit()) return;
}
}
// The statement builder should not be used for declarations or expressions.
void StatementBuilder::VisitDeclaration(Declaration* decl) { UNREACHABLE(); }
#define DEFINE_VISIT(type) \
void StatementBuilder::Visit##type(type* expr) { UNREACHABLE(); }
EXPRESSION_NODE_LIST(DEFINE_VISIT)
#undef DEFINE_VISIT
void StatementBuilder::VisitBlock(Block* stmt) {
VisitStatements(stmt->statements());
}
void StatementBuilder::VisitExpressionStatement(ExpressionStatement* stmt) {
BAILOUT("ExpressionStatement");
}
void StatementBuilder::VisitEmptyStatement(EmptyStatement* stmt) {
// Nothing to do.
}
void StatementBuilder::VisitIfStatement(IfStatement* stmt) {
BAILOUT("IfStatement");
}
void StatementBuilder::VisitContinueStatement(ContinueStatement* stmt) {
BAILOUT("ContinueStatement");
}
void StatementBuilder::VisitBreakStatement(BreakStatement* stmt) {
BAILOUT("BreakStatement");
}
void StatementBuilder::VisitReturnStatement(ReturnStatement* stmt) {
ExpressionBuilder builder;
builder.Visit(stmt->expression());
Value* value = builder.value();
if (value == NULL) BAILOUT("unsupported expression type");
cfg_->AppendReturnInstruction(value);
}
void StatementBuilder::VisitWithEnterStatement(WithEnterStatement* stmt) {
BAILOUT("WithEnterStatement");
}
void StatementBuilder::VisitWithExitStatement(WithExitStatement* stmt) {
BAILOUT("WithExitStatement");
}
void StatementBuilder::VisitSwitchStatement(SwitchStatement* stmt) {
BAILOUT("SwitchStatement");
}
void StatementBuilder::VisitLoopStatement(LoopStatement* stmt) {
BAILOUT("LoopStatement");
}
void StatementBuilder::VisitForInStatement(ForInStatement* stmt) {
BAILOUT("ForInStatement");
}
void StatementBuilder::VisitTryCatch(TryCatch* stmt) {
BAILOUT("TryCatch");
}
void StatementBuilder::VisitTryFinally(TryFinally* stmt) {
BAILOUT("TryFinally");
}
void StatementBuilder::VisitDebuggerStatement(DebuggerStatement* stmt) {
BAILOUT("DebuggerStatement");
}
#ifdef DEBUG
// CFG printing support (via depth-first, preorder block traversal).
void Cfg::Print() {
entry_->Print();
entry_->Unmark();
}
void Constant::Print() {
PrintF("Constant(");
handle_->Print();
PrintF(")");
}
void SlotLocation::Print() {
PrintF("Slot(");
switch (type_) {
case Slot::PARAMETER:
PrintF("PARAMETER, %d)", index_);
break;
case Slot::LOCAL:
PrintF("LOCAL, %d)", index_);
break;
default:
UNREACHABLE();
}
}
void ReturnInstr::Print() {
PrintF("Return ");
value_->Print();
PrintF("\n");
}
void InstructionBlock::Print() {
if (!is_marked_) {
is_marked_ = true;
PrintF("L%d:\n", number());
for (int i = 0, len = instructions_.length(); i < len; i++) {
instructions_[i]->Print();
}
PrintF("Goto L%d\n\n", successor_->number());
successor_->Print();
}
}
void EntryNode::Print() {
if (!is_marked_) {
is_marked_ = true;
successor_->Print();
}
}
void ExitNode::Print() {
if (!is_marked_) {
is_marked_ = true;
PrintF("L%d:\nExit\n\n", number());
}
}
#endif // DEBUG
} } // namespace v8::internal
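Given the bailouts above, the only function bodies that survive Cfg::Build() so far are sequences of blocks and empty statements ending in an explicit return of a literal, parameter, or stack-allocated local. As a hedged example, for a function "f" whose body is just `return 1;`, the compiler hook (see compiler.cc below) with --multipass --print-cfg should print roughly:

Function "f":
L0:
Return Constant(1)
Goto L1

L1:
Exit

(The exact text inside Constant(...) depends on Object::Print for the literal's handle; node numbers are assigned lazily during printing, so the entry node, which prints nothing itself, consumes no number.)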

deps/v8/src/cfg.h (385)

@@ -0,0 +1,385 @@
// Copyright 2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#ifndef V8_CFG_H_
#define V8_CFG_H_
#include "ast.h"
namespace v8 {
namespace internal {
class ExitNode;
// A convenient class to keep 'global' values when building a CFG. Since
// CFG construction can be invoked recursively, CFG globals are stacked.
class CfgGlobals BASE_EMBEDDED {
public:
explicit CfgGlobals(FunctionLiteral* fun);
~CfgGlobals() { top_ = previous_; }
static CfgGlobals* current() {
ASSERT(top_ != NULL);
return top_;
}
FunctionLiteral* fun() { return global_fun_; }
ExitNode* exit() { return global_exit_; }
#ifdef DEBUG
int next_number() { return node_counter_++; }
#endif
private:
static CfgGlobals* top_;
// Function literal currently compiling.
FunctionLiteral* global_fun_;
// Shared global exit node for all returns from the same function.
ExitNode* global_exit_;
#ifdef DEBUG
// Used to number nodes when printing.
int node_counter_;
#endif
CfgGlobals* previous_;
};
// Values appear in instructions. They represent trivial source
// expressions: ones with no side effects and that do not require code to be
// generated.
class Value : public ZoneObject {
public:
virtual ~Value() {}
virtual void ToRegister(MacroAssembler* masm, Register reg) = 0;
#ifdef DEBUG
virtual void Print() = 0;
#endif
};
// A compile-time constant that appeared as a literal in the source AST.
class Constant : public Value {
public:
explicit Constant(Handle<Object> handle) : handle_(handle) {}
virtual ~Constant() {}
void ToRegister(MacroAssembler* masm, Register reg);
#ifdef DEBUG
void Print();
#endif
private:
Handle<Object> handle_;
};
// Locations are values that can be stored into ('lvalues').
class Location : public Value {
public:
virtual ~Location() {}
virtual void ToRegister(MacroAssembler* masm, Register reg) = 0;
#ifdef DEBUG
virtual void Print() = 0;
#endif
};
// SlotLocations represent parameters and stack-allocated (i.e.,
// non-context) local variables.
class SlotLocation : public Location {
public:
SlotLocation(Slot::Type type, int index) : type_(type), index_(index) {}
void ToRegister(MacroAssembler* masm, Register reg);
#ifdef DEBUG
void Print();
#endif
private:
Slot::Type type_;
int index_;
};
// Instructions are computations. They represent non-trivial source
// expressions: typically ones that have side effects and require code to
// be generated.
class Instruction : public ZoneObject {
public:
virtual ~Instruction() {}
virtual void Compile(MacroAssembler* masm) = 0;
#ifdef DEBUG
virtual void Print() = 0;
#endif
};
// Return a value.
class ReturnInstr : public Instruction {
public:
explicit ReturnInstr(Value* value) : value_(value) {}
virtual ~ReturnInstr() {}
void Compile(MacroAssembler* masm);
#ifdef DEBUG
void Print();
#endif
private:
Value* value_;
};
// Nodes make up control-flow graphs. They contain single-entry,
// single-exit blocks of instructions and administrative nodes making up the
// graph structure.
class CfgNode : public ZoneObject {
public:
CfgNode() : is_marked_(false) {
#ifdef DEBUG
number_ = -1;
#endif
}
virtual ~CfgNode() {}
bool is_marked() { return is_marked_; }
virtual bool is_block() { return false; }
virtual void Unmark() = 0;
virtual void Compile(MacroAssembler* masm) = 0;
#ifdef DEBUG
int number() {
if (number_ == -1) number_ = CfgGlobals::current()->next_number();
return number_;
}
virtual void Print() = 0;
#endif
protected:
bool is_marked_;
#ifdef DEBUG
int number_;
#endif
};
// A block is a single-entry, single-exit block of instructions.
class InstructionBlock : public CfgNode {
public:
InstructionBlock() : successor_(NULL), instructions_(4) {}
virtual ~InstructionBlock() {}
static InstructionBlock* cast(CfgNode* node) {
ASSERT(node->is_block());
return reinterpret_cast<InstructionBlock*>(node);
}
void set_successor(CfgNode* succ) {
ASSERT(successor_ == NULL);
successor_ = succ;
}
bool is_block() { return true; }
void Unmark();
void Compile(MacroAssembler* masm);
void Append(Instruction* instr) { instructions_.Add(instr); }
#ifdef DEBUG
void Print();
#endif
private:
CfgNode* successor_;
ZoneList<Instruction*> instructions_;
};
// The CFG for a function has a distinguished entry node. It has no
// predecessors and a single successor. The successor is the block
// containing the function's first instruction.
class EntryNode : public CfgNode {
public:
explicit EntryNode(InstructionBlock* succ) : successor_(succ) {}
virtual ~EntryNode() {}
void Unmark();
void Compile(MacroAssembler* masm);
#ifdef DEBUG
void Print();
#endif
private:
InstructionBlock* successor_;
};
// The CFG for a function has a distinguished exit node. It has no
// successor and arbitrarily many predecessors. The predecessors are all
// the blocks returning from the function.
class ExitNode : public CfgNode {
public:
ExitNode() {}
virtual ~ExitNode() {}
void Unmark();
void Compile(MacroAssembler* masm);
#ifdef DEBUG
void Print();
#endif
};
// A CFG consists of a linked structure of nodes. It has a single entry
// node and optionally an exit node. There is a distinguished global exit
// node that is used as the successor of all blocks that return from the
// function.
//
// Fragments of control-flow graphs, produced when traversing the statements
// and expressions in the source AST, are represented by the same class.
// They have instruction blocks as both their entry and exit (if there is
// one). Instructions can always be prepended or appended to fragments, and
// fragments can always be concatenated.
//
// A singleton CFG fragment (i.e., with only one node) has the same node as
// both entry and exit (if the exit is available).
class Cfg : public ZoneObject {
public:
// Create a singleton CFG fragment.
explicit Cfg(InstructionBlock* block) : entry_(block), exit_(block) {}
// Build the CFG for a function.
static Cfg* Build();
// The entry and exit nodes.
CfgNode* entry() { return entry_; }
CfgNode* exit() { return exit_; }
// True if the CFG has no nodes.
bool is_empty() { return entry_ == NULL; }
// True if the CFG has an available exit node (i.e., it can be appended or
// concatenated to).
bool has_exit() { return exit_ != NULL; }
// Add an entry node to a CFG fragment. It is no longer a fragment
// (instructions cannot be prepended).
void PrependEntryNode();
// Append an instruction to the end of a CFG fragment. Assumes it has an
// available exit.
void Append(Instruction* instr);
// Appends a return instruction to the end of a CFG fragment. It no
// longer has an available exit node.
void AppendReturnInstruction(Value* value);
Handle<Code> Compile(Handle<Script> script);
#ifdef DEBUG
// Support for printing.
void Print();
#endif
private:
// Entry and exit nodes.
CfgNode* entry_;
CfgNode* exit_;
};
// An Expression Builder traverses a trivial expression and returns a value.
class ExpressionBuilder : public AstVisitor {
public:
ExpressionBuilder() : value_(new Constant(Handle<Object>::null())) {}
Value* value() { return value_; }
// AST node visitors.
#define DECLARE_VISIT(type) void Visit##type(type* node);
AST_NODE_LIST(DECLARE_VISIT)
#undef DECLARE_VISIT
private:
Value* value_;
};
// A StatementBuilder traverses a statement and returns a CFG.
class StatementBuilder : public AstVisitor {
public:
StatementBuilder() : cfg_(new Cfg(new InstructionBlock())) {}
Cfg* cfg() { return cfg_; }
void VisitStatements(ZoneList<Statement*>* stmts);
// AST node visitors.
#define DECLARE_VISIT(type) void Visit##type(type* node);
AST_NODE_LIST(DECLARE_VISIT)
#undef DECLARE_VISIT
private:
Cfg* cfg_;
};
} } // namespace v8::internal
#endif // V8_CFG_H_

deps/v8/src/compiler.cc (17)

@@ -28,6 +28,7 @@
#include "v8.h"
#include "bootstrapper.h"
#include "cfg.h"
#include "codegen-inl.h"
#include "compilation-cache.h"
#include "compiler.h"
@@ -78,6 +79,22 @@ static Handle<Code> MakeCode(FunctionLiteral* literal,
return Handle<Code>::null();
}
if (FLAG_multipass) {
CfgGlobals scope(literal);
Cfg* cfg = Cfg::Build();
#ifdef DEBUG
if (FLAG_print_cfg && cfg != NULL) {
SmartPointer<char> name = literal->name()->ToCString();
PrintF("Function \"%s\":\n", *name);
cfg->Print();
PrintF("\n");
}
#endif
if (cfg != NULL) {
return cfg->Compile(script);
}
}
// Generate code and return it.
Handle<Code> result = CodeGenerator::MakeCode(literal, script, is_eval);
return result;

deps/v8/src/date-delay.js (10)

@@ -156,6 +156,7 @@ var DST_offset_cache = {
// NOTE: The implementation relies on the fact that no time zones have
// more than one daylight savings offset change per month.
// If this function is called with NaN it returns NaN.
function DaylightSavingsOffset(t) {
// Load the cache object from the builtins object.
var cache = DST_offset_cache;
@@ -219,6 +220,7 @@ var timezone_cache_time = $NaN;
var timezone_cache_timezone;
function LocalTimezone(t) {
if (NUMBER_IS_NAN(t)) return "";
if (t == timezone_cache_time) {
return timezone_cache_timezone;
}
@@ -464,10 +466,12 @@ var Date_cache = {
value = cache.time;
} else {
value = DateParse(year);
if (!NUMBER_IS_NAN(value)) {
cache.time = value;
cache.year = YearFromTime(LocalTimeNoCheck(value));
cache.string = year;
}
}
} else {
// According to ECMA 262, no hint should be given for this
@@ -647,11 +651,13 @@ function TimeString(time) {
function LocalTimezoneString(time) {
var timezoneOffset = (local_time_offset + DaylightSavingsOffset(time)) / msPerMinute;
var timezoneOffset =
(local_time_offset + DaylightSavingsOffset(time)) / msPerMinute;
var sign = (timezoneOffset >= 0) ? 1 : -1;
var hours = FLOOR((sign * timezoneOffset)/60);
var min = FLOOR((sign * timezoneOffset)%60);
var gmt = ' GMT' + ((sign == 1) ? '+' : '-') + TwoDigitString(hours) + TwoDigitString(min);
var gmt = ' GMT' + ((sign == 1) ? '+' : '-') +
TwoDigitString(hours) + TwoDigitString(min);
return gmt + ' (' + LocalTimezone(time) + ')';
}
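A quick worked example of the rewrapped arithmetic: for a zone eight hours west of UTC with DST inactive, local_time_offset + DaylightSavingsOffset(time) is -28800000 ms, so timezoneOffset is -480, sign is -1, hours is FLOOR(480/60) = 8, min is 0, and the result is ' GMT-0800 (...)' with the zone name appended.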

deps/v8/src/execution.h (2)

@@ -205,7 +205,7 @@ class StackGuard BASE_EMBEDDED {
static void EnableInterrupts();
static void DisableInterrupts();
static const uintptr_t kLimitSize = 512 * KB;
static const uintptr_t kLimitSize = kPointerSize * 128 * KB;
static const uintptr_t kInterruptLimit = 0xfffffffe;
static const uintptr_t kIllegalLimit = 0xffffffff;
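The new expression preserves the old value on 32-bit targets and doubles it on 64-bit ones: kPointerSize * 128 * KB is 4 * 128 KB = 512 KB with 4-byte pointers and 8 * 128 KB = 1 MB with 8-byte pointers, so the stack guard headroom scales with the larger frames of 64-bit code.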

deps/v8/src/flag-definitions.h (2)

@@ -133,6 +133,7 @@ DEFINE_bool(debug_info, true, "add debug information to compiled functions")
DEFINE_bool(strict, false, "strict error checking")
DEFINE_int(min_preparse_length, 1024,
"Minimum length for automatic enable preparsing")
DEFINE_bool(multipass, false, "use the multipass code generator")
// compilation-cache.cc
DEFINE_bool(compilation_cache, true, "enable compilation cache")
@@ -267,6 +268,7 @@ DEFINE_string(stop_at, "", "function name where to insert a breakpoint")
// compiler.cc
DEFINE_bool(print_builtin_scopes, false, "print scopes for builtins")
DEFINE_bool(print_scopes, false, "print scopes")
DEFINE_bool(print_cfg, false, "print control-flow graph")
// contexts.cc
DEFINE_bool(trace_contexts, false, "trace contexts operations")

deps/v8/src/heap-inl.h (2)

@@ -159,9 +159,7 @@ void Heap::RecordWrite(Address address, int offset) {
if (new_space_.Contains(address)) return;
ASSERT(!new_space_.FromSpaceContains(address));
SLOW_ASSERT(Contains(address + offset));
#ifndef V8_HOST_ARCH_64_BIT
Page::SetRSet(address, offset);
#endif // V8_HOST_ARCH_64_BIT
}

deps/v8/src/heap.cc (27)

@@ -681,33 +681,11 @@ void Heap::Scavenge() {
// Copy objects reachable from weak pointers.
GlobalHandles::IterateWeakRoots(&scavenge_visitor);
#ifdef V8_HOST_ARCH_64_BIT
// TODO(X64): Make this go away again. We currently disable RSets for
// 64-bit-mode.
HeapObjectIterator old_pointer_iterator(old_pointer_space_);
while (old_pointer_iterator.has_next()) {
HeapObject* heap_object = old_pointer_iterator.next();
heap_object->Iterate(&scavenge_visitor);
}
HeapObjectIterator map_iterator(map_space_);
while (map_iterator.has_next()) {
HeapObject* heap_object = map_iterator.next();
heap_object->Iterate(&scavenge_visitor);
}
LargeObjectIterator lo_iterator(lo_space_);
while (lo_iterator.has_next()) {
HeapObject* heap_object = lo_iterator.next();
if (heap_object->IsFixedArray()) {
heap_object->Iterate(&scavenge_visitor);
}
}
#else // !defined(V8_HOST_ARCH_64_BIT)
// Copy objects reachable from the old generation. By definition,
// there are no intergenerational pointers in code or data spaces.
IterateRSet(old_pointer_space_, &ScavengePointer);
IterateRSet(map_space_, &ScavengePointer);
lo_space_->IterateRSet(&ScavengePointer);
#endif
// Copy objects reachable from cells by scavenging cell values directly.
HeapObjectIterator cell_iterator(cell_space_);
@@ -830,13 +808,11 @@ class UpdateRSetVisitor: public ObjectVisitor {
int Heap::UpdateRSet(HeapObject* obj) {
#ifndef V8_HOST_ARCH_64_BIT
// TODO(X64) Reenable RSet when we have a working 64-bit layout of Page.
ASSERT(!InNewSpace(obj));
// Special handling of fixed arrays to iterate the body based on the start
// address and offset. Just iterating the pointers as in UpdateRSetVisitor
// will not work because Page::SetRSet needs to have the start of the
// object.
// object for large object pages.
if (obj->IsFixedArray()) {
FixedArray* array = FixedArray::cast(obj);
int length = array->length();
@@ -853,7 +829,6 @@ int Heap::UpdateRSet(HeapObject* obj) {
UpdateRSetVisitor v;
obj->Iterate(&v);
}
#endif // V8_HOST_ARCH_64_BIT
return obj->Size();
}

deps/v8/src/heap.h (4)

@@ -257,7 +257,7 @@ class Heap : public AllStatic {
// address with the mask will result in the start address of the new space
// for all addresses in either semispace.
static Address NewSpaceStart() { return new_space_.start(); }
static uint32_t NewSpaceMask() { return new_space_.mask(); }
static uintptr_t NewSpaceMask() { return new_space_.mask(); }
static Address NewSpaceTop() { return new_space_.top(); }
static NewSpace* new_space() { return &new_space_; }
@@ -1123,11 +1123,9 @@ class VerifyPointersAndRSetVisitor: public ObjectVisitor {
HeapObject* object = HeapObject::cast(*current);
ASSERT(Heap::Contains(object));
ASSERT(object->map()->IsMap());
#ifndef V8_TARGET_ARCH_X64
if (Heap::InNewSpace(object)) {
ASSERT(Page::IsRSetSet(reinterpret_cast<Address>(current), 0));
}
#endif
}
}
}

deps/v8/src/ia32/assembler-ia32.h (4)

@@ -226,7 +226,9 @@ enum ScaleFactor {
times_1 = 0,
times_2 = 1,
times_4 = 2,
times_8 = 3
times_8 = 3,
times_pointer_size = times_4,
times_half_pointer_size = times_2
};
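The two aliases make pointer-size scaling explicit at call sites: on ia32, kPointerSize is 4, so times_pointer_size resolves to times_4 and times_half_pointer_size to times_2. The latter is exactly what the RecordWrite change below relies on to turn a smi index (value << 1, since kSmiTagSize is 1) into a byte offset into an array of 4-byte words.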

deps/v8/src/ia32/cfg-ia32.cc (137)

@@ -0,0 +1,137 @@
// Copyright 2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include "v8.h"
#include "cfg.h"
#include "codegen-inl.h"
#include "macro-assembler-ia32.h"
namespace v8 {
namespace internal {
#define __ ACCESS_MASM(masm)
void InstructionBlock::Compile(MacroAssembler* masm) {
ASSERT(!is_marked());
is_marked_ = true;
{
Comment cmt(masm, "[ InstructionBlock");
for (int i = 0, len = instructions_.length(); i < len; i++) {
instructions_[i]->Compile(masm);
}
}
successor_->Compile(masm);
}
void EntryNode::Compile(MacroAssembler* masm) {
ASSERT(!is_marked());
is_marked_ = true;
Label deferred_enter, deferred_exit;
{
Comment cmnt(masm, "[ EntryNode");
__ push(ebp);
__ mov(ebp, esp);
__ push(esi);
__ push(edi);
int count = CfgGlobals::current()->fun()->scope()->num_stack_slots();
if (count > 0) {
__ Set(eax, Immediate(Factory::undefined_value()));
for (int i = 0; i < count; i++) {
__ push(eax);
}
}
if (FLAG_trace) {
__ CallRuntime(Runtime::kTraceEnter, 0);
}
if (FLAG_check_stack) {
ExternalReference stack_limit =
ExternalReference::address_of_stack_guard_limit();
__ cmp(esp, Operand::StaticVariable(stack_limit));
__ j(below, &deferred_enter);
__ bind(&deferred_exit);
}
}
successor_->Compile(masm);
if (FLAG_check_stack) {
__ bind(&deferred_enter);
StackCheckStub stub;
__ CallStub(&stub);
__ jmp(&deferred_exit);
}
}
void ExitNode::Compile(MacroAssembler* masm) {
ASSERT(!is_marked());
is_marked_ = true;
Comment cmnt(masm, "[ ExitNode");
if (FLAG_trace) {
__ push(eax);
__ CallRuntime(Runtime::kTraceExit, 1);
}
__ RecordJSReturn();
__ mov(esp, ebp);
__ pop(ebp);
int count = CfgGlobals::current()->fun()->scope()->num_parameters();
__ ret((count + 1) * kPointerSize);
}
void ReturnInstr::Compile(MacroAssembler* masm) {
Comment cmnt(masm, "[ ReturnInstr");
value_->ToRegister(masm, eax);
}
void Constant::ToRegister(MacroAssembler* masm, Register reg) {
__ mov(reg, Immediate(handle_));
}
void SlotLocation::ToRegister(MacroAssembler* masm, Register reg) {
switch (type_) {
case Slot::PARAMETER: {
int count = CfgGlobals::current()->fun()->scope()->num_parameters();
__ mov(reg, Operand(ebp, (1 + count - index_) * kPointerSize));
break;
}
case Slot::LOCAL: {
const int kOffset = JavaScriptFrameConstants::kLocal0Offset;
__ mov(reg, Operand(ebp, kOffset - index_ * kPointerSize));
break;
}
default:
UNREACHABLE();
}
}
#undef __
} } // namespace v8::internal
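For reference, this is the frame that EntryNode builds and SlotLocation::ToRegister reads back, shown for a function with two parameters and two locals. It assumes the standard ia32 JavaScript frame layout in which kLocal0Offset is -3 * kPointerSize; the slot offsets follow the formulas in the code above.

ebp + 16 : receiver
ebp + 12 : parameter 0    // (1 + count - index) * kPointerSize, count == 2
ebp +  8 : parameter 1
ebp +  4 : return address
ebp +  0 : caller's ebp
ebp -  4 : esi (context)
ebp -  8 : edi (function)
ebp - 12 : local 0        // kLocal0Offset - index * kPointerSize
ebp - 16 : local 1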

deps/v8/src/ia32/codegen-ia32.cc (7)

@@ -5154,11 +5154,10 @@ void CodeGenerator::GenerateObjectEquals(ZoneList<Expression*>* args) {
void CodeGenerator::GenerateGetFramePointer(ZoneList<Expression*>* args) {
ASSERT(args->length() == 0);
ASSERT(kSmiTagSize == 1 && kSmiTag == 0); // shifting code depends on this
ASSERT(kSmiTag == 0); // EBP value is aligned, so it should look like Smi.
Result ebp_as_smi = allocator_->Allocate();
ASSERT(ebp_as_smi.is_valid());
__ mov(ebp_as_smi.reg(), Operand(ebp));
__ shr(ebp_as_smi.reg(), kSmiTagSize);
frame_->Push(&ebp_as_smi);
}
@@ -7786,7 +7785,7 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
// If this is the outermost JS call, set js_entry_sp value.
ExternalReference js_entry_sp(Top::k_js_entry_sp_address);
__ cmp(Operand::StaticVariable(js_entry_sp), Immediate(0));
__ j(NegateCondition(equal), &not_outermost_js);
__ j(not_equal, &not_outermost_js);
__ mov(Operand::StaticVariable(js_entry_sp), ebp);
__ bind(&not_outermost_js);
#endif
@@ -7837,7 +7836,7 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
// If current EBP value is the same as js_entry_sp value, it means that
// the current function is the outermost.
__ cmp(ebp, Operand::StaticVariable(js_entry_sp));
__ j(NegateCondition(equal), &not_outermost_js_2);
__ j(not_equal, &not_outermost_js_2);
__ mov(Operand::StaticVariable(js_entry_sp), Immediate(0));
__ bind(&not_outermost_js_2);
#endif

deps/v8/src/ia32/codegen-ia32.h (2)

@@ -603,7 +603,7 @@ class CodeGenerator: public AstVisitor {
friend class Reference;
friend class Result;
friend class CodeGeneratorPatcher; // Used in test-log-ia32.cc
friend class CodeGeneratorPatcher; // Used in test-log-stack-tracer.cc
DISALLOW_COPY_AND_ASSIGN(CodeGenerator);
};

deps/v8/src/ia32/macro-assembler-ia32.cc (43)

@@ -146,43 +146,30 @@ void MacroAssembler::RecordWrite(Register object, int offset,
// for the remembered set bits.
Label done;
// This optimization cannot survive serialization and deserialization,
// so we disable as long as serialization can take place.
int32_t new_space_start =
reinterpret_cast<int32_t>(ExternalReference::new_space_start().address());
if (Serializer::enabled() || new_space_start < 0) {
// Cannot do smart bit-twiddling. Need to do two consecutive checks.
// Check for Smi first.
// Skip barrier if writing a smi.
ASSERT_EQ(0, kSmiTag);
test(value, Immediate(kSmiTagMask));
j(zero, &done);
// Test that the object address is not in the new space. We cannot
// set remembered set bits in the new space.
if (Serializer::enabled()) {
// Can't do arithmetic on external references if it might get serialized.
mov(value, Operand(object));
and_(value, Heap::NewSpaceMask());
cmp(Operand(value), Immediate(ExternalReference::new_space_start()));
j(equal, &done);
} else {
// move the value SmiTag into the sign bit
shl(value, 31);
// combine the object with value SmiTag
or_(value, Operand(object));
// remove the uninteresing bits inside the page
and_(value, Heap::NewSpaceMask() | (1 << 31));
// xor has two effects:
// - if the value was a smi, then the result will be negative
// - if the object is pointing into new space area the page bits will
// all be zero
xor_(value, new_space_start | (1 << 31));
// Check for both conditions in one branch
j(less_equal, &done);
int32_t new_space_start = reinterpret_cast<int32_t>(
ExternalReference::new_space_start().address());
lea(value, Operand(object, -new_space_start));
and_(value, Heap::NewSpaceMask());
j(equal, &done);
}
if ((offset > 0) && (offset < Page::kMaxHeapObjectSize)) {
// Compute the bit offset in the remembered set, leave it in 'value'.
mov(value, Operand(object));
lea(value, Operand(object, offset));
and_(value, Page::kPageAlignmentMask);
add(Operand(value), Immediate(offset));
shr(value, kObjectAlignmentBits);
shr(value, kPointerSizeLog2);
// Compute the page address from the heap object pointer, leave it in
// 'object'.
@@ -192,7 +179,7 @@ void MacroAssembler::RecordWrite(Register object, int offset,
// to limit code size. We should probably evaluate this decision by
// measuring the performance of an equivalent implementation using
// "simpler" instructions
bts(Operand(object, 0), value);
bts(Operand(object, Page::kRSetOffset), value);
} else {
Register dst = scratch;
if (offset != 0) {
@@ -201,7 +188,9 @@ void MacroAssembler::RecordWrite(Register object, int offset,
// array access: calculate the destination address in the same manner as
// KeyedStoreIC::GenerateGeneric. Multiply a smi by 2 to get an offset
// into an array of words.
lea(dst, Operand(object, dst, times_2,
ASSERT_EQ(1, kSmiTagSize);
ASSERT_EQ(0, kSmiTag);
lea(dst, Operand(object, dst, times_half_pointer_size,
FixedArray::kHeaderSize - kHeapObjectTag));
}
// If we are already generating a shared stub, not inlining the
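In C++ terms, the rewritten non-serializer fast path does roughly the following. This is a minimal model, not real V8 code: set_bit and page_of are hypothetical helpers standing in for the bts instruction and page masking, and it relies on new space being a power-of-two-aligned region (so that anding with Heap::NewSpaceMask() recovers the region start, as the comment in heap.h says).

// Hypothetical C++ model of the emitted fast path; new_space_start is
// the address loaded from ExternalReference::new_space_start() above.
static inline void RecordWriteModel(uintptr_t object_addr, int offset) {
  // Skip the barrier for objects in new space: (addr - start) masked by
  // NewSpaceMask() is zero exactly when addr lies inside the region.
  if (((object_addr - new_space_start) & Heap::NewSpaceMask()) == 0) return;
  if (offset > 0 && offset < Page::kMaxHeapObjectSize) {
    // Word index of the written slot within its page ...
    int bit = static_cast<int>(
        ((object_addr + offset) & Page::kPageAlignmentMask) >>
        kPointerSizeLog2);
    // ... recorded as one bit in the remembered set, which now begins at
    // Page::kRSetOffset rather than at page offset 0 (the bts above).
    set_bit(page_of(object_addr) + Page::kRSetOffset, bit);
  }
}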

deps/v8/src/log.cc (47)

@@ -957,15 +957,25 @@ void Logger::TickEvent(TickSample* sample, bool overflow) {
}
bool Logger::IsProfilerPaused() {
return profiler_->paused();
int Logger::GetActiveProfilerModules() {
int result = PROFILER_MODULE_NONE;
if (!profiler_->paused()) {
result |= PROFILER_MODULE_CPU;
}
if (FLAG_log_gc) {
result |= PROFILER_MODULE_HEAP_STATS | PROFILER_MODULE_JS_CONSTRUCTORS;
}
return result;
}
void Logger::PauseProfiler() {
if (profiler_->paused()) {
return;
}
void Logger::PauseProfiler(int flags) {
if (!Log::IsEnabled()) return;
const int active_modules = GetActiveProfilerModules();
const int modules_to_disable = active_modules & flags;
if (modules_to_disable == PROFILER_MODULE_NONE) return;
if (modules_to_disable & PROFILER_MODULE_CPU) {
profiler_->pause();
if (FLAG_prof_lazy) {
if (!FLAG_sliding_state_window) ticker_->Stop();
@@ -973,15 +983,25 @@ void Logger::PauseProfiler() {
// Must be the same message as Log::kDynamicBufferSeal.
LOG(UncheckedStringEvent("profiler", "pause"));
}
}
if (modules_to_disable &
(PROFILER_MODULE_HEAP_STATS | PROFILER_MODULE_JS_CONSTRUCTORS)) {
FLAG_log_gc = false;
}
// Turn off logging if no active modules remain.
if ((active_modules & ~flags) == PROFILER_MODULE_NONE) {
is_logging_ = false;
}
}
void Logger::ResumeProfiler() {
if (!profiler_->paused() || !Log::IsEnabled()) {
return;
}
void Logger::ResumeProfiler(int flags) {
if (!Log::IsEnabled()) return;
const int modules_to_enable = ~GetActiveProfilerModules() & flags;
if (modules_to_enable != PROFILER_MODULE_NONE) {
is_logging_ = true;
}
if (modules_to_enable & PROFILER_MODULE_CPU) {
if (FLAG_prof_lazy) {
LOG(UncheckedStringEvent("profiler", "resume"));
FLAG_log_code = true;
@@ -989,6 +1009,11 @@ void Logger::ResumeProfiler() {
if (!FLAG_sliding_state_window) ticker_->Start();
}
profiler_->resume();
}
if (modules_to_enable &
(PROFILER_MODULE_HEAP_STATS | PROFILER_MODULE_JS_CONSTRUCTORS)) {
FLAG_log_gc = true;
}
}
@@ -996,7 +1021,7 @@ void Logger::ResumeProfiler() {
// either from main or Profiler's thread.
void Logger::StopLoggingAndProfiling() {
Log::stop();
PauseProfiler();
PauseProfiler(PROFILER_MODULE_CPU);
}

deps/v8/src/log.h (8)

@@ -249,11 +249,11 @@ class Logger {
}
// Pause/Resume collection of profiling data.
// When data collection is paused, Tick events are discarded until
// When data collection is paused, CPU Tick events are discarded until
// data collection is Resumed.
static bool IsProfilerPaused();
static void PauseProfiler();
static void ResumeProfiler();
static void PauseProfiler(int flags);
static void ResumeProfiler(int flags);
static int GetActiveProfilerModules();
// If logging is performed into a memory buffer, allows to
// retrieve previously written messages. See v8.h.

deps/v8/src/platform-nullos.cc (2)

@@ -80,7 +80,7 @@ int64_t OS::Ticks() {
// Returns a string identifying the current timezone taking into
// account daylight saving.
char* OS::LocalTimezone(double time) {
const char* OS::LocalTimezone(double time) {
UNIMPLEMENTED();
return "<none>";
}

deps/v8/src/platform-posix.cc (8)

@@ -86,16 +86,20 @@ int64_t OS::Ticks() {
}
char* OS::LocalTimezone(double time) {
const char* OS::LocalTimezone(double time) {
if (isnan(time)) return "";
time_t tv = static_cast<time_t>(floor(time/msPerSecond));
struct tm* t = localtime(&tv);
return const_cast<char*>(t->tm_zone);
if (NULL == t) return "";
return t->tm_zone;
}
double OS::DaylightSavingsOffset(double time) {
if (isnan(time)) return nan_value();
time_t tv = static_cast<time_t>(floor(time/msPerSecond));
struct tm* t = localtime(&tv);
if (NULL == t) return nan_value();
return t->tm_isdst > 0 ? 3600 * msPerSecond : 0;
}

deps/v8/src/platform-win32.cc (2)

@@ -603,7 +603,7 @@ int64_t OS::Ticks() {
// Returns a string identifying the current timezone taking into
// account daylight saving.
char* OS::LocalTimezone(double time) {
const char* OS::LocalTimezone(double time) {
return Time(time).LocalTimezone();
}

deps/v8/src/platform.h (2)

@@ -143,7 +143,7 @@ class OS {
// Returns a string identifying the current time zone. The
// timestamp is used for determining if DST is in effect.
static char* LocalTimezone(double time);
static const char* LocalTimezone(double time);
// Returns the local time offset in milliseconds east of UTC without
// taking daylight savings time into account.

deps/v8/src/runtime.cc (2)

@@ -4920,7 +4920,7 @@ static Object* Runtime_DateLocalTimezone(Arguments args) {
ASSERT(args.length() == 1);
CONVERT_DOUBLE_CHECKED(x, args[0]);
char* zone = OS::LocalTimezone(x);
const char* zone = OS::LocalTimezone(x);
return Heap::AllocateStringFromUtf8(CStrVector(zone));
}

deps/v8/src/spaces-inl.h (6)

@@ -103,9 +103,9 @@ void Page::ClearRSet() {
// The address of the rset word containing the bit for this word is computed as:
// page_address + words * 4
// For a 64-bit address, if it is:
// | page address | quadwords(5) | bit offset(5) | pointer alignment (3) |
// | page address | words(5) | bit offset(5) | pointer alignment (3) |
// The address of the rset word containing the bit for this word is computed as:
// page_address + quadwords * 4 + kRSetOffset.
// page_address + words * 4 + kRSetOffset.
// The rset is accessed as 32-bit words, and bit offsets in a 32-bit word,
// even on the X64 architecture.
@@ -115,7 +115,7 @@ Address Page::ComputeRSetBitPosition(Address address, int offset,
Page* page = Page::FromAddress(address);
uint32_t bit_offset = ArithmeticShiftRight(page->Offset(address) + offset,
kObjectAlignmentBits);
kPointerSizeLog2);
*bitmask = 1 << (bit_offset % kBitsPerInt);
Address rset_address =
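Worked through with ia32 numbers (kPointerSizeLog2 == 2, kBitsPerInt == 32): for a slot at page offset 0x1048, bit_offset is 0x1048 >> 2 = 1042, so the bitmask is 1 << (1042 % 32) = 1 << 18 and the bit lands in 32-bit rset word 1042 / 32 = 32 of the page. The remainder of the address computation is cut off above.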

deps/v8/src/spaces.h (23)

@@ -99,8 +99,11 @@ class AllocationInfo;
// its page offset by 32. Therefore, the object area in a page starts at the
// 256th byte (8K/32). Bytes 0 to 255 do not need the remembered set, so that
// the first two words (64 bits) in a page can be used for other purposes.
// TODO(X64): This description only represents the 32-bit layout.
// On the 64-bit platform, we add an offset to the start of the remembered set.
//
// On the 64-bit platform, we add an offset to the start of the remembered set,
// and pointers are aligned to 8-byte pointer size. This means that we need
// only 128 bytes for the RSet, and only get two bytes free in the RSet's RSet.
// For this reason we add an offset to get room for the Page data at the start.
//
// The mark-compact collector transforms a map pointer into a page index and a
// page offset. The map space can have up to 1024 pages, and 8M bytes (1024 *
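The 64-bit numbers in the new comment check out as follows: an 8K page holds 8192 / 8 = 1024 pointer-sized words, so its remembered set needs 1024 bits = 128 bytes; the remembered set itself then spans 128 / 8 = 16 words, whose own rset bits amount to 16 bits = 2 bytes, which is the "two bytes free in the RSet's RSet" mentioned above. Hence kRSetOffset (room for four pointers, per the constant below) reserves space for Page metadata at the start of the page.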
@@ -118,7 +121,7 @@ class Page {
// from [page_addr .. page_addr + kPageSize[
//
// Note that this function only works for addresses in normal paged
// spaces and addresses in the first 8K of large object pages (ie,
// spaces and addresses in the first 8K of large object pages (i.e.,
// the start of large objects but not necessarily derived pointers
// within them).
INLINE(static Page* FromAddress(Address a)) {
@ -218,7 +221,7 @@ class Page {
// Page size mask.
static const intptr_t kPageAlignmentMask = (1 << kPageSizeBits) - 1;
// The offset of the remembered set in a page, in addition to the empty words
// The offset of the remembered set in a page, in addition to the empty bytes
// formed as the remembered bits of the remembered set itself.
#ifdef V8_TARGET_ARCH_X64
static const int kRSetOffset = 4 * kPointerSize; // Room for four pointers.
@ -234,7 +237,7 @@ class Page {
// to align start of rset to a uint32_t address.
static const int kObjectStartOffset = 256;
// The start offset of the remembered set in a page.
// The start offset of the used part of the remembered set in a page.
static const int kRSetStartOffset = kRSetOffset +
kObjectStartOffset / kBitsPerPointer;
@ -264,16 +267,16 @@ class Page {
// low-order bit for large object pages will be cleared.
int is_normal_page;
// The following fields overlap with remembered set, they can only
// The following fields may overlap with remembered set, they can only
// be used in the mark-compact collector when remembered set is not
// used.
// The allocation pointer after relocating objects to this page.
Address mc_relocation_top;
// The index of the page in its owner space.
int mc_page_index;
// The allocation pointer after relocating objects to this page.
Address mc_relocation_top;
// The forwarding address of the first live object in this page.
Address mc_first_forwarded;
@ -1165,7 +1168,7 @@ class NewSpace : public Space {
// The start address of the space and a bit mask. Anding an address in the
// new space with the mask will result in the start address.
Address start() { return start_; }
uint32_t mask() { return address_mask_; }
uintptr_t mask() { return address_mask_; }
// The allocation top and limit addresses.
Address* allocation_top_address() { return &allocation_info_.top; }
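
The comment's "128 bytes" and the new kRSetStartOffset follow directly from the constants in this hunk; a worked-numbers sketch, assuming kPointerSize = 8 and 8K pages as the comment states:

#include <cstdio>

int main() {
  const int kPointerSize = 8;                   // x64 assumption
  const int kBitsPerPointer = kPointerSize * 8; // 64
  const int kPageSize = 8192;                   // 8K pages, per the comment
  const int kRSetOffset = 4 * kPointerSize;     // 32: room for four pointers
  const int kObjectStartOffset = 256;
  // One bit per pointer-sized slot: 8192 / 8 / 8 = 128 bytes of RSet.
  std::printf("rset size: %d bytes\n", kPageSize / kPointerSize / 8);
  // The used part skips the bits covering the first 256 non-object bytes.
  std::printf("used rset starts at byte %d\n",
              kRSetOffset + kObjectStartOffset / kBitsPerPointer);  // 36
  return 0;
}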

9
deps/v8/src/string-stream.cc

@ -153,7 +153,7 @@ void StringStream::Add(Vector<const char> format, Vector<FmtElm> elms) {
}
break;
}
case 'i': case 'd': case 'u': case 'x': case 'c': case 'p': case 'X': {
case 'i': case 'd': case 'u': case 'x': case 'c': case 'X': {
int value = current.data_.u_int_;
EmbeddedVector<char, 24> formatted;
int length = OS::SNPrintF(formatted, temp.start(), value);
@ -167,6 +167,13 @@ void StringStream::Add(Vector<const char> format, Vector<FmtElm> elms) {
Add(formatted.start());
break;
}
case 'p': {
void* value = current.data_.u_pointer_;
EmbeddedVector<char, 20> formatted;
OS::SNPrintF(formatted, temp.start(), value);
Add(formatted.start());
break;
}
default:
UNREACHABLE();
break;

16
deps/v8/src/string-stream.h

@ -90,21 +90,12 @@ class FmtElm {
FmtElm(Handle<Object> value) : type_(HANDLE) { // NOLINT
data_.u_handle_ = value.location();
}
FmtElm(void* value) : type_(INT) { // NOLINT
#if V8_HOST_ARCH_64_BIT
// TODO(x64): FmtElm needs to treat pointers as pointers, and not as
// ints. This will require adding a pointer type, etc. For now just
// hack it and truncate the pointer.
// http://code.google.com/p/v8/issues/detail?id=335
data_.u_int_ = 0;
UNIMPLEMENTED();
#else
data_.u_int_ = reinterpret_cast<int>(value);
#endif
FmtElm(void* value) : type_(POINTER) { // NOLINT
data_.u_pointer_ = value;
}
private:
friend class StringStream;
enum Type { INT, DOUBLE, C_STR, LC_STR, OBJ, HANDLE };
enum Type { INT, DOUBLE, C_STR, LC_STR, OBJ, HANDLE, POINTER };
Type type_;
union {
int u_int_;
@ -113,6 +104,7 @@ class FmtElm {
const Vector<const uc16>* u_lc_str_;
Object* u_obj_;
Object** u_handle_;
void* u_pointer_;
} data_;
};
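
A minimal sketch of the tagged-union fix above: giving pointers their own tag and union member means a 64-bit pointer is never truncated through u_int_, which was the pre-patch TODO. Names below are illustrative.

#include <cstdio>

struct FmtElmSketch {
  enum Type { INT, POINTER } type;
  union {
    int u_int;
    void* u_pointer;   // new member: carries the full 64-bit value
  } data;
};

void EmitSketch(const FmtElmSketch& e, char* out, int size) {
  if (e.type == FmtElmSketch::POINTER) {
    snprintf(out, size, "%p", e.data.u_pointer);  // the new '%p' path
  } else {
    snprintf(out, size, "%d", e.data.u_int);
  }
}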

4
deps/v8/src/variables.h

@ -162,9 +162,7 @@ class Variable: public ZoneObject {
// be the global scope). scope() is NULL in that case. Currently the
// scope is only used to follow the context chain length.
Scope* scope() const { return scope_; }
// If this assertion fails it means that some code has tried to
// treat the special this variable as an ordinary variable with
// the name "this".
Handle<String> name() const { return name_; }
Mode mode() const { return mode_; }
bool is_accessed_from_inner_scope() const {

4
deps/v8/src/version.cc

@ -34,8 +34,8 @@
// cannot be changed without changing the SCons build script.
#define MAJOR_VERSION 1
#define MINOR_VERSION 3
#define BUILD_NUMBER 1
#define PATCH_LEVEL 1
#define BUILD_NUMBER 2
#define PATCH_LEVEL 0
#define CANDIDATE_VERSION false
// Define SONAME to have the SCons build put a specific SONAME into the

14
deps/v8/src/x64/assembler-x64.h

@ -44,15 +44,25 @@ namespace internal {
// Test whether a 64-bit value is in a specific range.
static inline bool is_uint32(int64_t x) {
const int64_t kUInt32Mask = V8_INT64_C(0xffffffff);
static const int64_t kUInt32Mask = V8_INT64_C(0xffffffff);
return x == (x & kUInt32Mask);
}
static inline bool is_int32(int64_t x) {
const int64_t kMinIntValue = V8_INT64_C(-0x80000000);
static const int64_t kMinIntValue = V8_INT64_C(-0x80000000);
return is_uint32(x - kMinIntValue);
}
static inline bool uint_is_int32(uint64_t x) {
static const uint64_t kMaxIntValue = V8_UINT64_C(0x80000000);
return x < kMaxIntValue;
}
static inline bool is_uint32(uint64_t x) {
static const uint64_t kMaxUIntValue = V8_UINT64_C(0x100000000);
return x < kMaxUIntValue;
}
// CPU Registers.
//
// 1) We would prefer to use an enum, but enum values are assignment-
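
The new predicates above share one trick worth spelling out: is_int32 translates the signed range onto [0, 2^32) so a single unsigned comparison decides it. A self-contained sketch with the same arithmetic:

#include <cassert>
#include <cstdint>

static inline bool IsUint32(int64_t x) {
  return x == (x & INT64_C(0xffffffff));
}

static inline bool IsInt32(int64_t x) {
  // Shift [-2^31, 2^31) onto [0, 2^32) and reuse the unsigned check.
  return IsUint32(x - INT64_C(-0x80000000));
}

int main() {
  assert(IsInt32(-1) && !IsUint32(-1));
  assert(IsUint32(INT64_C(0xffffffff)) && !IsInt32(INT64_C(0xffffffff)));
  return 0;
}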

146
deps/v8/src/x64/cfg-x64.cc

@ -0,0 +1,146 @@
// Copyright 2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include "v8.h"
#include "cfg.h"
#include "codegen-inl.h"
#include "debug.h"
#include "macro-assembler-x64.h"
namespace v8 {
namespace internal {
#define __ ACCESS_MASM(masm)
void InstructionBlock::Compile(MacroAssembler* masm) {
ASSERT(!is_marked());
is_marked_ = true;
{
Comment cmt(masm, "[ InstructionBlock");
for (int i = 0, len = instructions_.length(); i < len; i++) {
instructions_[i]->Compile(masm);
}
}
successor_->Compile(masm);
}
void EntryNode::Compile(MacroAssembler* masm) {
ASSERT(!is_marked());
is_marked_ = true;
Label deferred_enter, deferred_exit;
{
Comment cmnt(masm, "[ EntryNode");
__ push(rbp);
__ movq(rbp, rsp);
__ push(rsi);
__ push(rdi);
int count = CfgGlobals::current()->fun()->scope()->num_stack_slots();
if (count > 0) {
__ movq(kScratchRegister, Factory::undefined_value(),
RelocInfo::EMBEDDED_OBJECT);
for (int i = 0; i < count; i++) {
__ push(kScratchRegister);
}
}
if (FLAG_trace) {
__ CallRuntime(Runtime::kTraceEnter, 0);
}
if (FLAG_check_stack) {
ExternalReference stack_limit =
ExternalReference::address_of_stack_guard_limit();
__ movq(kScratchRegister, stack_limit);
__ cmpq(rsp, Operand(kScratchRegister, 0));
__ j(below, &deferred_enter);
__ bind(&deferred_exit);
}
}
successor_->Compile(masm);
if (FLAG_check_stack) {
__ bind(&deferred_enter);
StackCheckStub stub;
__ CallStub(&stub);
__ jmp(&deferred_exit);
}
}
void ExitNode::Compile(MacroAssembler* masm) {
ASSERT(!is_marked());
is_marked_ = true;
Comment cmnt(masm, "[ ExitNode");
if (FLAG_trace) {
__ push(rax);
__ CallRuntime(Runtime::kTraceExit, 1);
}
__ RecordJSReturn();
__ movq(rsp, rbp);
__ pop(rbp);
int count = CfgGlobals::current()->fun()->scope()->num_parameters();
__ ret((count + 1) * kPointerSize);
// Add padding that will be overwritten by a debugger breakpoint.
// "movq rsp, rbp; pop rbp" has length 5. "ret k" has length 2.
const int kPadding = Debug::kX64JSReturnSequenceLength - 5 - 2;
for (int i = 0; i < kPadding; ++i) {
__ int3();
}
}
void ReturnInstr::Compile(MacroAssembler* masm) {
Comment cmnt(masm, "[ ReturnInstr");
value_->ToRegister(masm, rax);
}
void Constant::ToRegister(MacroAssembler* masm, Register reg) {
__ Move(reg, handle_);
}
void SlotLocation::ToRegister(MacroAssembler* masm, Register reg) {
switch (type_) {
case Slot::PARAMETER: {
int count = CfgGlobals::current()->fun()->scope()->num_parameters();
__ movq(reg, Operand(rbp, (1 + count - index_) * kPointerSize));
break;
}
case Slot::LOCAL: {
const int kOffset = JavaScriptFrameConstants::kLocal0Offset;
__ movq(reg, Operand(rbp, kOffset - index_ * kPointerSize));
break;
}
default:
UNREACHABLE();
}
}
#undef __
} } // namespace v8::internal
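
The pattern shared by InstructionBlock, EntryNode, and ExitNode above is a mark-and-recurse linearization: each node emits its own code once and then compiles its successor, so straight-line graphs come out in program order without a worklist. A stripped-down sketch (the real nodes ASSERT on a revisit rather than returning):

struct CfgNodeSketch {
  bool is_marked = false;
  CfgNodeSketch* successor = nullptr;
  virtual void Emit() = 0;           // per-node code generation
  virtual ~CfgNodeSketch() {}
  void Compile() {
    if (is_marked) return;           // the real code ASSERTs here instead
    is_marked = true;
    Emit();
    if (successor != nullptr) successor->Compile();
  }
};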

64
deps/v8/src/x64/codegen-x64.cc

@ -3421,9 +3421,20 @@ void CodeGenerator::GenerateObjectEquals(ZoneList<Expression*>* args) {
}
void CodeGenerator::GenerateGetFramePointer(ZoneList<Expression*>* args) {
ASSERT(args->length() == 0);
ASSERT(kSmiTag == 0); // RBP value is aligned, so it should look like Smi.
Result rbp_as_smi = allocator_->Allocate();
ASSERT(rbp_as_smi.is_valid());
__ movq(rbp_as_smi.reg(), rbp);
frame_->Push(&rbp_as_smi);
}
void CodeGenerator::GenerateRandomPositiveSmi(ZoneList<Expression*>* args) {
ASSERT(args->length() == 0);
frame_->SpillAll();
__ push(rsi);
// Make sure the frame is aligned like the OS expects.
static const int kFrameAlignment = OS::ActivationFrameAlignment();
@ -3436,11 +3447,12 @@ void CodeGenerator::GenerateRandomPositiveSmi(ZoneList<Expression*>* args) {
// Call V8::RandomPositiveSmi().
__ Call(FUNCTION_ADDR(V8::RandomPositiveSmi), RelocInfo::RUNTIME_ENTRY);
// Restore stack pointer from callee-saved register edi.
// Restore stack pointer from callee-saved register.
if (kFrameAlignment > 0) {
__ movq(rsp, rbx);
}
__ pop(rsi);
Result result = allocator_->Allocate(rax);
frame_->Push(&result);
}
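
GenerateGetFramePointer leans on one invariant: with kSmiTag == 0 and a stack-aligned RBP, the raw register value already passes a Smi tag check, so it can be pushed unconverted. A tiny sketch of that observation (the address is illustrative):

#include <cassert>
#include <cstdint>

int main() {
  const uintptr_t kSmiTagMask = 1;        // low bit clear means Smi here
  uintptr_t fake_rbp = 0x7fff5fbff8d0;    // any 8-byte-aligned address
  assert((fake_rbp & kSmiTagMask) == 0);  // aligned RBP "looks like" a Smi
  return 0;
}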
@ -5555,13 +5567,16 @@ void Reference::TakeValue(TypeofState typeof_state) {
ASSERT(slot != NULL);
if (slot->type() == Slot::LOOKUP ||
slot->type() == Slot::CONTEXT ||
slot->var()->mode() == Variable::CONST) {
slot->var()->mode() == Variable::CONST ||
slot->is_arguments()) {
GetValue(typeof_state);
return;
}
// Only non-constant, frame-allocated parameters and locals can reach
// here.
// here. Be careful not to use the optimizations for arguments
// object access since it may not have been initialized yet.
ASSERT(!slot->is_arguments());
if (slot->type() == Slot::PARAMETER) {
cgen_->frame()->TakeParameterAt(slot->index());
} else {
@ -6419,22 +6434,23 @@ void CEntryStub::GenerateThrowOutOfMemory(MacroAssembler* masm) {
// Fetch top stack handler.
ExternalReference handler_address(Top::k_handler_address);
__ movq(kScratchRegister, handler_address);
__ movq(rdx, Operand(kScratchRegister, 0));
__ movq(rsp, Operand(kScratchRegister, 0));
// Unwind the handlers until the ENTRY handler is found.
Label loop, done;
__ bind(&loop);
// Load the type of the current stack handler.
__ cmpq(Operand(rdx, StackHandlerConstants::kStateOffset),
__ cmpq(Operand(rsp, StackHandlerConstants::kStateOffset),
Immediate(StackHandler::ENTRY));
__ j(equal, &done);
// Fetch the next handler in the list.
__ movq(rdx, Operand(rdx, StackHandlerConstants::kNextOffset));
ASSERT(StackHandlerConstants::kNextOffset == 0);
__ pop(rsp);
__ jmp(&loop);
__ bind(&done);
// Set the top handler address to next handler past the current ENTRY handler.
__ movq(rax, Operand(rdx, StackHandlerConstants::kNextOffset));
__ pop(rax);
__ store_rax(handler_address);
// Set external caught exception to false.
@ -6447,14 +6463,12 @@ void CEntryStub::GenerateThrowOutOfMemory(MacroAssembler* masm) {
ExternalReference pending_exception(Top::k_pending_exception_address);
__ store_rax(pending_exception);
// Restore the stack to the address of the ENTRY handler
__ movq(rsp, rdx);
// Clear the context pointer.
__ xor_(rsi, rsi);
// Restore registers from handler.
ASSERT_EQ(StackHandlerConstants::kNextOffset + kPointerSize,
StackHandlerConstants::kFPOffset);
__ pop(rbp); // FP
ASSERT_EQ(StackHandlerConstants::kFPOffset + kPointerSize,
StackHandlerConstants::kStateOffset);
@ -6570,6 +6584,9 @@ void CEntryStub::GenerateBody(MacroAssembler* masm, bool is_debug_break) {
void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
Label invoke, exit;
#ifdef ENABLE_LOGGING_AND_PROFILING
Label not_outermost_js, not_outermost_js_2;
#endif
// Setup frame.
__ push(rbp);
@ -6595,6 +6612,17 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
__ load_rax(c_entry_fp);
__ push(rax);
#ifdef ENABLE_LOGGING_AND_PROFILING
// If this is the outermost JS call, set js_entry_sp value.
ExternalReference js_entry_sp(Top::k_js_entry_sp_address);
__ load_rax(js_entry_sp);
__ testq(rax, rax);
__ j(not_zero, &not_outermost_js);
__ movq(rax, rbp);
__ store_rax(js_entry_sp);
__ bind(&not_outermost_js);
#endif
// Call a faked try-block that does the invoke.
__ call(&invoke);
@ -6637,6 +6665,16 @@ void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
// Pop next_sp.
__ addq(rsp, Immediate(StackHandlerConstants::kSize - kPointerSize));
#ifdef ENABLE_LOGGING_AND_PROFILING
// If the current RBP value is the same as the js_entry_sp value, it means
// that the current function is the outermost.
__ movq(kScratchRegister, js_entry_sp);
__ cmpq(rbp, Operand(kScratchRegister, 0));
__ j(not_equal, &not_outermost_js_2);
__ movq(Operand(kScratchRegister, 0), Immediate(0));
__ bind(&not_outermost_js_2);
#endif
// Restore the top frame descriptor from the stack.
__ bind(&exit);
__ movq(kScratchRegister, ExternalReference(Top::k_c_entry_fp_address));
@ -6770,6 +6808,7 @@ void FloatingPointHelper::LoadFloatOperands(MacroAssembler* masm) {
__ bind(&done);
}
void FloatingPointHelper::LoadFloatOperands(MacroAssembler* masm,
Register lhs,
Register rhs) {
@ -6804,6 +6843,7 @@ void FloatingPointHelper::LoadFloatOperands(MacroAssembler* masm,
__ bind(&done);
}
void FloatingPointHelper::CheckFloatOperands(MacroAssembler* masm,
Label* non_float) {
Label test_other, done;
@ -6841,6 +6881,7 @@ const char* GenericBinaryOpStub::GetName() {
}
}
void GenericBinaryOpStub::GenerateSmiCode(MacroAssembler* masm, Label* slow) {
// Perform fast-case smi code for the operation (rax <op> rbx) and
// leave result in register rax.
@ -6981,7 +7022,6 @@ void GenericBinaryOpStub::GenerateSmiCode(MacroAssembler* masm, Label* slow) {
void GenericBinaryOpStub::Generate(MacroAssembler* masm) {
Label call_runtime;
if (flags_ == SMI_CODE_IN_STUB) {
// The fast case smi code wasn't inlined in the stub caller
// code. Generate it here to speed up common operations.
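
The two ENABLE_LOGGING_AND_PROFILING hunks above implement outermost-entry tracking for the profiler: js_entry_sp is set from RBP only when it is currently zero (the first JS entry) and cleared only by the frame that set it. A hedged sketch of the protocol in plain C++:

#include <cstdint>

static uintptr_t js_entry_sp = 0;  // one slot per thread in the real code

// Mirrors the entry-stub hunks: set on outermost entry, clear on its exit.
void OnJsEntry(uintptr_t rbp) {
  if (js_entry_sp == 0) js_entry_sp = rbp;   // outermost JS call
}

void OnJsExit(uintptr_t rbp) {
  if (js_entry_sp == rbp) js_entry_sp = 0;   // only the outermost clears it
}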

4
deps/v8/src/x64/codegen-x64.h

@ -534,6 +534,8 @@ class CodeGenerator: public AstVisitor {
void GenerateLog(ZoneList<Expression*>* args);
void GenerateGetFramePointer(ZoneList<Expression*>* args);
// Fast support for Math.random().
void GenerateRandomPositiveSmi(ZoneList<Expression*>* args);
@ -593,6 +595,8 @@ class CodeGenerator: public AstVisitor {
friend class Reference;
friend class Result;
friend class CodeGeneratorPatcher; // Used in test-log-stack-tracer.cc
DISALLOW_COPY_AND_ASSIGN(CodeGenerator);
};

40
deps/v8/src/x64/disasm-x64.cc

@ -88,7 +88,7 @@ static ByteMnemonic two_operands_instr[] = {
{ 0x39, OPER_REG_OP_ORDER, "cmp" },
{ 0x3A, BYTE_REG_OPER_OP_ORDER, "cmp" },
{ 0x3B, REG_OPER_OP_ORDER, "cmp" },
{ 0x8D, REG_OPER_OP_ORDER, "lea" },
{ 0x63, REG_OPER_OP_ORDER, "movsxlq" },
{ 0x84, BYTE_REG_OPER_OP_ORDER, "test" },
{ 0x85, REG_OPER_OP_ORDER, "test" },
{ 0x86, BYTE_REG_OPER_OP_ORDER, "xchg" },
@ -97,6 +97,7 @@ static ByteMnemonic two_operands_instr[] = {
{ 0x89, OPER_REG_OP_ORDER, "mov" },
{ 0x8A, BYTE_REG_OPER_OP_ORDER, "mov" },
{ 0x8B, REG_OPER_OP_ORDER, "mov" },
{ 0x8D, REG_OPER_OP_ORDER, "lea" },
{ -1, UNSET_OP_ORDER, "" }
};
@ -139,7 +140,7 @@ static ByteMnemonic short_immediate_instr[] = {
static const char* conditional_code_suffix[] = {
"o", "no", "c", "nc", "z", "nz", "a", "na",
"o", "no", "c", "nc", "z", "nz", "na", "a",
"s", "ns", "pe", "po", "l", "ge", "le", "g"
};
@ -252,6 +253,24 @@ void InstructionTable::AddJumpConditionalShort() {
static InstructionTable instruction_table;
static InstructionDesc cmov_instructions[16] = {
{"cmovo", TWO_OPERANDS_INSTR, REG_OPER_OP_ORDER, false},
{"cmovno", TWO_OPERANDS_INSTR, REG_OPER_OP_ORDER, false},
{"cmovc", TWO_OPERANDS_INSTR, REG_OPER_OP_ORDER, false},
{"cmovnc", TWO_OPERANDS_INSTR, REG_OPER_OP_ORDER, false},
{"cmovz", TWO_OPERANDS_INSTR, REG_OPER_OP_ORDER, false},
{"cmovnz", TWO_OPERANDS_INSTR, REG_OPER_OP_ORDER, false},
{"cmovna", TWO_OPERANDS_INSTR, REG_OPER_OP_ORDER, false},
{"cmova", TWO_OPERANDS_INSTR, REG_OPER_OP_ORDER, false},
{"cmovs", TWO_OPERANDS_INSTR, REG_OPER_OP_ORDER, false},
{"cmovns", TWO_OPERANDS_INSTR, REG_OPER_OP_ORDER, false},
{"cmovpe", TWO_OPERANDS_INSTR, REG_OPER_OP_ORDER, false},
{"cmovpo", TWO_OPERANDS_INSTR, REG_OPER_OP_ORDER, false},
{"cmovl", TWO_OPERANDS_INSTR, REG_OPER_OP_ORDER, false},
{"cmovge", TWO_OPERANDS_INSTR, REG_OPER_OP_ORDER, false},
{"cmovle", TWO_OPERANDS_INSTR, REG_OPER_OP_ORDER, false},
{"cmovg", TWO_OPERANDS_INSTR, REG_OPER_OP_ORDER, false}
};
//------------------------------------------------------------------------------
// DisassemblerX64 implementation.
@ -533,7 +552,7 @@ int DisassemblerX64::PrintImmediate(byte* data, OperandSize size) {
value = 0; // Initialize variables on all paths to satisfy the compiler.
count = 0;
}
AppendToBuffer(V8_PTR_PREFIX"x", value);
AppendToBuffer("%" V8_PTR_PREFIX "x", value);
return count;
}
@ -966,6 +985,13 @@ int DisassemblerX64::TwoByteOpcodeInstruction(byte* data) {
// RDTSC or CPUID
AppendToBuffer("%s", mnemonic);
} else if ((opcode & 0xF0) == 0x40) {
// CMOVcc: conditional move.
int condition = opcode & 0x0F;
const InstructionDesc& idesc = cmov_instructions[condition];
byte_size_operand_ = idesc.byte_size_operation;
current += PrintOperands(idesc.mnem, idesc.op_order_, current);
} else if ((opcode & 0xF0) == 0x80) {
// Jcc: Conditional jump (branch).
current = data + JumpConditional(data);
@ -1350,9 +1376,9 @@ int DisassemblerX64::InstructionDecode(v8::internal::Vector<char> out_buffer,
const char* memory_location = NameOfAddress(
reinterpret_cast<byte*>(
*reinterpret_cast<int32_t*>(data + 1)));
if (*data == 0xA3) { // Opcode 0xA3
if (*data == 0xA1) { // Opcode 0xA1
AppendToBuffer("movzxlq rax,(%s)", memory_location);
} else { // Opcode 0xA1
} else { // Opcode 0xA3
AppendToBuffer("movzxlq (%s),rax", memory_location);
}
data += 5;
@ -1362,9 +1388,9 @@ int DisassemblerX64::InstructionDecode(v8::internal::Vector<char> out_buffer,
// New x64 instruction mov rax,(imm_64).
const char* memory_location = NameOfAddress(
*reinterpret_cast<byte**>(data + 1));
if (*data == 0xA3) { // Opcode 0xA3
if (*data == 0xA1) { // Opcode 0xA1
AppendToBuffer("movq rax,(%s)", memory_location);
} else { // Opcode 0xA1
} else { // Opcode 0xA3
AppendToBuffer("movq (%s),rax", memory_location);
}
data += 9;
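
The CMOVcc support above keys off the low nibble of the 0x40-0x4F two-byte opcodes; a compact sketch of that decode, using the same mnemonic order as the new table:

// Returns the mnemonic for a 0x0F-prefixed cmov opcode byte, or nullptr.
const char* CmovMnemonicSketch(unsigned char opcode) {
  static const char* kNames[16] = {
      "cmovo",  "cmovno", "cmovc",  "cmovnc", "cmovz",  "cmovnz",
      "cmovna", "cmova",  "cmovs",  "cmovns", "cmovpe", "cmovpo",
      "cmovl",  "cmovge", "cmovle", "cmovg"};
  if ((opcode & 0xF0) != 0x40) return nullptr;  // not a CMOVcc
  return kNames[opcode & 0x0F];                 // condition = low nibble
}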

38
deps/v8/src/x64/ic-x64.cc

@ -424,6 +424,9 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm) {
// Check that the key is a smi.
__ testl(rbx, Immediate(kSmiTagMask));
__ j(not_zero, &slow);
// If it is a smi, make sure it is zero-extended, so it can be
// used as an index in a memory operand.
__ movl(rbx, rbx); // Clear the high bits of rbx.
__ CmpInstanceType(rcx, JS_ARRAY_TYPE);
__ j(equal, &array);
@ -434,7 +437,7 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm) {
// Object case: Check key against length in the elements array.
// rax: value
// rdx: JSObject
// rbx: index (as a smi)
// rbx: index (as a smi), zero-extended.
__ movq(rcx, FieldOperand(rdx, JSObject::kElementsOffset));
// Check that the object is in fast mode (not dictionary).
__ Cmp(FieldOperand(rcx, HeapObject::kMapOffset), Factory::fixed_array_map());
@ -502,7 +505,8 @@ void KeyedStoreIC::GenerateGeneric(MacroAssembler* masm) {
// rax: value
// rcx: FixedArray
// rbx: index (as a smi)
__ movq(Operand(rcx, rbx, times_4, FixedArray::kHeaderSize - kHeapObjectTag),
__ movq(Operand(rcx, rbx, times_half_pointer_size,
FixedArray::kHeaderSize - kHeapObjectTag),
rax);
// Update write barrier for the elements array address.
__ movq(rdx, rax);
@ -602,9 +606,22 @@ void LoadIC::Generate(MacroAssembler* masm, ExternalReference const& f) {
void LoadIC::GenerateArrayLength(MacroAssembler* masm) {
Generate(masm, ExternalReference(IC_Utility(kLoadIC_Miss)));
// ----------- S t a t e -------------
// -- rcx : name
// -- rsp[0] : return address
// -- rsp[8] : receiver
// -----------------------------------
Label miss;
__ movq(rax, Operand(rsp, kPointerSize));
StubCompiler::GenerateLoadArrayLength(masm, rax, rdx, &miss);
__ bind(&miss);
StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
}
void LoadIC::GenerateFunctionPrototype(MacroAssembler* masm) {
Generate(masm, ExternalReference(IC_Utility(kLoadIC_Miss)));
}
@ -646,10 +663,23 @@ void LoadIC::GenerateNormal(MacroAssembler* masm) {
void LoadIC::GenerateStringLength(MacroAssembler* masm) {
Generate(masm, ExternalReference(IC_Utility(kLoadIC_Miss)));
// ----------- S t a t e -------------
// -- rcx : name
// -- rsp[0] : return address
// -- rsp[8] : receiver
// -----------------------------------
Label miss;
__ movq(rax, Operand(rsp, kPointerSize));
StubCompiler::GenerateLoadStringLength(masm, rax, rdx, &miss);
__ bind(&miss);
StubCompiler::GenerateLoadMiss(masm, Code::LOAD_IC);
}
bool LoadIC::PatchInlinedLoad(Address address, Object* map, int offset) {
// The address of the instruction following the call.
Address test_instruction_address =
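
The times_half_pointer_size change above relies on the 2009-era x64 Smi encoding (an integer shifted left by one): scaling the Smi by 4 yields index * 8, exactly one pointer-sized element slot. A worked sketch under that assumption:

#include <cassert>
#include <cstdint>

int main() {
  const intptr_t index = 5;
  const intptr_t smi = index << 1;      // assumed Smi encoding: value << 1
  const intptr_t kHalfPointerSize = 4;  // the scale factor used by the patch
  // Operand(rcx, rbx, times_half_pointer_size, ...) computes base + smi * 4.
  assert(smi * kHalfPointerSize == index * 8);  // one 8-byte slot per element
  return 0;
}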

156
deps/v8/src/x64/macro-assembler-x64.cc

@ -31,6 +31,7 @@
#include "codegen-inl.h"
#include "assembler-x64.h"
#include "macro-assembler-x64.h"
#include "serialize.h"
#include "debug.h"
namespace v8 {
@ -45,11 +46,156 @@ MacroAssembler::MacroAssembler(void* buffer, int size)
}
// TODO(x64): For now, the write barrier is disabled on x64 and we
// therefore generate no code. This should be fixed when the write
// barrier is enabled.
void MacroAssembler::RecordWrite(Register object, int offset,
Register value, Register scratch) {
static void RecordWriteHelper(MacroAssembler* masm,
Register object,
Register addr,
Register scratch) {
Label fast;
// Compute the page address from the heap object pointer, leave it
// in 'object'.
ASSERT(is_int32(~Page::kPageAlignmentMask));
masm->and_(object,
Immediate(static_cast<int32_t>(~Page::kPageAlignmentMask)));
// Compute the bit addr in the remembered set, leave it in "addr".
masm->subq(addr, object);
masm->shr(addr, Immediate(kPointerSizeLog2));
// If the bit offset lies beyond the normal remembered set range, it is in
// the extra remembered set area of a large object.
masm->cmpq(addr, Immediate(Page::kPageSize / kPointerSize));
masm->j(less, &fast);
// Adjust 'addr' to be relative to the start of the extra remembered set
// and the page address in 'object' to be the address of the extra
// remembered set.
masm->subq(addr, Immediate(Page::kPageSize / kPointerSize));
// Load the array length into 'scratch'.
masm->movl(scratch,
Operand(object,
Page::kObjectStartOffset + FixedArray::kLengthOffset));
// Extra remembered set starts right after FixedArray.
// Add the page header, array header, and array body size
// (length * pointer size) to the page address to find the extra remembered
// set start.
masm->lea(object,
Operand(object, scratch, times_pointer_size,
Page::kObjectStartOffset + FixedArray::kHeaderSize));
// NOTE: For now, we use the bit-test-and-set (bts) x86 instruction
// to limit code size. We should probably evaluate this decision by
// measuring the performance of an equivalent implementation using
// "simpler" instructions
masm->bind(&fast);
masm->bts(Operand(object, Page::kRSetOffset), addr);
}
class RecordWriteStub : public CodeStub {
public:
RecordWriteStub(Register object, Register addr, Register scratch)
: object_(object), addr_(addr), scratch_(scratch) { }
void Generate(MacroAssembler* masm);
private:
Register object_;
Register addr_;
Register scratch_;
#ifdef DEBUG
void Print() {
PrintF("RecordWriteStub (object reg %d), (addr reg %d), (scratch reg %d)\n",
object_.code(), addr_.code(), scratch_.code());
}
#endif
// Minor key encoding in 12 bits of three registers (object, address and
// scratch) OOOOAAAASSSS.
class ScratchBits: public BitField<uint32_t, 0, 4> {};
class AddressBits: public BitField<uint32_t, 4, 4> {};
class ObjectBits: public BitField<uint32_t, 8, 4> {};
Major MajorKey() { return RecordWrite; }
int MinorKey() {
// Encode the registers.
return ObjectBits::encode(object_.code()) |
AddressBits::encode(addr_.code()) |
ScratchBits::encode(scratch_.code());
}
};
void RecordWriteStub::Generate(MacroAssembler* masm) {
RecordWriteHelper(masm, object_, addr_, scratch_);
masm->ret(0);
}
// Set the remembered set bit for [object+offset].
// object is the object being stored into, value is the object being stored.
// If offset is zero, then the scratch register contains the array index into
// the elements array represented as a Smi.
// All registers are clobbered by the operation.
void MacroAssembler::RecordWrite(Register object,
int offset,
Register value,
Register scratch) {
// First, check if a remembered set write is even needed. The tests below
// catch stores of Smis and stores into young gen (which does not have space
// for the remembered set bits).
Label done;
// Test that the object address is not in the new space. We cannot
// set remembered set bits in the new space.
movq(value, object);
ASSERT(is_int32(static_cast<int64_t>(Heap::NewSpaceMask())));
and_(value, Immediate(static_cast<int32_t>(Heap::NewSpaceMask())));
movq(kScratchRegister, ExternalReference::new_space_start());
cmpq(value, kScratchRegister);
j(equal, &done);
if ((offset > 0) && (offset < Page::kMaxHeapObjectSize)) {
// Compute the bit offset in the remembered set, leave it in 'value'.
lea(value, Operand(object, offset));
ASSERT(is_int32(Page::kPageAlignmentMask));
and_(value, Immediate(static_cast<int32_t>(Page::kPageAlignmentMask)));
shr(value, Immediate(kObjectAlignmentBits));
// Compute the page address from the heap object pointer, leave it in
// 'object' (immediate value is sign extended).
and_(object, Immediate(~Page::kPageAlignmentMask));
// NOTE: For now, we use the bit-test-and-set (bts) x86 instruction
// to limit code size. We should probably evaluate this decision by
// measuring the performance of an equivalent implementation using
// "simpler" instructions
bts(Operand(object, Page::kRSetOffset), value);
} else {
Register dst = scratch;
if (offset != 0) {
lea(dst, Operand(object, offset));
} else {
// array access: calculate the destination address in the same manner as
// KeyedStoreIC::GenerateGeneric. Multiply a smi by 4 to get an offset
// into an array of words.
lea(dst, Operand(object, dst, times_half_pointer_size,
FixedArray::kHeaderSize - kHeapObjectTag));
}
// If we are already generating a shared stub, not inlining the
// record write code isn't going to save us any memory.
if (generating_stub()) {
RecordWriteHelper(this, object, dst, value);
} else {
RecordWriteStub stub(object, dst, value);
CallStub(&stub);
}
}
bind(&done);
}
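
The bts in both paths above sets one remembered-set bit per pointer-sized slot; a C-level equivalent of what RecordWriteHelper's fast case computes (constants illustrative, as in the earlier sketches):

#include <cstdint>

void SetRSetBitSketch(uintptr_t page, uintptr_t slot, uint32_t* rset_words) {
  const int kPointerSizeLog2 = 3;                  // 8-byte slots
  uintptr_t bit = (slot - page) >> kPointerSizeLog2;
  rset_words[bit / 32] |= 1u << (bit % 32);        // bts-equivalent update
}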

4
deps/v8/test/cctest/SConscript

@ -63,9 +63,9 @@ SOURCES = {
'arch:ia32': [
'test-assembler-ia32.cc',
'test-disasm-ia32.cc',
'test-log-ia32.cc'
'test-log-stack-tracer.cc'
],
'arch:x64': ['test-assembler-x64.cc'],
'arch:x64': ['test-assembler-x64.cc', 'test-log-stack-tracer.cc'],
'os:linux': ['test-platform-linux.cc'],
'os:macos': ['test-platform-macos.cc'],
'os:nullos': ['test-platform-nullos.cc'],

4
deps/v8/test/cctest/cctest.status

@ -63,7 +63,6 @@ test-api/TryCatchInTryFinally: FAIL
[ $arch == x64 ]
test-regexp/Graph: PASS || CRASH || FAIL
test-decls/Present: CRASH || FAIL
test-decls/Unknown: CRASH || FAIL
test-decls/Appearing: CRASH || FAIL
@ -114,9 +113,6 @@ test-debug/DebuggerUnload: CRASH || FAIL
test-debug/DebuggerHostDispatch: CRASH || FAIL
test-debug/DebugBreakInMessageHandler: CRASH || FAIL
test-debug/NoDebugBreakInAfterCompileMessageHandler: CRASH || FAIL
test-api/HugeConsStringOutOfMemory: CRASH || FAIL
test-api/OutOfMemory: CRASH || FAIL
test-api/OutOfMemoryNested: CRASH || FAIL
test-api/Threading: CRASH || FAIL
test-api/Threading2: PASS || TIMEOUT
test-api/TryCatchSourceInfo: CRASH || FAIL

376
deps/v8/test/cctest/test-log-stack-tracer.cc

@ -0,0 +1,376 @@
// Copyright 2006-2009 the V8 project authors. All rights reserved.
//
// Tests of profiler-related functions from log.h
#ifdef ENABLE_LOGGING_AND_PROFILING
#include <stdlib.h>
#include "v8.h"
#include "codegen.h"
#include "log.h"
#include "top.h"
#include "cctest.h"
#include "disassembler.h"
#include "register-allocator-inl.h"
using v8::Function;
using v8::Local;
using v8::Object;
using v8::Script;
using v8::String;
using v8::Value;
using v8::internal::byte;
using v8::internal::Address;
using v8::internal::Handle;
using v8::internal::JSFunction;
using v8::internal::StackTracer;
using v8::internal::TickSample;
using v8::internal::Top;
namespace i = v8::internal;
static v8::Persistent<v8::Context> env;
static struct {
TickSample* sample;
} trace_env = { NULL };
static void InitTraceEnv(TickSample* sample) {
trace_env.sample = sample;
}
static void DoTrace(Address fp) {
trace_env.sample->fp = reinterpret_cast<uintptr_t>(fp);
// sp is only used to define the stack high bound.
trace_env.sample->sp =
reinterpret_cast<uintptr_t>(trace_env.sample) - 10240;
StackTracer::Trace(trace_env.sample);
}
// Hide c_entry_fp to emulate the situation when sampling is done while
// pure JS code is being executed.
static void DoTraceHideCEntryFPAddress(Address fp) {
v8::internal::Address saved_c_frame_fp = *(Top::c_entry_fp_address());
CHECK(saved_c_frame_fp);
*(Top::c_entry_fp_address()) = 0;
DoTrace(fp);
*(Top::c_entry_fp_address()) = saved_c_frame_fp;
}
static void CheckRetAddrIsInFunction(const char* func_name,
Address ret_addr,
Address func_start_addr,
unsigned int func_len) {
printf("CheckRetAddrIsInFunction \"%s\": %p %p %p\n",
func_name, func_start_addr, ret_addr, func_start_addr + func_len);
CHECK_GE(ret_addr, func_start_addr);
CHECK_GE(func_start_addr + func_len, ret_addr);
}
static void CheckRetAddrIsInJSFunction(const char* func_name,
Address ret_addr,
Handle<JSFunction> func) {
v8::internal::Code* func_code = func->code();
CheckRetAddrIsInFunction(
func_name, ret_addr,
func_code->instruction_start(),
func_code->ExecutableSize());
}
// --- T r a c e E x t e n s i o n ---
class TraceExtension : public v8::Extension {
public:
TraceExtension() : v8::Extension("v8/trace", kSource) { }
virtual v8::Handle<v8::FunctionTemplate> GetNativeFunction(
v8::Handle<String> name);
static v8::Handle<v8::Value> Trace(const v8::Arguments& args);
static v8::Handle<v8::Value> JSTrace(const v8::Arguments& args);
static v8::Handle<v8::Value> JSEntrySP(const v8::Arguments& args);
static v8::Handle<v8::Value> JSEntrySPLevel2(const v8::Arguments& args);
private:
static Address GetFP(const v8::Arguments& args);
static const char* kSource;
};
const char* TraceExtension::kSource =
"native function trace();"
"native function js_trace();"
"native function js_entry_sp();"
"native function js_entry_sp_level2();";
v8::Handle<v8::FunctionTemplate> TraceExtension::GetNativeFunction(
v8::Handle<String> name) {
if (name->Equals(String::New("trace"))) {
return v8::FunctionTemplate::New(TraceExtension::Trace);
} else if (name->Equals(String::New("js_trace"))) {
return v8::FunctionTemplate::New(TraceExtension::JSTrace);
} else if (name->Equals(String::New("js_entry_sp"))) {
return v8::FunctionTemplate::New(TraceExtension::JSEntrySP);
} else if (name->Equals(String::New("js_entry_sp_level2"))) {
return v8::FunctionTemplate::New(TraceExtension::JSEntrySPLevel2);
} else {
CHECK(false);
return v8::Handle<v8::FunctionTemplate>();
}
}
Address TraceExtension::GetFP(const v8::Arguments& args) {
CHECK_EQ(1, args.Length());
// CodeGenerator::GenerateGetFramePointer pushes EBP / RBP value
// on stack. In 64-bit mode we can't use Smi operations code because
// they check that value is within Smi bounds.
Address fp = *reinterpret_cast<Address*>(*args[0]);
printf("Trace: %p\n", fp);
return fp;
}
v8::Handle<v8::Value> TraceExtension::Trace(const v8::Arguments& args) {
DoTrace(GetFP(args));
return v8::Undefined();
}
v8::Handle<v8::Value> TraceExtension::JSTrace(const v8::Arguments& args) {
DoTraceHideCEntryFPAddress(GetFP(args));
return v8::Undefined();
}
static Address GetJsEntrySp() {
CHECK_NE(NULL, Top::GetCurrentThread());
return Top::js_entry_sp(Top::GetCurrentThread());
}
v8::Handle<v8::Value> TraceExtension::JSEntrySP(const v8::Arguments& args) {
CHECK_NE(0, GetJsEntrySp());
return v8::Undefined();
}
static void CompileRun(const char* source) {
Script::Compile(String::New(source))->Run();
}
v8::Handle<v8::Value> TraceExtension::JSEntrySPLevel2(
const v8::Arguments& args) {
v8::HandleScope scope;
const Address js_entry_sp = GetJsEntrySp();
CHECK_NE(0, js_entry_sp);
CompileRun("js_entry_sp();");
CHECK_EQ(js_entry_sp, GetJsEntrySp());
return v8::Undefined();
}
static TraceExtension kTraceExtension;
v8::DeclareExtension kTraceExtensionDeclaration(&kTraceExtension);
static void InitializeVM() {
if (env.IsEmpty()) {
v8::HandleScope scope;
const char* extensions[] = { "v8/trace" };
v8::ExtensionConfiguration config(1, extensions);
env = v8::Context::New(&config);
}
v8::HandleScope scope;
env->Enter();
}
static Handle<JSFunction> CompileFunction(const char* source) {
return v8::Utils::OpenHandle(*Script::Compile(String::New(source)));
}
static Local<Value> GetGlobalProperty(const char* name) {
return env->Global()->Get(String::New(name));
}
static Handle<JSFunction> GetGlobalJSFunction(const char* name) {
Handle<JSFunction> js_func(JSFunction::cast(
*(v8::Utils::OpenHandle(
*GetGlobalProperty(name)))));
return js_func;
}
static void CheckRetAddrIsInJSFunction(const char* func_name,
Address ret_addr) {
CheckRetAddrIsInJSFunction(func_name, ret_addr,
GetGlobalJSFunction(func_name));
}
static void SetGlobalProperty(const char* name, Local<Value> value) {
env->Global()->Set(String::New(name), value);
}
static Handle<v8::internal::String> NewString(const char* s) {
return i::Factory::NewStringFromAscii(i::CStrVector(s));
}
namespace v8 {
namespace internal {
class CodeGeneratorPatcher {
public:
CodeGeneratorPatcher() {
CodeGenerator::InlineRuntimeLUT genGetFramePointer =
{&CodeGenerator::GenerateGetFramePointer, "_GetFramePointer"};
// _FastCharCodeAt is not used in our tests.
bool result = CodeGenerator::PatchInlineRuntimeEntry(
NewString("_FastCharCodeAt"),
genGetFramePointer, &oldInlineEntry);
CHECK(result);
}
~CodeGeneratorPatcher() {
CHECK(CodeGenerator::PatchInlineRuntimeEntry(
NewString("_GetFramePointer"),
oldInlineEntry, NULL));
}
private:
CodeGenerator::InlineRuntimeLUT oldInlineEntry;
};
} } // namespace v8::internal
// Creates a global function named 'func_name' that calls the tracing
// function 'trace_func_name' with an actual EBP register value,
// shifted right to be presented as a Smi.
static void CreateTraceCallerFunction(const char* func_name,
const char* trace_func_name) {
i::EmbeddedVector<char, 256> trace_call_buf;
i::OS::SNPrintF(trace_call_buf, "%s(%%_GetFramePointer());", trace_func_name);
// Compile the script.
i::CodeGeneratorPatcher patcher;
bool allow_natives_syntax = i::FLAG_allow_natives_syntax;
i::FLAG_allow_natives_syntax = true;
Handle<JSFunction> func = CompileFunction(trace_call_buf.start());
CHECK(!func.is_null());
i::FLAG_allow_natives_syntax = allow_natives_syntax;
#ifdef DEBUG
v8::internal::Code* func_code = func->code();
CHECK(func_code->IsCode());
func_code->Print();
#endif
SetGlobalProperty(func_name, v8::ToApi<Value>(func));
}
TEST(CFromJSStackTrace) {
TickSample sample;
InitTraceEnv(&sample);
InitializeVM();
v8::HandleScope scope;
CreateTraceCallerFunction("JSFuncDoTrace", "trace");
CompileRun(
"function JSTrace() {"
" JSFuncDoTrace();"
"};\n"
"JSTrace();");
CHECK_GT(sample.frames_count, 1);
// Stack sampling will start from the first JS function, i.e. "JSFuncDoTrace"
CheckRetAddrIsInJSFunction("JSFuncDoTrace",
sample.stack[0]);
CheckRetAddrIsInJSFunction("JSTrace",
sample.stack[1]);
}
TEST(PureJSStackTrace) {
TickSample sample;
InitTraceEnv(&sample);
InitializeVM();
v8::HandleScope scope;
CreateTraceCallerFunction("JSFuncDoTrace", "js_trace");
CompileRun(
"function JSTrace() {"
" JSFuncDoTrace();"
"};\n"
"function OuterJSTrace() {"
" JSTrace();"
"};\n"
"OuterJSTrace();");
CHECK_GT(sample.frames_count, 1);
// Stack sampling will start from the caller of JSFuncDoTrace, i.e. "JSTrace"
CheckRetAddrIsInJSFunction("JSTrace",
sample.stack[0]);
CheckRetAddrIsInJSFunction("OuterJSTrace",
sample.stack[1]);
}
static void CFuncDoTrace() {
Address fp;
#ifdef __GNUC__
fp = reinterpret_cast<Address>(__builtin_frame_address(0));
#elif defined _MSC_VER && defined V8_TARGET_ARCH_IA32
__asm mov [fp], ebp // NOLINT
#elif defined _MSC_VER && defined V8_TARGET_ARCH_X64
// FIXME: I haven't really tried to compile it.
__asm movq [fp], rbp // NOLINT
#endif
DoTrace(fp);
}
static int CFunc(int depth) {
if (depth <= 0) {
CFuncDoTrace();
return 0;
} else {
return CFunc(depth - 1) + 1;
}
}
TEST(PureCStackTrace) {
TickSample sample;
InitTraceEnv(&sample);
// Check that the sampler doesn't crash.
CHECK_EQ(10, CFunc(10));
}
TEST(JsEntrySp) {
InitializeVM();
v8::HandleScope scope;
CHECK_EQ(0, GetJsEntrySp());
CompileRun("a = 1; b = a + 1;");
CHECK_EQ(0, GetJsEntrySp());
CompileRun("js_entry_sp();");
CHECK_EQ(0, GetJsEntrySp());
CompileRun("js_entry_sp_level2();");
CHECK_EQ(0, GetJsEntrySp());
}
#endif // ENABLE_LOGGING_AND_PROFILING

4
deps/v8/test/cctest/test-log.cc

@ -166,7 +166,7 @@ static void SigProfSignalHandler(int signal, siginfo_t* info, void* context) {
static int CheckThatProfilerWorks(int log_pos) {
Logger::ResumeProfiler();
Logger::ResumeProfiler(v8::PROFILER_MODULE_CPU);
CHECK(LoggerTestHelper::IsSamplerActive());
// Verify that the current map of compiled functions has been logged.
@ -207,7 +207,7 @@ static int CheckThatProfilerWorks(int log_pos) {
i::OS::Sleep(1);
}
Logger::PauseProfiler();
Logger::PauseProfiler(v8::PROFILER_MODULE_CPU);
CHECK(!LoggerTestHelper::IsSamplerActive());
// Wait 50 msecs to allow Profiler thread to process the last
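
The test change above shows the shape of the new unified profiler control API from the ChangeLog: pause/resume now take a module mask instead of toggling everything at once. A usage sketch built only from the calls visible in this diff (the wrapper function itself is hypothetical):

#include "v8.h"   // for v8::PROFILER_MODULE_CPU
#include "log.h"  // for v8::internal::Logger, as in the test above

void SampleCpuOnlySketch() {
  v8::internal::Logger::ResumeProfiler(v8::PROFILER_MODULE_CPU);  // CPU only
  // ... run the workload being measured ...
  v8::internal::Logger::PauseProfiler(v8::PROFILER_MODULE_CPU);   // stop it
}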

2
deps/v8/test/mjsunit/mjsunit.status

@ -101,7 +101,5 @@ fuzz-natives: PASS || TIMEOUT
debug-handle: CRASH || FAIL
debug-clearbreakpointgroup: CRASH || FAIL
regress/regress-269: CRASH || FAIL
regress/regress-392: CRASH || FAIL
regress/regress-1200351: CRASH || FAIL
regress/regress-998565: CRASH || FAIL
tools/tickprocessor: PASS || CRASH || FAIL

38
deps/v8/test/mjsunit/regress/regress-416.js

@ -0,0 +1,38 @@
// Copyright 2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Test of invalid Date construction, and TimeClip function.
// See http://code.google.com/p/v8/issues/detail?id=416
assertTrue(isNaN(new Date(1e81).getTime()), "new Date(1e81)");
assertTrue(isNaN(new Date(-1e81).getTime()), "new Date(-1e81)");
assertTrue(isNaN(new Date(1e81, "").getTime()), "new Date(1e81, \"\")");
assertTrue(isNaN(new Date(-1e81, "").getTime()), "new Date(-1e81, \"\")");
assertTrue(isNaN(new Date(Number.NaN).getTime()), "new Date(Number.NaN)");
assertTrue(isNaN(new Date(Number.NaN, "").getTime()),
"new Date(Number.NaN, \"\")");

27
deps/v8/test/mozilla/mozilla.status

@ -1,4 +1,4 @@
# Copyright 2008 the V8 project authors. All rights reserved.
# Copyright 2009 the V8 project authors. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
@ -803,3 +803,28 @@ ecma/Expressions/11.7.3: SKIP
ecma/Expressions/11.10-3: SKIP
ecma/Expressions/11.7.1: SKIP
ecma_3/RegExp/regress-209067: SKIP
[ $ARCH == x64 ]
# Tests that fail on the 64-bit port. This section should be empty
# when the 64-bit port is fully debugged.
js1_2/regexp/regress-9141: FAIL
js1_5/Regress/regress-211590: CRASH
js1_5/Regress/regress-303213: PASS || CRASH
js1_5/extensions/regress-336410-2: CRASH
js1_5/extensions/regress-336410-1: CRASH
js1_5/Function/regress-338001: FAIL || CRASH
js1_5/extensions/regress-371636: CRASH
# The following failures were added when remembered sets were enabled.
js1_5/GC/regress-203278-2: FAIL || PASS || CRASH
js1_5/GC/regress-203278-3: FAIL || PASS
js1_5/Regress/regress-280769-3: FAIL || PASS
js1_5/Regress/regress-280769-4: CRASH || TIMEOUT
js1_5/Regress/regress-290575: CRASH
js1_5/extensions/regress-365692: FAIL || PASS
js1_5/Regress/regress-366601: FAIL
js1_5/Regress/regress-367561-03: CRASH
js1_5/Regress/regress-367561-01: CRASH || PASS
ecma/Expressions/11.7.2: CRASH

5
deps/v8/tools/gyp/v8.gyp

@ -216,6 +216,8 @@
'../../src/builtins.cc',
'../../src/builtins.h',
'../../src/bytecodes-irregexp.h',
'../../src/cfg.cc',
'../../src/cfg.h',
'../../src/char-predicates-inl.h',
'../../src/char-predicates.h',
'../../src/checks.cc',
@ -383,6 +385,7 @@
'../../src/arm/assembler-arm.cc',
'../../src/arm/assembler-arm.h',
'../../src/arm/builtins-arm.cc',
'../../src/arm/cfg-arm.cc',
'../../src/arm/codegen-arm.cc',
'../../src/arm/codegen-arm.h',
'../../src/arm/constants-arm.h',
@ -413,6 +416,7 @@
'../../src/ia32/assembler-ia32.cc',
'../../src/ia32/assembler-ia32.h',
'../../src/ia32/builtins-ia32.cc',
'../../src/ia32/cfg-ia32.cc',
'../../src/ia32/codegen-ia32.cc',
'../../src/ia32/codegen-ia32.h',
'../../src/ia32/cpu-ia32.cc',
@ -441,6 +445,7 @@
'../../src/x64/assembler-x64.cc',
'../../src/x64/assembler-x64.h',
'../../src/x64/builtins-x64.cc',
'../../src/x64/cfg-x64.cc',
'../../src/x64/codegen-x64.cc',
'../../src/x64/codegen-x64.h',
'../../src/x64/cpu-x64.cc',

12
deps/v8/tools/visual_studio/v8_base.vcproj

@ -236,6 +236,18 @@
RelativePath="..\..\src\bytecodes-irregexp.h"
>
</File>
<File
RelativePath="..\..\src\ia32\cfg-ia32.cc"
>
</File>
<File
RelativePath="..\..\src\cfg.cc"
>
</File>
<File
RelativePath="..\..\src\cfg.h"
>
</File>
<File
RelativePath="..\..\src\char-predicates-inl.h"
>

12
deps/v8/tools/visual_studio/v8_base_arm.vcproj

@ -236,6 +236,18 @@
RelativePath="..\..\src\bytecodes-irregexp.h"
>
</File>
<File
RelativePath="..\..\src\arm\cfg-arm.cc"
>
</File>
<File
RelativePath="..\..\src\cfg.cc"
>
</File>
<File
RelativePath="..\..\src\cfg.h"
>
</File>
<File
RelativePath="..\..\src\char-predicates-inl.h"
>

2
deps/v8/tools/visual_studio/v8_cctest.vcproj

@ -210,7 +210,7 @@
>
</File>
<File
RelativePath="..\..\test\cctest\test-log-ia32.cc"
RelativePath="..\..\test\cctest\test-log-stack-tracer.cc"
>
</File>
<File
