mirror of https://github.com/lukechilds/node.git
Ryan Dahl
15 years ago
150 changed files with 8072 additions and 6510 deletions
@@ -0,0 +1,324 @@
# Copyright 2008 the V8 project authors. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
#       notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above
#       copyright notice, this list of conditions and the following
#       disclaimer in the documentation and/or other materials provided
#       with the distribution.
#     * Neither the name of Google Inc. nor the names of its
#       contributors may be used to endorse or promote products derived
#       from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

import sys
from os.path import join, dirname, abspath
root_dir = dirname(File('SConstruct').rfile().abspath)
sys.path.append(join(root_dir, 'tools'))
import js2c
Import('context')


SOURCES = {
  'all': Split("""
    accessors.cc
    allocation.cc
    api.cc
    assembler.cc
    ast.cc
    bootstrapper.cc
    builtins.cc
    checks.cc
    circular-queue.cc
    code-stubs.cc
    codegen.cc
    compilation-cache.cc
    compiler.cc
    contexts.cc
    conversions.cc
    counters.cc
    cpu-profiler.cc
    data-flow.cc
    dateparser.cc
    debug-agent.cc
    debug.cc
    disassembler.cc
    diy-fp.cc
    dtoa.cc
    execution.cc
    factory.cc
    flags.cc
    flow-graph.cc
    frame-element.cc
    frames.cc
    full-codegen.cc
    func-name-inferrer.cc
    global-handles.cc
    fast-dtoa.cc
    fixed-dtoa.cc
    handles.cc
    hashmap.cc
    heap-profiler.cc
    heap.cc
    ic.cc
    interpreter-irregexp.cc
    jsregexp.cc
    jump-target.cc
    liveedit.cc
    log-utils.cc
    log.cc
    mark-compact.cc
    messages.cc
    objects.cc
    objects-visiting.cc
    oprofile-agent.cc
    parser.cc
    profile-generator.cc
    property.cc
    regexp-macro-assembler-irregexp.cc
    regexp-macro-assembler.cc
    regexp-stack.cc
    register-allocator.cc
    rewriter.cc
    runtime.cc
    scanner.cc
    scopeinfo.cc
    scopes.cc
    serialize.cc
    snapshot-common.cc
    spaces.cc
    string-stream.cc
    stub-cache.cc
    token.cc
    top.cc
    type-info.cc
    unicode.cc
    utils.cc
    v8-counters.cc
    v8.cc
    v8threads.cc
    variables.cc
    version.cc
    virtual-frame.cc
    vm-state.cc
    zone.cc
    """),
  'arch:arm': Split("""
    jump-target-light.cc
    virtual-frame-light.cc
    arm/builtins-arm.cc
    arm/codegen-arm.cc
    arm/constants-arm.cc
    arm/cpu-arm.cc
    arm/debug-arm.cc
    arm/disasm-arm.cc
    arm/frames-arm.cc
    arm/full-codegen-arm.cc
    arm/ic-arm.cc
    arm/jump-target-arm.cc
    arm/macro-assembler-arm.cc
    arm/regexp-macro-assembler-arm.cc
    arm/register-allocator-arm.cc
    arm/stub-cache-arm.cc
    arm/virtual-frame-arm.cc
    arm/assembler-arm.cc
    """),
  'arch:mips': Split("""
    mips/assembler-mips.cc
    mips/builtins-mips.cc
    mips/codegen-mips.cc
    mips/constants-mips.cc
    mips/cpu-mips.cc
    mips/debug-mips.cc
    mips/disasm-mips.cc
    mips/full-codegen-mips.cc
    mips/frames-mips.cc
    mips/ic-mips.cc
    mips/jump-target-mips.cc
    mips/macro-assembler-mips.cc
    mips/register-allocator-mips.cc
    mips/stub-cache-mips.cc
    mips/virtual-frame-mips.cc
    """),
  'arch:ia32': Split("""
    jump-target-heavy.cc
    virtual-frame-heavy.cc
    ia32/assembler-ia32.cc
    ia32/builtins-ia32.cc
    ia32/codegen-ia32.cc
    ia32/cpu-ia32.cc
    ia32/debug-ia32.cc
    ia32/disasm-ia32.cc
    ia32/frames-ia32.cc
    ia32/full-codegen-ia32.cc
    ia32/ic-ia32.cc
    ia32/jump-target-ia32.cc
    ia32/macro-assembler-ia32.cc
    ia32/regexp-macro-assembler-ia32.cc
    ia32/register-allocator-ia32.cc
    ia32/stub-cache-ia32.cc
    ia32/virtual-frame-ia32.cc
    """),
  'arch:x64': Split("""
    jump-target-heavy.cc
    virtual-frame-heavy.cc
    x64/assembler-x64.cc
    x64/builtins-x64.cc
    x64/codegen-x64.cc
    x64/cpu-x64.cc
    x64/debug-x64.cc
    x64/disasm-x64.cc
    x64/frames-x64.cc
    x64/full-codegen-x64.cc
    x64/ic-x64.cc
    x64/jump-target-x64.cc
    x64/macro-assembler-x64.cc
    x64/regexp-macro-assembler-x64.cc
    x64/register-allocator-x64.cc
    x64/stub-cache-x64.cc
    x64/virtual-frame-x64.cc
    """),
  'simulator:arm': ['arm/simulator-arm.cc'],
  'simulator:mips': ['mips/simulator-mips.cc'],
  'os:freebsd': ['platform-freebsd.cc', 'platform-posix.cc'],
  'os:openbsd': ['platform-openbsd.cc', 'platform-posix.cc'],
  'os:linux': ['platform-linux.cc', 'platform-posix.cc'],
  'os:android': ['platform-linux.cc', 'platform-posix.cc'],
  'os:macos': ['platform-macos.cc', 'platform-posix.cc'],
  'os:solaris': ['platform-solaris.cc', 'platform-posix.cc'],
  'os:nullos': ['platform-nullos.cc'],
  'os:win32': ['platform-win32.cc'],
  'mode:release': [],
  'mode:debug': [
    'objects-debug.cc', 'prettyprinter.cc', 'regexp-macro-assembler-tracer.cc'
  ]
}
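
# Illustrative only: the helper that consumes this dictionary,
# context.GetRelevantSources(), lives in the build's tools support code and is
# not part of this file. The unused sketch below shows how a selector over the
# 'category:value' keys above could work; the helper body and the option names
# ('arch', 'os', ...) are assumptions, not the actual implementation.
def _example_get_relevant_sources(sources, options):
  # Start from the unconditional 'all' list...
  result = list(sources.get('all', []))
  # ...then append every entry whose 'category:value' key matches an active
  # build option, e.g. 'os:linux' when options == {'os': 'linux', ...}.
  for key, files in sources.items():
    if ':' in key:
      category, value = key.split(':')
      if options.get(category) == value:
        result += files
  return result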


D8_FILES = {
  'all': [
    'd8.cc', 'd8-debug.cc'
  ],
  'os:linux': [
    'd8-posix.cc'
  ],
  'os:macos': [
    'd8-posix.cc'
  ],
  'os:android': [
    'd8-posix.cc'
  ],
  'os:freebsd': [
    'd8-posix.cc'
  ],
  'os:openbsd': [
    'd8-posix.cc'
  ],
  'os:solaris': [
    'd8-posix.cc'
  ],
  'os:win32': [
    'd8-windows.cc'
  ],
  'os:nullos': [
    'd8-windows.cc'  # Empty implementation at the moment.
  ],
  'console:readline': [
    'd8-readline.cc'
  ]
}


LIBRARY_FILES = '''
runtime.js
v8natives.js
array.js
string.js
uri.js
math.js
messages.js
apinatives.js
date.js
regexp.js
json.js
liveedit-debugger.js
mirror-debugger.js
debug-debugger.js
'''.split()


def Abort(message):
  print(message)
  sys.exit(1)


def ConfigureObjectFiles():
  env = Environment()
  env.Replace(**context.flags['v8'])
  context.ApplyEnvOverrides(env)
  env['BUILDERS']['JS2C'] = Builder(action=js2c.JS2C)
  env['BUILDERS']['Snapshot'] = Builder(
      action='$SOURCE $TARGET --logfile "$LOGFILE" --log-snapshot-positions')

  # Build the standard platform-independent source files.
  source_files = context.GetRelevantSources(SOURCES)

  d8_files = context.GetRelevantSources(D8_FILES)
  d8_js = env.JS2C('d8-js.cc', 'd8.js', TYPE='D8')
  d8_js_obj = context.ConfigureObject(env, d8_js, CPPPATH=['.'])
  d8_objs = [context.ConfigureObject(env, [d8_files]), d8_js_obj]

  # Combine the JavaScript library files into a single C++ file and
  # compile it.
  library_files = [s for s in LIBRARY_FILES]
  library_files.append('macros.py')
  libraries_src, libraries_empty_src = env.JS2C(
      ['libraries.cc', 'libraries-empty.cc'], library_files, TYPE='CORE')
  libraries_obj = context.ConfigureObject(env, libraries_src, CPPPATH=['.'])

  # Build dtoa.
  dtoa_env = env.Copy()
  dtoa_env.Replace(**context.flags['dtoa'])
  dtoa_files = ['dtoa-config.c']
  dtoa_obj = context.ConfigureObject(dtoa_env, dtoa_files)

  source_objs = context.ConfigureObject(env, source_files)
  non_snapshot_files = [dtoa_obj, source_objs]

  # Create the snapshot if necessary. For cross compilation you should
  # either do without snapshots and take the performance hit, or build a
  # host VM with the simulator=arm and snapshot=on options, take the
  # resulting snapshot.cc file from obj/release, and put it in the src
  # directory. Then rebuild the VM with the cross compiler and specify
  # snapshot=nobuild on the scons command line.
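  # For example (illustrative invocations inferred from the flags named in
  # the comment above, not commands taken from this file):
  #   scons simulator=arm snapshot=on   # host build; emits obj/release/snapshot.cc
  #   cp obj/release/snapshot.cc src/
  #   scons snapshot=nobuild ...        # cross build reuses the copied snapshot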
  empty_snapshot_obj = context.ConfigureObject(env, 'snapshot-empty.cc')
  mksnapshot_env = env.Copy()
  mksnapshot_env.Replace(**context.flags['mksnapshot'])
  mksnapshot_src = 'mksnapshot.cc'
  mksnapshot = mksnapshot_env.Program(
      'mksnapshot',
      [mksnapshot_src, libraries_obj, non_snapshot_files, empty_snapshot_obj],
      PDB='mksnapshot.exe.pdb')
  if context.use_snapshot:
    if context.build_snapshot:
      snapshot_cc = env.Snapshot('snapshot.cc', mksnapshot,
                                 LOGFILE=File('snapshot.log').abspath)
    else:
      snapshot_cc = 'snapshot.cc'
    snapshot_obj = context.ConfigureObject(env, snapshot_cc, CPPPATH=['.'])
  else:
    snapshot_obj = empty_snapshot_obj
  library_objs = [non_snapshot_files, libraries_obj, snapshot_obj]
  return (library_objs, d8_objs, [mksnapshot])


(library_objs, d8_objs, mksnapshot) = ConfigureObjectFiles()
Return('library_objs d8_objs mksnapshot')
@@ -1,241 +0,0 @@
// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_ARM)

#include "codegen-inl.h"
#include "fast-codegen.h"
#include "scopes.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm())

Register FastCodeGenerator::accumulator0() { return r0; }
Register FastCodeGenerator::accumulator1() { return r1; }
Register FastCodeGenerator::scratch0() { return r3; }
Register FastCodeGenerator::scratch1() { return r4; }
Register FastCodeGenerator::scratch2() { return r5; }
Register FastCodeGenerator::receiver_reg() { return r2; }
Register FastCodeGenerator::context_reg() { return cp; }


void FastCodeGenerator::EmitLoadReceiver() {
  // Offset 2 is due to return address and saved frame pointer.
  int index = 2 + scope()->num_parameters();
  __ ldr(receiver_reg(), MemOperand(sp, index * kPointerSize));
}


void FastCodeGenerator::EmitGlobalVariableLoad(Handle<Object> cell) {
  ASSERT(!destination().is(no_reg));
  ASSERT(cell->IsJSGlobalPropertyCell());

  __ mov(destination(), Operand(cell));
  __ ldr(destination(),
         FieldMemOperand(destination(), JSGlobalPropertyCell::kValueOffset));
  if (FLAG_debug_code) {
    __ mov(ip, Operand(Factory::the_hole_value()));
    __ cmp(destination(), ip);
    __ Check(ne, "DontDelete cells can't contain the hole");
  }

  // The loaded value is not known to be a smi.
  clear_as_smi(destination());
}


void FastCodeGenerator::EmitThisPropertyStore(Handle<String> name) {
  LookupResult lookup;
  info()->receiver()->Lookup(*name, &lookup);

  ASSERT(lookup.holder() == *info()->receiver());
  ASSERT(lookup.type() == FIELD);
  Handle<Map> map(Handle<HeapObject>::cast(info()->receiver())->map());
  int index = lookup.GetFieldIndex() - map->inobject_properties();
  int offset = index * kPointerSize;
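  // Worked example with made-up numbers (not values from this code): a map
  // with 4 in-object properties and a field index of 1 gives
  // index = 1 - 4 = -3 and a negative offset, which is rebased against the
  // instance size below and stored directly in the object; a field index of
  // 5 gives a non-negative offset into the out-of-object properties array.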

  // We will emit the write barrier unless the stored value is statically
  // known to be a smi.
  bool needs_write_barrier = !is_smi(accumulator0());

  // Negative offsets are inobject properties.
  if (offset < 0) {
    offset += map->instance_size();
    __ str(accumulator0(), FieldMemOperand(receiver_reg(), offset));
    if (needs_write_barrier) {
      // Preserve receiver from write barrier.
      __ mov(scratch0(), receiver_reg());
    }
  } else {
    offset += FixedArray::kHeaderSize;
    __ ldr(scratch0(),
           FieldMemOperand(receiver_reg(), JSObject::kPropertiesOffset));
    __ str(accumulator0(), FieldMemOperand(scratch0(), offset));
  }

  if (needs_write_barrier) {
    __ RecordWrite(scratch0(), Operand(offset), scratch1(), scratch2());
  }

  if (destination().is(accumulator1())) {
    __ mov(accumulator1(), accumulator0());
    if (is_smi(accumulator0())) {
      set_as_smi(accumulator1());
    } else {
      clear_as_smi(accumulator1());
    }
  }
}


void FastCodeGenerator::EmitThisPropertyLoad(Handle<String> name) {
  ASSERT(!destination().is(no_reg));
  LookupResult lookup;
  info()->receiver()->Lookup(*name, &lookup);

  ASSERT(lookup.holder() == *info()->receiver());
  ASSERT(lookup.type() == FIELD);
  Handle<Map> map(Handle<HeapObject>::cast(info()->receiver())->map());
  int index = lookup.GetFieldIndex() - map->inobject_properties();
  int offset = index * kPointerSize;

  // Perform the load. Negative offsets are inobject properties.
  if (offset < 0) {
    offset += map->instance_size();
    __ ldr(destination(), FieldMemOperand(receiver_reg(), offset));
  } else {
    offset += FixedArray::kHeaderSize;
    __ ldr(scratch0(),
           FieldMemOperand(receiver_reg(), JSObject::kPropertiesOffset));
    __ ldr(destination(), FieldMemOperand(scratch0(), offset));
  }

  // The loaded value is not known to be a smi.
  clear_as_smi(destination());
}


void FastCodeGenerator::EmitBitOr() {
  if (is_smi(accumulator0()) && is_smi(accumulator1())) {
    // If both operands are known to be a smi then there is no need to check
    // the operands or result. There is no need to perform the operation in
    // an effect context.
    if (!destination().is(no_reg)) {
      __ orr(destination(), accumulator1(), Operand(accumulator0()));
    }
  } else {
    // Left is in accumulator1, right in accumulator0.
    if (destination().is(accumulator0())) {
      __ mov(scratch0(), accumulator0());
      __ orr(destination(), accumulator1(), Operand(accumulator0()));
      Label* bailout =
          info()->AddBailout(accumulator1(), scratch0());  // Left, right.
      __ BranchOnNotSmi(destination(), bailout);
    } else if (destination().is(accumulator1())) {
      __ mov(scratch0(), accumulator1());
      __ orr(destination(), accumulator1(), Operand(accumulator0()));
      Label* bailout = info()->AddBailout(scratch0(), accumulator0());
      __ BranchOnNotSmi(destination(), bailout);
    } else {
      ASSERT(destination().is(no_reg));
      __ orr(scratch0(), accumulator1(), Operand(accumulator0()));
      Label* bailout = info()->AddBailout(accumulator1(), accumulator0());
      __ BranchOnNotSmi(scratch0(), bailout);
    }
  }

  // If we did not bail out, the result (in fact, both inputs too) is known
  // to be a smi.
  set_as_smi(accumulator0());
  set_as_smi(accumulator1());
}


void FastCodeGenerator::Generate(CompilationInfo* compilation_info) {
  ASSERT(info_ == NULL);
  info_ = compilation_info;
  Comment cmnt(masm_, "[ function compiled by fast code generator");

  // Save the caller's frame pointer and set up our own.
  Comment prologue_cmnt(masm(), ";; Prologue");
  __ stm(db_w, sp, r1.bit() | cp.bit() | fp.bit() | lr.bit());
  __ add(fp, sp, Operand(2 * kPointerSize));
  // Note that we keep a live register reference to cp (context) at
  // this point.

  Label* bailout_to_beginning = info()->AddBailout();
  // Receiver (this) is allocated to a fixed register.
  if (info()->has_this_properties()) {
    Comment cmnt(masm(), ";; MapCheck(this)");
    if (FLAG_print_ir) {
      PrintF("MapCheck(this)\n");
    }
    ASSERT(info()->has_receiver() && info()->receiver()->IsHeapObject());
    Handle<HeapObject> object = Handle<HeapObject>::cast(info()->receiver());
    Handle<Map> map(object->map());
    EmitLoadReceiver();
    __ CheckMap(receiver_reg(), scratch0(), map, bailout_to_beginning, false);
  }

  // If there is a global variable access, check that the global object is
  // the same as at lazy-compilation time.
  if (info()->has_globals()) {
    Comment cmnt(masm(), ";; MapCheck(GLOBAL)");
    if (FLAG_print_ir) {
      PrintF("MapCheck(GLOBAL)\n");
    }
    ASSERT(info()->has_global_object());
    Handle<Map> map(info()->global_object()->map());
    __ ldr(scratch0(), CodeGenerator::GlobalObject());
    __ CheckMap(scratch0(), scratch1(), map, bailout_to_beginning, true);
  }

  VisitStatements(function()->body());

  Comment return_cmnt(masm(), ";; Return(<undefined>)");
  if (FLAG_print_ir) {
    PrintF("Return(<undefined>)\n");
  }
  __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
  __ mov(sp, fp);
  __ ldm(ia_w, sp, fp.bit() | lr.bit());
  int32_t sp_delta = (scope()->num_parameters() + 1) * kPointerSize;
  __ add(sp, sp, Operand(sp_delta));
  __ Jump(lr);
}


#undef __


} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_ARM
@@ -1,746 +0,0 @@
// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#include "codegen-inl.h"
#include "data-flow.h"
#include "fast-codegen.h"
#include "scopes.h"

namespace v8 {
namespace internal {

#define BAILOUT(reason)                         \
  do {                                          \
    if (FLAG_trace_bailout) {                   \
      PrintF("%s\n", reason);                   \
    }                                           \
    has_supported_syntax_ = false;              \
    return;                                     \
  } while (false)


#define CHECK_BAILOUT                           \
  do {                                          \
    if (!has_supported_syntax_) return;         \
  } while (false)
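// (The do { ... } while (false) wrappers make each macro expand to a single
// statement, so a use such as "if (cond) BAILOUT("x"); else ..." parses the
// way it reads; the trailing semicolon at the call site completes the
// statement.)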


void FastCodeGenSyntaxChecker::Check(CompilationInfo* info) {
  info_ = info;

  // We do not specialize if we do not have a receiver or if it is not a
  // JS object with fast mode properties.
  if (!info->has_receiver()) BAILOUT("No receiver");
  if (!info->receiver()->IsJSObject()) BAILOUT("Receiver is not an object");
  Handle<JSObject> object = Handle<JSObject>::cast(info->receiver());
  if (!object->HasFastProperties()) BAILOUT("Receiver is in dictionary mode");

  // We do not support stack or heap slots (both of which require
  // allocation).
  Scope* scope = info->scope();
  if (scope->num_stack_slots() > 0) {
    BAILOUT("Function has stack-allocated locals");
  }
  if (scope->num_heap_slots() > 0) {
    BAILOUT("Function has context-allocated locals");
  }

  VisitDeclarations(scope->declarations());
  CHECK_BAILOUT;

  // We do not support empty function bodies.
  if (info->function()->body()->is_empty()) {
    BAILOUT("Function has an empty body");
  }
  VisitStatements(info->function()->body());
}


void FastCodeGenSyntaxChecker::VisitDeclarations(
    ZoneList<Declaration*>* decls) {
  if (!decls->is_empty()) BAILOUT("Function has declarations");
}


void FastCodeGenSyntaxChecker::VisitStatements(ZoneList<Statement*>* stmts) {
  if (stmts->length() != 1) {
    BAILOUT("Function body is not a singleton statement.");
  }
  Visit(stmts->at(0));
}


void FastCodeGenSyntaxChecker::VisitDeclaration(Declaration* decl) {
  UNREACHABLE();
}


void FastCodeGenSyntaxChecker::VisitBlock(Block* stmt) {
  VisitStatements(stmt->statements());
}


void FastCodeGenSyntaxChecker::VisitExpressionStatement(
    ExpressionStatement* stmt) {
  Visit(stmt->expression());
}


void FastCodeGenSyntaxChecker::VisitEmptyStatement(EmptyStatement* stmt) {
  // Supported.
}


void FastCodeGenSyntaxChecker::VisitIfStatement(IfStatement* stmt) {
  BAILOUT("IfStatement");
}


void FastCodeGenSyntaxChecker::VisitContinueStatement(ContinueStatement* stmt) {
BAILOUT("Continuestatement"); |
}


void FastCodeGenSyntaxChecker::VisitBreakStatement(BreakStatement* stmt) {
  BAILOUT("BreakStatement");
}


void FastCodeGenSyntaxChecker::VisitReturnStatement(ReturnStatement* stmt) {
  BAILOUT("ReturnStatement");
}


void FastCodeGenSyntaxChecker::VisitWithEnterStatement(
    WithEnterStatement* stmt) {
  BAILOUT("WithEnterStatement");
}


void FastCodeGenSyntaxChecker::VisitWithExitStatement(WithExitStatement* stmt) {
  BAILOUT("WithExitStatement");
}


void FastCodeGenSyntaxChecker::VisitSwitchStatement(SwitchStatement* stmt) {
  BAILOUT("SwitchStatement");
}


void FastCodeGenSyntaxChecker::VisitDoWhileStatement(DoWhileStatement* stmt) {
  BAILOUT("DoWhileStatement");
}


void FastCodeGenSyntaxChecker::VisitWhileStatement(WhileStatement* stmt) {
  BAILOUT("WhileStatement");
}


void FastCodeGenSyntaxChecker::VisitForStatement(ForStatement* stmt) {
  BAILOUT("ForStatement");
}


void FastCodeGenSyntaxChecker::VisitForInStatement(ForInStatement* stmt) {
  BAILOUT("ForInStatement");
}


void FastCodeGenSyntaxChecker::VisitTryCatchStatement(TryCatchStatement* stmt) {
  BAILOUT("TryCatchStatement");
}


void FastCodeGenSyntaxChecker::VisitTryFinallyStatement(
    TryFinallyStatement* stmt) {
  BAILOUT("TryFinallyStatement");
}


void FastCodeGenSyntaxChecker::VisitDebuggerStatement(
    DebuggerStatement* stmt) {
  BAILOUT("DebuggerStatement");
}


void FastCodeGenSyntaxChecker::VisitFunctionLiteral(FunctionLiteral* expr) {
  BAILOUT("FunctionLiteral");
}


void FastCodeGenSyntaxChecker::VisitSharedFunctionInfoLiteral(
    SharedFunctionInfoLiteral* expr) {
  BAILOUT("SharedFunctionInfoLiteral");
}


void FastCodeGenSyntaxChecker::VisitConditional(Conditional* expr) {
  BAILOUT("Conditional");
}


void FastCodeGenSyntaxChecker::VisitSlot(Slot* expr) {
  UNREACHABLE();
}


void FastCodeGenSyntaxChecker::VisitVariableProxy(VariableProxy* expr) {
  // Only global variable references are supported.
  Variable* var = expr->var();
  if (!var->is_global() || var->is_this()) BAILOUT("Non-global variable");

  // Check that the global variable exists and is non-deletable.
  if (info()->has_global_object()) {
    LookupResult lookup;
    info()->global_object()->Lookup(*expr->name(), &lookup);
    if (!lookup.IsProperty()) {
      BAILOUT("Non-existing global variable");
    }
    // We do not handle global variables with accessors or interceptors.
    if (lookup.type() != NORMAL) {
      BAILOUT("Global variable with accessors or interceptors.");
    }
    // We do not handle deletable global variables.
    if (!lookup.IsDontDelete()) {
      BAILOUT("Deletable global variable");
    }
  }
}


void FastCodeGenSyntaxChecker::VisitLiteral(Literal* expr) {
  BAILOUT("Literal");
}


void FastCodeGenSyntaxChecker::VisitRegExpLiteral(RegExpLiteral* expr) {
  BAILOUT("RegExpLiteral");
}


void FastCodeGenSyntaxChecker::VisitObjectLiteral(ObjectLiteral* expr) {
  BAILOUT("ObjectLiteral");
}


void FastCodeGenSyntaxChecker::VisitArrayLiteral(ArrayLiteral* expr) {
  BAILOUT("ArrayLiteral");
}


void FastCodeGenSyntaxChecker::VisitCatchExtensionObject(
    CatchExtensionObject* expr) {
  BAILOUT("CatchExtensionObject");
}


void FastCodeGenSyntaxChecker::VisitAssignment(Assignment* expr) {
  // Simple assignments to (named) this properties are supported.
  if (expr->op() != Token::ASSIGN) BAILOUT("Non-simple assignment");

  Property* prop = expr->target()->AsProperty();
  if (prop == NULL) BAILOUT("Non-property assignment");
  VariableProxy* proxy = prop->obj()->AsVariableProxy();
  if (proxy == NULL || !proxy->var()->is_this()) {
    BAILOUT("Non-this-property assignment");
  }
  if (!prop->key()->IsPropertyName()) {
    BAILOUT("Non-named-property assignment");
  }

  // We will only specialize for fields on the object itself.
  // Expression::IsPropertyName implies that the name is a literal
  // symbol but we do not assume that.
  Literal* key = prop->key()->AsLiteral();
  if (key != NULL && key->handle()->IsString()) {
    Handle<Object> receiver = info()->receiver();
    Handle<String> name = Handle<String>::cast(key->handle());
    LookupResult lookup;
    receiver->Lookup(*name, &lookup);
    if (!lookup.IsProperty()) {
      BAILOUT("Assigned property not found at compile time");
    }
    if (lookup.holder() != *receiver) BAILOUT("Non-own property assignment");
if (!lookup.type() == FIELD) BAILOUT("Non-field property assignment"); |
  } else {
    UNREACHABLE();
    BAILOUT("Unexpected non-string-literal property key");
  }

  Visit(expr->value());
}


void FastCodeGenSyntaxChecker::VisitThrow(Throw* expr) {
  BAILOUT("Throw");
}


void FastCodeGenSyntaxChecker::VisitProperty(Property* expr) {
  // We support named this property references.
  VariableProxy* proxy = expr->obj()->AsVariableProxy();
  if (proxy == NULL || !proxy->var()->is_this()) {
    BAILOUT("Non-this-property reference");
  }
  if (!expr->key()->IsPropertyName()) {
    BAILOUT("Non-named-property reference");
  }

  // We will only specialize for fields on the object itself.
  // Expression::IsPropertyName implies that the name is a literal
  // symbol but we do not assume that.
  Literal* key = expr->key()->AsLiteral();
  if (key != NULL && key->handle()->IsString()) {
    Handle<Object> receiver = info()->receiver();
    Handle<String> name = Handle<String>::cast(key->handle());
    LookupResult lookup;
    receiver->Lookup(*name, &lookup);
    if (!lookup.IsProperty()) {
      BAILOUT("Referenced property not found at compile time");
    }
    if (lookup.holder() != *receiver) BAILOUT("Non-own property reference");
if (!lookup.type() == FIELD) BAILOUT("Non-field property reference"); |
  } else {
    UNREACHABLE();
    BAILOUT("Unexpected non-string-literal property key");
  }
}


void FastCodeGenSyntaxChecker::VisitCall(Call* expr) {
  BAILOUT("Call");
}


void FastCodeGenSyntaxChecker::VisitCallNew(CallNew* expr) {
  BAILOUT("CallNew");
}


void FastCodeGenSyntaxChecker::VisitCallRuntime(CallRuntime* expr) {
  BAILOUT("CallRuntime");
}


void FastCodeGenSyntaxChecker::VisitUnaryOperation(UnaryOperation* expr) {
  BAILOUT("UnaryOperation");
}


void FastCodeGenSyntaxChecker::VisitCountOperation(CountOperation* expr) {
  BAILOUT("CountOperation");
}


void FastCodeGenSyntaxChecker::VisitBinaryOperation(BinaryOperation* expr) {
  // We support bitwise OR.
  switch (expr->op()) {
    case Token::COMMA:
      BAILOUT("BinaryOperation COMMA");
    case Token::OR:
      BAILOUT("BinaryOperation OR");
    case Token::AND:
      BAILOUT("BinaryOperation AND");

    case Token::BIT_OR:
      // We support expressions nested on the left because they only require
      // a pair of registers to keep all intermediate values in registers
      // (i.e., the expression stack has height no more than two).
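      // For example (illustrative, since "|" is left-associative in
      // JavaScript): in a | b | c the AST is (a | b) | c, so each BIT_OR's
      // right operand is a leaf and left-to-right evaluation needs only the
      // two accumulator registers.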
      if (!expr->right()->IsLeaf()) BAILOUT("expression nested on right");

      // We do not allow subexpressions with side effects because we
      // (currently) bail out to the beginning of the full function. The
      // only expressions with side effects that we would otherwise handle
      // are assignments.
      if (expr->left()->AsAssignment() != NULL ||
          expr->right()->AsAssignment() != NULL) {
        BAILOUT("subexpression of binary operation has side effects");
      }

      Visit(expr->left());
      CHECK_BAILOUT;
      Visit(expr->right());
      break;

    case Token::BIT_XOR:
      BAILOUT("BinaryOperation BIT_XOR");
    case Token::BIT_AND:
      BAILOUT("BinaryOperation BIT_AND");
    case Token::SHL:
      BAILOUT("BinaryOperation SHL");
    case Token::SAR:
      BAILOUT("BinaryOperation SAR");
    case Token::SHR:
      BAILOUT("BinaryOperation SHR");
    case Token::ADD:
      BAILOUT("BinaryOperation ADD");
    case Token::SUB:
      BAILOUT("BinaryOperation SUB");
    case Token::MUL:
      BAILOUT("BinaryOperation MUL");
    case Token::DIV:
      BAILOUT("BinaryOperation DIV");
    case Token::MOD:
      BAILOUT("BinaryOperation MOD");
    default:
      UNREACHABLE();
  }
}


void FastCodeGenSyntaxChecker::VisitCompareOperation(CompareOperation* expr) {
  BAILOUT("CompareOperation");
}


void FastCodeGenSyntaxChecker::VisitThisFunction(ThisFunction* expr) {
  BAILOUT("ThisFunction");
}

#undef BAILOUT
#undef CHECK_BAILOUT


#define __ ACCESS_MASM(masm())

Handle<Code> FastCodeGenerator::MakeCode(CompilationInfo* info) {
  // Label the AST before calling MakeCodePrologue, so AST node numbers are
  // printed with the AST.
  AstLabeler labeler;
  labeler.Label(info);

  CodeGenerator::MakeCodePrologue(info);

  const int kInitialBufferSize = 4 * KB;
  MacroAssembler masm(NULL, kInitialBufferSize);

  // Generate the fast-path code.
  FastCodeGenerator fast_cgen(&masm);
  fast_cgen.Generate(info);
  if (fast_cgen.HasStackOverflow()) {
    ASSERT(!Top::has_pending_exception());
    return Handle<Code>::null();
  }

  // Generate the full code for the function in bailout mode, using the same
  // macro assembler.
  CodeGenerator cgen(&masm);
  CodeGeneratorScope scope(&cgen);
  info->set_mode(CompilationInfo::SECONDARY);
  cgen.Generate(info);
  if (cgen.HasStackOverflow()) {
    ASSERT(!Top::has_pending_exception());
    return Handle<Code>::null();
  }

  Code::Flags flags = Code::ComputeFlags(Code::FUNCTION, NOT_IN_LOOP);
  return CodeGenerator::MakeCodeEpilogue(&masm, flags, info);
}


void FastCodeGenerator::VisitDeclaration(Declaration* decl) {
  UNREACHABLE();
}


void FastCodeGenerator::VisitBlock(Block* stmt) {
  VisitStatements(stmt->statements());
}


void FastCodeGenerator::VisitExpressionStatement(ExpressionStatement* stmt) {
  Visit(stmt->expression());
}


void FastCodeGenerator::VisitEmptyStatement(EmptyStatement* stmt) {
  // Nothing to do.
}


void FastCodeGenerator::VisitIfStatement(IfStatement* stmt) {
  UNREACHABLE();
}


void FastCodeGenerator::VisitContinueStatement(ContinueStatement* stmt) {
  UNREACHABLE();
}


void FastCodeGenerator::VisitBreakStatement(BreakStatement* stmt) {
  UNREACHABLE();
}


void FastCodeGenerator::VisitReturnStatement(ReturnStatement* stmt) {
  UNREACHABLE();
}


void FastCodeGenerator::VisitWithEnterStatement(WithEnterStatement* stmt) {
  UNREACHABLE();
}


void FastCodeGenerator::VisitWithExitStatement(WithExitStatement* stmt) {
  UNREACHABLE();
}


void FastCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  UNREACHABLE();
}


void FastCodeGenerator::VisitDoWhileStatement(DoWhileStatement* stmt) {
  UNREACHABLE();
}


void FastCodeGenerator::VisitWhileStatement(WhileStatement* stmt) {
  UNREACHABLE();
}


void FastCodeGenerator::VisitForStatement(ForStatement* stmt) {
  UNREACHABLE();
}


void FastCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  UNREACHABLE();
}


void FastCodeGenerator::VisitTryCatchStatement(TryCatchStatement* stmt) {
  UNREACHABLE();
}


void FastCodeGenerator::VisitTryFinallyStatement(TryFinallyStatement* stmt) {
  UNREACHABLE();
}


void FastCodeGenerator::VisitDebuggerStatement(DebuggerStatement* stmt) {
  UNREACHABLE();
}


void FastCodeGenerator::VisitFunctionLiteral(FunctionLiteral* expr) {
  UNREACHABLE();
}


void FastCodeGenerator::VisitSharedFunctionInfoLiteral(
    SharedFunctionInfoLiteral* expr) {
  UNREACHABLE();
}


void FastCodeGenerator::VisitConditional(Conditional* expr) {
  UNREACHABLE();
}


void FastCodeGenerator::VisitSlot(Slot* expr) {
  UNREACHABLE();
}


void FastCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
  ASSERT(expr->var()->is_global() && !expr->var()->is_this());
  // Check if we can compile a global variable load directly from the cell.
  ASSERT(info()->has_global_object());
  LookupResult lookup;
  info()->global_object()->Lookup(*expr->name(), &lookup);
  // We only support normal (non-accessor/interceptor) DontDelete properties
  // for now.
  ASSERT(lookup.IsProperty());
  ASSERT_EQ(NORMAL, lookup.type());
  ASSERT(lookup.IsDontDelete());
  Handle<Object> cell(info()->global_object()->GetPropertyCell(&lookup));

  // Global variable lookups do not have side effects, so we do not need to
  // emit code if we are in an effect context.
  if (!destination().is(no_reg)) {
    Comment cmnt(masm(), ";; Global");
    if (FLAG_print_ir) {
      SmartPointer<char> name = expr->name()->ToCString();
      PrintF("%d: t%d = Global(%s)\n", expr->num(),
             expr->num(), *name);
    }
    EmitGlobalVariableLoad(cell);
  }
}


void FastCodeGenerator::VisitLiteral(Literal* expr) {
  UNREACHABLE();
}


void FastCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  UNREACHABLE();
}


void FastCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  UNREACHABLE();
}


void FastCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  UNREACHABLE();
}


void FastCodeGenerator::VisitCatchExtensionObject(CatchExtensionObject* expr) {
  UNREACHABLE();
}


void FastCodeGenerator::VisitAssignment(Assignment* expr) {
  // Known to be a simple this property assignment. Effectively a unary
  // operation.
  { Register my_destination = destination();
    set_destination(accumulator0());
    Visit(expr->value());
    set_destination(my_destination);
  }

  Property* prop = expr->target()->AsProperty();
  ASSERT_NOT_NULL(prop);
  ASSERT_NOT_NULL(prop->obj()->AsVariableProxy());
  ASSERT(prop->obj()->AsVariableProxy()->var()->is_this());
  ASSERT(prop->key()->IsPropertyName());
  Handle<String> name =
      Handle<String>::cast(prop->key()->AsLiteral()->handle());

  Comment cmnt(masm(), ";; Store to this");
  if (FLAG_print_ir) {
    SmartPointer<char> name_string = name->ToCString();
    PrintF("%d: ", expr->num());
    if (!destination().is(no_reg)) PrintF("t%d = ", expr->num());
    PrintF("Store(this, \"%s\", t%d)\n", *name_string,
           expr->value()->num());
  }

  EmitThisPropertyStore(name);
}


void FastCodeGenerator::VisitThrow(Throw* expr) {
  UNREACHABLE();
}


void FastCodeGenerator::VisitProperty(Property* expr) {
  ASSERT_NOT_NULL(expr->obj()->AsVariableProxy());
  ASSERT(expr->obj()->AsVariableProxy()->var()->is_this());
  ASSERT(expr->key()->IsPropertyName());
  if (!destination().is(no_reg)) {
    Handle<String> name =
        Handle<String>::cast(expr->key()->AsLiteral()->handle());

    Comment cmnt(masm(), ";; Load from this");
    if (FLAG_print_ir) {
      SmartPointer<char> name_string = name->ToCString();
      PrintF("%d: t%d = Load(this, \"%s\")\n",
             expr->num(), expr->num(), *name_string);
    }
    EmitThisPropertyLoad(name);
  }
}


void FastCodeGenerator::VisitCall(Call* expr) {
  UNREACHABLE();
}


void FastCodeGenerator::VisitCallNew(CallNew* expr) {
  UNREACHABLE();
}


void FastCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
  UNREACHABLE();
}


void FastCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  UNREACHABLE();
}


void FastCodeGenerator::VisitCountOperation(CountOperation* expr) {
  UNREACHABLE();
}


void FastCodeGenerator::VisitBinaryOperation(BinaryOperation* expr) {
  // We support limited binary operations: bitwise OR only, allowed to be
  // nested on the left.
  ASSERT(expr->op() == Token::BIT_OR);
  ASSERT(expr->right()->IsLeaf());

  { Register my_destination = destination();
    set_destination(accumulator1());
    Visit(expr->left());
    set_destination(accumulator0());
    Visit(expr->right());
    set_destination(my_destination);
  }

  Comment cmnt(masm(), ";; BIT_OR");
  if (FLAG_print_ir) {
    PrintF("%d: ", expr->num());
    if (!destination().is(no_reg)) PrintF("t%d = ", expr->num());
    PrintF("BIT_OR(t%d, t%d)\n", expr->left()->num(), expr->right()->num());
  }
  EmitBitOr();
}


void FastCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  UNREACHABLE();
}


void FastCodeGenerator::VisitThisFunction(ThisFunction* expr) {
  UNREACHABLE();
}

#undef __


} }  // namespace v8::internal
@@ -1,161 +0,0 @@
// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_FAST_CODEGEN_H_
#define V8_FAST_CODEGEN_H_

#if V8_TARGET_ARCH_IA32
#include "ia32/fast-codegen-ia32.h"
#else

#include "v8.h"

#include "ast.h"
#include "compiler.h"
#include "list.h"

namespace v8 {
namespace internal {

class FastCodeGenSyntaxChecker: public AstVisitor {
 public:
  explicit FastCodeGenSyntaxChecker()
      : info_(NULL), has_supported_syntax_(true) {
  }

  void Check(CompilationInfo* info);

  CompilationInfo* info() { return info_; }
  bool has_supported_syntax() { return has_supported_syntax_; }

 private:
  void VisitDeclarations(ZoneList<Declaration*>* decls);
  void VisitStatements(ZoneList<Statement*>* stmts);

  // AST node visit functions.
#define DECLARE_VISIT(type) virtual void Visit##type(type* node);
  AST_NODE_LIST(DECLARE_VISIT)
#undef DECLARE_VISIT

  CompilationInfo* info_;
  bool has_supported_syntax_;

  DISALLOW_COPY_AND_ASSIGN(FastCodeGenSyntaxChecker);
};


class FastCodeGenerator: public AstVisitor {
 public:
  explicit FastCodeGenerator(MacroAssembler* masm)
      : masm_(masm), info_(NULL), destination_(no_reg), smi_bits_(0) {
  }

  static Handle<Code> MakeCode(CompilationInfo* info);

  void Generate(CompilationInfo* compilation_info);

 private:
  MacroAssembler* masm() { return masm_; }
  CompilationInfo* info() { return info_; }

  Register destination() { return destination_; }
  void set_destination(Register reg) { destination_ = reg; }

  FunctionLiteral* function() { return info_->function(); }
  Scope* scope() { return info_->scope(); }

  // Platform-specific fixed registers, all guaranteed distinct.
  Register accumulator0();
  Register accumulator1();
  Register scratch0();
  Register scratch1();
  Register scratch2();
  Register receiver_reg();
  Register context_reg();

  Register other_accumulator(Register reg) {
    ASSERT(reg.is(accumulator0()) || reg.is(accumulator1()));
    return (reg.is(accumulator0())) ? accumulator1() : accumulator0();
  }

  // Flags are true if the respective register is statically known to hold a
  // smi. We do not track every register, only the accumulator registers.
  bool is_smi(Register reg) {
    ASSERT(!reg.is(no_reg));
    return (smi_bits_ & reg.bit()) != 0;
  }
  void set_as_smi(Register reg) {
    ASSERT(!reg.is(no_reg));
    smi_bits_ = smi_bits_ | reg.bit();
  }
  void clear_as_smi(Register reg) {
    ASSERT(!reg.is(no_reg));
    smi_bits_ = smi_bits_ & ~reg.bit();
  }

  // AST node visit functions.
#define DECLARE_VISIT(type) virtual void Visit##type(type* node);
  AST_NODE_LIST(DECLARE_VISIT)
#undef DECLARE_VISIT

  // Emit code to load the receiver from the stack into receiver_reg.
  void EmitLoadReceiver();

  // Emit code to load a global variable directly from a global property
  // cell into the destination register.
  void EmitGlobalVariableLoad(Handle<Object> cell);

  // Emit a store to an own property of this. The stored value is expected
  // in accumulator0 and the receiver in receiver_reg. The receiver
  // register is preserved and the result (the stored value) is left in the
  // destination register.
  void EmitThisPropertyStore(Handle<String> name);

  // Emit a load from an own property of this. The receiver is expected in
  // receiver_reg. The receiver register is preserved and the result is
  // left in the destination register.
  void EmitThisPropertyLoad(Handle<String> name);

  // Emit a bitwise or operation. The left operand is in accumulator1 and
  // the right is in accumulator0. The result should be left in the
  // destination register.
  void EmitBitOr();

  MacroAssembler* masm_;
  CompilationInfo* info_;
  Register destination_;
  uint32_t smi_bits_;

  DISALLOW_COPY_AND_ASSIGN(FastCodeGenerator);
};


} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_IA32

#endif  // V8_FAST_CODEGEN_H_
@@ -1,954 +0,0 @@
// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_IA32)

#include "codegen-inl.h"
#include "fast-codegen.h"
#include "data-flow.h"
#include "scopes.h"

namespace v8 {
namespace internal {

#define BAILOUT(reason)                         \
  do {                                          \
    if (FLAG_trace_bailout) {                   \
      PrintF("%s\n", reason);                   \
    }                                           \
    has_supported_syntax_ = false;              \
    return;                                     \
  } while (false)


#define CHECK_BAILOUT                           \
  do {                                          \
    if (!has_supported_syntax_) return;         \
  } while (false)


void FastCodeGenSyntaxChecker::Check(CompilationInfo* info) {
  info_ = info;

  // We do not specialize if we do not have a receiver or if it is not a
  // JS object with fast mode properties.
  if (!info->has_receiver()) BAILOUT("No receiver");
  if (!info->receiver()->IsJSObject()) BAILOUT("Receiver is not an object");
  Handle<JSObject> object = Handle<JSObject>::cast(info->receiver());
  if (!object->HasFastProperties()) BAILOUT("Receiver is in dictionary mode");

  // We do not support stack or heap slots (both of which require
  // allocation).
  Scope* scope = info->scope();
  if (scope->num_stack_slots() > 0) {
    BAILOUT("Function has stack-allocated locals");
  }
  if (scope->num_heap_slots() > 0) {
    BAILOUT("Function has context-allocated locals");
  }

  VisitDeclarations(scope->declarations());
  CHECK_BAILOUT;

  // We do not support empty function bodies.
  if (info->function()->body()->is_empty()) {
    BAILOUT("Function has an empty body");
  }
  VisitStatements(info->function()->body());
}


void FastCodeGenSyntaxChecker::VisitDeclarations(
    ZoneList<Declaration*>* decls) {
  if (!decls->is_empty()) BAILOUT("Function has declarations");
}


void FastCodeGenSyntaxChecker::VisitStatements(ZoneList<Statement*>* stmts) {
  if (stmts->length() != 1) {
    BAILOUT("Function body is not a singleton statement.");
  }
  Visit(stmts->at(0));
}


void FastCodeGenSyntaxChecker::VisitDeclaration(Declaration* decl) {
  UNREACHABLE();
}


void FastCodeGenSyntaxChecker::VisitBlock(Block* stmt) {
  VisitStatements(stmt->statements());
}


void FastCodeGenSyntaxChecker::VisitExpressionStatement(
    ExpressionStatement* stmt) {
  Visit(stmt->expression());
}


void FastCodeGenSyntaxChecker::VisitEmptyStatement(EmptyStatement* stmt) {
  // Supported.
}


void FastCodeGenSyntaxChecker::VisitIfStatement(IfStatement* stmt) {
  BAILOUT("IfStatement");
}


void FastCodeGenSyntaxChecker::VisitContinueStatement(ContinueStatement* stmt) {
BAILOUT("Continuestatement"); |
}


void FastCodeGenSyntaxChecker::VisitBreakStatement(BreakStatement* stmt) {
  BAILOUT("BreakStatement");
}


void FastCodeGenSyntaxChecker::VisitReturnStatement(ReturnStatement* stmt) {
  BAILOUT("ReturnStatement");
}


void FastCodeGenSyntaxChecker::VisitWithEnterStatement(
    WithEnterStatement* stmt) {
  BAILOUT("WithEnterStatement");
}


void FastCodeGenSyntaxChecker::VisitWithExitStatement(WithExitStatement* stmt) {
  BAILOUT("WithExitStatement");
}


void FastCodeGenSyntaxChecker::VisitSwitchStatement(SwitchStatement* stmt) {
  BAILOUT("SwitchStatement");
}


void FastCodeGenSyntaxChecker::VisitDoWhileStatement(DoWhileStatement* stmt) {
  BAILOUT("DoWhileStatement");
}


void FastCodeGenSyntaxChecker::VisitWhileStatement(WhileStatement* stmt) {
  BAILOUT("WhileStatement");
}


void FastCodeGenSyntaxChecker::VisitForStatement(ForStatement* stmt) {
  BAILOUT("ForStatement");
}


void FastCodeGenSyntaxChecker::VisitForInStatement(ForInStatement* stmt) {
  BAILOUT("ForInStatement");
}


void FastCodeGenSyntaxChecker::VisitTryCatchStatement(TryCatchStatement* stmt) {
  BAILOUT("TryCatchStatement");
}


void FastCodeGenSyntaxChecker::VisitTryFinallyStatement(
    TryFinallyStatement* stmt) {
  BAILOUT("TryFinallyStatement");
}


void FastCodeGenSyntaxChecker::VisitDebuggerStatement(
    DebuggerStatement* stmt) {
  BAILOUT("DebuggerStatement");
}


void FastCodeGenSyntaxChecker::VisitFunctionLiteral(FunctionLiteral* expr) {
  BAILOUT("FunctionLiteral");
}


void FastCodeGenSyntaxChecker::VisitSharedFunctionInfoLiteral(
    SharedFunctionInfoLiteral* expr) {
  BAILOUT("SharedFunctionInfoLiteral");
}


void FastCodeGenSyntaxChecker::VisitConditional(Conditional* expr) {
  BAILOUT("Conditional");
}


void FastCodeGenSyntaxChecker::VisitSlot(Slot* expr) {
  UNREACHABLE();
}


void FastCodeGenSyntaxChecker::VisitVariableProxy(VariableProxy* expr) {
  // Only global variable references are supported.
  Variable* var = expr->var();
  if (!var->is_global() || var->is_this()) BAILOUT("Non-global variable");

  // Check that the global variable exists and is non-deletable.
  if (info()->has_global_object()) {
    LookupResult lookup;
    info()->global_object()->Lookup(*expr->name(), &lookup);
    if (!lookup.IsProperty()) {
      BAILOUT("Non-existing global variable");
    }
    // We do not handle global variables with accessors or interceptors.
    if (lookup.type() != NORMAL) {
      BAILOUT("Global variable with accessors or interceptors.");
    }
    // We do not handle deletable global variables.
    if (!lookup.IsDontDelete()) {
      BAILOUT("Deletable global variable");
    }
  }
}


void FastCodeGenSyntaxChecker::VisitLiteral(Literal* expr) {
  BAILOUT("Literal");
}


void FastCodeGenSyntaxChecker::VisitRegExpLiteral(RegExpLiteral* expr) {
  BAILOUT("RegExpLiteral");
}


void FastCodeGenSyntaxChecker::VisitObjectLiteral(ObjectLiteral* expr) {
  BAILOUT("ObjectLiteral");
}


void FastCodeGenSyntaxChecker::VisitArrayLiteral(ArrayLiteral* expr) {
  BAILOUT("ArrayLiteral");
}


void FastCodeGenSyntaxChecker::VisitCatchExtensionObject(
    CatchExtensionObject* expr) {
  BAILOUT("CatchExtensionObject");
}


void FastCodeGenSyntaxChecker::VisitAssignment(Assignment* expr) {
  // Simple assignments to (named) this properties are supported.
  if (expr->op() != Token::ASSIGN) BAILOUT("Non-simple assignment");

  Property* prop = expr->target()->AsProperty();
  if (prop == NULL) BAILOUT("Non-property assignment");
  VariableProxy* proxy = prop->obj()->AsVariableProxy();
  if (proxy == NULL || !proxy->var()->is_this()) {
    BAILOUT("Non-this-property assignment");
  }
  if (!prop->key()->IsPropertyName()) {
    BAILOUT("Non-named-property assignment");
  }

  // We will only specialize for fields on the object itself.
  // Expression::IsPropertyName implies that the name is a literal
  // symbol but we do not assume that.
  Literal* key = prop->key()->AsLiteral();
  if (key != NULL && key->handle()->IsString()) {
    Handle<Object> receiver = info()->receiver();
    Handle<String> name = Handle<String>::cast(key->handle());
    LookupResult lookup;
    receiver->Lookup(*name, &lookup);
    if (!lookup.IsProperty()) {
      BAILOUT("Assigned property not found at compile time");
    }
    if (lookup.holder() != *receiver) BAILOUT("Non-own property assignment");
if (!lookup.type() == FIELD) BAILOUT("Non-field property assignment"); |
|||
} else { |
|||
UNREACHABLE(); |
|||
BAILOUT("Unexpected non-string-literal property key"); |
|||
} |
|||
|
|||
Visit(expr->value()); |
|||
} |
|||
|
|||
|
|||
void FastCodeGenSyntaxChecker::VisitThrow(Throw* expr) { |
|||
BAILOUT("Throw"); |
|||
} |
|||
|
|||
|
|||
void FastCodeGenSyntaxChecker::VisitProperty(Property* expr) { |
|||
// We support named this property references.
|
|||
VariableProxy* proxy = expr->obj()->AsVariableProxy(); |
|||
if (proxy == NULL || !proxy->var()->is_this()) { |
|||
BAILOUT("Non-this-property reference"); |
|||
} |
|||
if (!expr->key()->IsPropertyName()) { |
|||
BAILOUT("Non-named-property reference"); |
|||
} |
|||
|
|||
// We will only specialize for fields on the object itself.
|
|||
// Expression::IsPropertyName implies that the name is a literal
|
|||
// symbol but we do not assume that.
|
|||
Literal* key = expr->key()->AsLiteral(); |
|||
if (key != NULL && key->handle()->IsString()) { |
|||
Handle<Object> receiver = info()->receiver(); |
|||
Handle<String> name = Handle<String>::cast(key->handle()); |
|||
LookupResult lookup; |
|||
receiver->Lookup(*name, &lookup); |
|||
if (!lookup.IsProperty()) { |
|||
BAILOUT("Referenced property not found at compile time"); |
|||
} |
|||
if (lookup.holder() != *receiver) BAILOUT("Non-own property reference"); |
|||
if (!lookup.type() == FIELD) BAILOUT("Non-field property reference"); |
|||
} else { |
|||
UNREACHABLE(); |
|||
BAILOUT("Unexpected non-string-literal property key"); |
|||
} |
|||
} |
|||
|
|||
|
|||
void FastCodeGenSyntaxChecker::VisitCall(Call* expr) { |
|||
BAILOUT("Call"); |
|||
} |
|||
|
|||
|
|||
void FastCodeGenSyntaxChecker::VisitCallNew(CallNew* expr) { |
|||
BAILOUT("CallNew"); |
|||
} |
|||
|
|||
|
|||
void FastCodeGenSyntaxChecker::VisitCallRuntime(CallRuntime* expr) { |
|||
BAILOUT("CallRuntime"); |
|||
} |
|||
|
|||
|
|||
void FastCodeGenSyntaxChecker::VisitUnaryOperation(UnaryOperation* expr) { |
|||
BAILOUT("UnaryOperation"); |
|||
} |
|||
|
|||
|
|||
void FastCodeGenSyntaxChecker::VisitCountOperation(CountOperation* expr) { |
|||
BAILOUT("CountOperation"); |
|||
} |
|||
|
|||
|
|||
void FastCodeGenSyntaxChecker::VisitBinaryOperation(BinaryOperation* expr) { |
|||
// We support bitwise OR.
|
|||
switch (expr->op()) { |
|||
case Token::COMMA: |
|||
BAILOUT("BinaryOperation COMMA"); |
|||
case Token::OR: |
|||
BAILOUT("BinaryOperation OR"); |
|||
case Token::AND: |
|||
BAILOUT("BinaryOperation AND"); |
|||
|
|||
case Token::BIT_OR: |
|||
// We support expressions nested on the left because they only require
|
|||
// a pair of registers to keep all intermediate values in registers
|
|||
// (i.e., the expression stack has height no more than two).
|
|||
if (!expr->right()->IsLeaf()) BAILOUT("expression nested on right"); |
|||
|
|||
// We do not allow subexpressions with side effects because we
|
|||
// (currently) bail out to the beginning of the full function. The
|
|||
// only expressions with side effects that we would otherwise handle
|
|||
// are assignments.
|
|||
if (expr->left()->AsAssignment() != NULL || |
|||
expr->right()->AsAssignment() != NULL) { |
|||
BAILOUT("subexpression of binary operation has side effects"); |
|||
} |
|||
|
|||
Visit(expr->left()); |
|||
CHECK_BAILOUT; |
|||
Visit(expr->right()); |
|||
break; |
|||
|
|||
case Token::BIT_XOR: |
|||
BAILOUT("BinaryOperation BIT_XOR"); |
|||
case Token::BIT_AND: |
|||
BAILOUT("BinaryOperation BIT_AND"); |
|||
case Token::SHL: |
|||
BAILOUT("BinaryOperation SHL"); |
|||
case Token::SAR: |
|||
BAILOUT("BinaryOperation SAR"); |
|||
case Token::SHR: |
|||
BAILOUT("BinaryOperation SHR"); |
|||
case Token::ADD: |
|||
BAILOUT("BinaryOperation ADD"); |
|||
case Token::SUB: |
|||
BAILOUT("BinaryOperation SUB"); |
|||
case Token::MUL: |
|||
BAILOUT("BinaryOperation MUL"); |
|||
case Token::DIV: |
|||
BAILOUT("BinaryOperation DIV"); |
|||
case Token::MOD: |
|||
BAILOUT("BinaryOperation MOD"); |
|||
default: |
|||
UNREACHABLE(); |
|||
} |
|||
} |
|||
|
|||
|
|||
void FastCodeGenSyntaxChecker::VisitCompareOperation(CompareOperation* expr) { |
|||
BAILOUT("CompareOperation"); |
|||
} |
|||
|
|||
|
|||
void FastCodeGenSyntaxChecker::VisitThisFunction(ThisFunction* expr) { |
|||
BAILOUT("ThisFunction"); |
|||
} |
|||
|
|||
#undef BAILOUT |
|||
#undef CHECK_BAILOUT |
|||
|
|||
|
|||
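// Shorthand for emitting instructions on this generator's macro assembler:
// each "__ op(...)" below expands (via ACCESS_MASM) to a call on masm().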
#define __ ACCESS_MASM(masm())

Handle<Code> FastCodeGenerator::MakeCode(CompilationInfo* info) {
  // Label the AST before calling MakeCodePrologue, so AST node numbers are
  // printed with the AST.
  AstLabeler labeler;
  labeler.Label(info);

  CodeGenerator::MakeCodePrologue(info);

  const int kInitialBufferSize = 4 * KB;
  MacroAssembler masm(NULL, kInitialBufferSize);

  // Generate the fast-path code.
  FastCodeGenerator fast_cgen(&masm);
  fast_cgen.Generate(info);
  if (fast_cgen.HasStackOverflow()) {
    ASSERT(!Top::has_pending_exception());
    return Handle<Code>::null();
  }

  // Generate the full code for the function in bailout mode, using the same
  // macro assembler.
  CodeGenerator cgen(&masm);
  CodeGeneratorScope scope(&cgen);
  info->set_mode(CompilationInfo::SECONDARY);
  cgen.Generate(info);
  if (cgen.HasStackOverflow()) {
    ASSERT(!Top::has_pending_exception());
    return Handle<Code>::null();
  }

  Code::Flags flags = Code::ComputeFlags(Code::FUNCTION, NOT_IN_LOOP);
  return CodeGenerator::MakeCodeEpilogue(&masm, flags, info);
}


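// Fixed register assignments used throughout the ia32 fast code generator.
// All six registers are distinct; esi also serves as the context register
// (it is pushed as the context in the prologue below), so it is never used
// as an accumulator or scratch register here.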
Register FastCodeGenerator::accumulator0() { return eax; }
Register FastCodeGenerator::accumulator1() { return edx; }
Register FastCodeGenerator::scratch0() { return ecx; }
Register FastCodeGenerator::scratch1() { return edi; }
Register FastCodeGenerator::receiver_reg() { return ebx; }
Register FastCodeGenerator::context_reg() { return esi; }


void FastCodeGenerator::EmitLoadReceiver() {
  // Offset 2 is due to return address and saved frame pointer.
  int index = 2 + function()->scope()->num_parameters();
  __ mov(receiver_reg(), Operand(ebp, index * kPointerSize));
}


void FastCodeGenerator::EmitGlobalVariableLoad(Handle<Object> cell) {
  ASSERT(!destination().is(no_reg));
  ASSERT(cell->IsJSGlobalPropertyCell());

  __ mov(destination(), Immediate(cell));
  __ mov(destination(),
         FieldOperand(destination(), JSGlobalPropertyCell::kValueOffset));
  if (FLAG_debug_code) {
    __ cmp(destination(), Factory::the_hole_value());
    __ Check(not_equal, "DontDelete cells can't contain the hole");
  }

  // The loaded value is not known to be a smi.
  clear_as_smi(destination());
}


void FastCodeGenerator::EmitThisPropertyStore(Handle<String> name) {
  LookupResult lookup;
  info()->receiver()->Lookup(*name, &lookup);

  ASSERT(lookup.holder() == *info()->receiver());
  ASSERT(lookup.type() == FIELD);
  Handle<Map> map(Handle<HeapObject>::cast(info()->receiver())->map());
  int index = lookup.GetFieldIndex() - map->inobject_properties();
  int offset = index * kPointerSize;

  // We will emit the write barrier unless the stored value is statically
  // known to be a smi.
  bool needs_write_barrier = !is_smi(accumulator0());

  // Perform the store.  Negative offsets are inobject properties.
  if (offset < 0) {
    offset += map->instance_size();
    __ mov(FieldOperand(receiver_reg(), offset), accumulator0());
    if (needs_write_barrier) {
      // Preserve receiver from write barrier.
      __ mov(scratch0(), receiver_reg());
    }
  } else {
    offset += FixedArray::kHeaderSize;
    __ mov(scratch0(),
           FieldOperand(receiver_reg(), JSObject::kPropertiesOffset));
    __ mov(FieldOperand(scratch0(), offset), accumulator0());
  }

  if (needs_write_barrier) {
    if (destination().is(no_reg)) {
      // After RecordWrite accumulator0 is only accidentally a smi, but it
      // is already marked as not known to be one.
      __ RecordWrite(scratch0(), offset, accumulator0(), scratch1());
    } else {
      // Copy the value to the other accumulator to preserve a copy from the
      // write barrier.  One of the accumulators is available as a scratch
      // register.  Neither is a smi.
      __ mov(accumulator1(), accumulator0());
      clear_as_smi(accumulator1());
      Register value_scratch = other_accumulator(destination());
      __ RecordWrite(scratch0(), offset, value_scratch, scratch1());
    }
  } else if (destination().is(accumulator1())) {
    __ mov(accumulator1(), accumulator0());
    // Is a smi because we do not need the write barrier.
    set_as_smi(accumulator1());
  }
}


void FastCodeGenerator::EmitThisPropertyLoad(Handle<String> name) {
  ASSERT(!destination().is(no_reg));
  LookupResult lookup;
  info()->receiver()->Lookup(*name, &lookup);

  ASSERT(lookup.holder() == *info()->receiver());
  ASSERT(lookup.type() == FIELD);
  Handle<Map> map(Handle<HeapObject>::cast(info()->receiver())->map());
  int index = lookup.GetFieldIndex() - map->inobject_properties();
  int offset = index * kPointerSize;

  // Perform the load.  Negative offsets are inobject properties.
  if (offset < 0) {
    offset += map->instance_size();
    __ mov(destination(), FieldOperand(receiver_reg(), offset));
  } else {
    offset += FixedArray::kHeaderSize;
    __ mov(scratch0(),
           FieldOperand(receiver_reg(), JSObject::kPropertiesOffset));
    __ mov(destination(), FieldOperand(scratch0(), offset));
  }

  // The loaded value is not known to be a smi.
  clear_as_smi(destination());
}


void FastCodeGenerator::EmitBitOr() {
  if (is_smi(accumulator0()) && is_smi(accumulator1())) {
    // If both operands are known to be a smi then there is no need to check
    // the operands or result.  There is no need to perform the operation in
    // an effect context.
    if (!destination().is(no_reg)) {
      // Leave the result in the destination register.  Bitwise or is
      // commutative.
      __ or_(destination(), Operand(other_accumulator(destination())));
    }
  } else {
    // Left is in accumulator1, right in accumulator0.
    Label* bailout = NULL;
    if (destination().is(accumulator0())) {
      __ mov(scratch0(), accumulator0());
      __ or_(destination(), Operand(accumulator1()));  // Or is commutative.
      __ test(destination(), Immediate(kSmiTagMask));
      bailout = info()->AddBailout(accumulator1(), scratch0());  // Left, right.
    } else if (destination().is(accumulator1())) {
      __ mov(scratch0(), accumulator1());
      __ or_(destination(), Operand(accumulator0()));
      __ test(destination(), Immediate(kSmiTagMask));
      bailout = info()->AddBailout(scratch0(), accumulator0());
    } else {
      ASSERT(destination().is(no_reg));
      __ mov(scratch0(), accumulator1());
      __ or_(scratch0(), Operand(accumulator0()));
      __ test(scratch0(), Immediate(kSmiTagMask));
      bailout = info()->AddBailout(accumulator1(), accumulator0());
    }
    __ j(not_zero, bailout, not_taken);
  }

  // If we didn't bail out, the result (in fact, both inputs too) is known
  // to be a smi.
  set_as_smi(accumulator0());
  set_as_smi(accumulator1());
}


void FastCodeGenerator::Generate(CompilationInfo* compilation_info) {
  ASSERT(info_ == NULL);
  info_ = compilation_info;
  Comment cmnt(masm_, "[ function compiled by fast code generator");

  // Save the caller's frame pointer and set up our own.
  Comment prologue_cmnt(masm(), ";; Prologue");
  __ push(ebp);
  __ mov(ebp, esp);
  __ push(esi);  // Context.
  __ push(edi);  // Closure.
  // Note that we keep a live register reference to esi (context) at this
  // point.

  Label* bailout_to_beginning = info()->AddBailout();
  // Receiver (this) is allocated to a fixed register.
  if (info()->has_this_properties()) {
    Comment cmnt(masm(), ";; MapCheck(this)");
    if (FLAG_print_ir) {
      PrintF("#: MapCheck(this)\n");
    }
    ASSERT(info()->has_receiver() && info()->receiver()->IsHeapObject());
    Handle<HeapObject> object = Handle<HeapObject>::cast(info()->receiver());
    Handle<Map> map(object->map());
    EmitLoadReceiver();
    __ CheckMap(receiver_reg(), map, bailout_to_beginning, false);
  }

  // If there is a global variable access, check that the global object is
  // the same as at lazy-compilation time.
  if (info()->has_globals()) {
    Comment cmnt(masm(), ";; MapCheck(GLOBAL)");
    if (FLAG_print_ir) {
      PrintF("#: MapCheck(GLOBAL)\n");
    }
    ASSERT(info()->has_global_object());
    Handle<Map> map(info()->global_object()->map());
    __ mov(scratch0(), CodeGenerator::GlobalObject());
    __ CheckMap(scratch0(), map, bailout_to_beginning, true);
  }

  VisitStatements(function()->body());

  Comment return_cmnt(masm(), ";; Return(<undefined>)");
  if (FLAG_print_ir) {
    PrintF("#: Return(<undefined>)\n");
  }
  __ mov(eax, Factory::undefined_value());
  __ mov(esp, ebp);
  __ pop(ebp);
  __ ret((scope()->num_parameters() + 1) * kPointerSize);
}


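// Any node type that FastCodeGenSyntaxChecker bailed out on never reaches
// the fast code generator, so the corresponding visitors below simply
// assert that they are unreachable.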
void FastCodeGenerator::VisitDeclaration(Declaration* decl) {
  UNREACHABLE();
}


void FastCodeGenerator::VisitBlock(Block* stmt) {
  VisitStatements(stmt->statements());
}


void FastCodeGenerator::VisitExpressionStatement(ExpressionStatement* stmt) {
  Visit(stmt->expression());
}


void FastCodeGenerator::VisitEmptyStatement(EmptyStatement* stmt) {
  // Nothing to do.
}


void FastCodeGenerator::VisitIfStatement(IfStatement* stmt) {
  UNREACHABLE();
}


void FastCodeGenerator::VisitContinueStatement(ContinueStatement* stmt) {
  UNREACHABLE();
}


void FastCodeGenerator::VisitBreakStatement(BreakStatement* stmt) {
  UNREACHABLE();
}


void FastCodeGenerator::VisitReturnStatement(ReturnStatement* stmt) {
  UNREACHABLE();
}


void FastCodeGenerator::VisitWithEnterStatement(WithEnterStatement* stmt) {
  UNREACHABLE();
}


void FastCodeGenerator::VisitWithExitStatement(WithExitStatement* stmt) {
  UNREACHABLE();
}


void FastCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
  UNREACHABLE();
}


void FastCodeGenerator::VisitDoWhileStatement(DoWhileStatement* stmt) {
  UNREACHABLE();
}


void FastCodeGenerator::VisitWhileStatement(WhileStatement* stmt) {
  UNREACHABLE();
}


void FastCodeGenerator::VisitForStatement(ForStatement* stmt) {
  UNREACHABLE();
}


void FastCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
  UNREACHABLE();
}


void FastCodeGenerator::VisitTryCatchStatement(TryCatchStatement* stmt) {
  UNREACHABLE();
}


void FastCodeGenerator::VisitTryFinallyStatement(TryFinallyStatement* stmt) {
  UNREACHABLE();
}


void FastCodeGenerator::VisitDebuggerStatement(DebuggerStatement* stmt) {
  UNREACHABLE();
}


void FastCodeGenerator::VisitFunctionLiteral(FunctionLiteral* expr) {
  UNREACHABLE();
}


void FastCodeGenerator::VisitSharedFunctionInfoLiteral(
    SharedFunctionInfoLiteral* expr) {
  UNREACHABLE();
}


void FastCodeGenerator::VisitConditional(Conditional* expr) {
  UNREACHABLE();
}


void FastCodeGenerator::VisitSlot(Slot* expr) {
  UNREACHABLE();
}


void FastCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
  ASSERT(expr->var()->is_global() && !expr->var()->is_this());
  // Check if we can compile a global variable load directly from the cell.
  ASSERT(info()->has_global_object());
  LookupResult lookup;
  info()->global_object()->Lookup(*expr->name(), &lookup);
  // We only support normal (non-accessor/interceptor) DontDelete properties
  // for now.
  ASSERT(lookup.IsProperty());
  ASSERT_EQ(NORMAL, lookup.type());
  ASSERT(lookup.IsDontDelete());
  Handle<Object> cell(info()->global_object()->GetPropertyCell(&lookup));

  // Global variable lookups do not have side effects, so we do not need to
  // emit code if we are in an effect context.
  if (!destination().is(no_reg)) {
    Comment cmnt(masm(), ";; Global");
    if (FLAG_print_ir) {
      SmartPointer<char> name = expr->name()->ToCString();
      PrintF("%d: t%d = Global(%s)\n", expr->num(),
             expr->num(), *name);
    }
    EmitGlobalVariableLoad(cell);
  }
}


void FastCodeGenerator::VisitLiteral(Literal* expr) {
  UNREACHABLE();
}


void FastCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
  UNREACHABLE();
}


void FastCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
  UNREACHABLE();
}


void FastCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
  UNREACHABLE();
}


void FastCodeGenerator::VisitCatchExtensionObject(CatchExtensionObject* expr) {
  UNREACHABLE();
}


void FastCodeGenerator::VisitAssignment(Assignment* expr) {
  // Known to be a simple this property assignment.  Effectively a unary
  // operation.
  { Register my_destination = destination();
    set_destination(accumulator0());
    Visit(expr->value());
    set_destination(my_destination);
  }

  Property* prop = expr->target()->AsProperty();
  ASSERT_NOT_NULL(prop);
  ASSERT_NOT_NULL(prop->obj()->AsVariableProxy());
  ASSERT(prop->obj()->AsVariableProxy()->var()->is_this());
  ASSERT(prop->key()->IsPropertyName());
  Handle<String> name =
      Handle<String>::cast(prop->key()->AsLiteral()->handle());

  Comment cmnt(masm(), ";; Store to this");
  if (FLAG_print_ir) {
    SmartPointer<char> name_string = name->ToCString();
    PrintF("%d: ", expr->num());
    if (!destination().is(no_reg)) PrintF("t%d = ", expr->num());
    PrintF("Store(this, \"%s\", t%d)\n", *name_string,
           expr->value()->num());
  }

  EmitThisPropertyStore(name);
}


void FastCodeGenerator::VisitThrow(Throw* expr) {
  UNREACHABLE();
}


void FastCodeGenerator::VisitProperty(Property* expr) {
  ASSERT_NOT_NULL(expr->obj()->AsVariableProxy());
  ASSERT(expr->obj()->AsVariableProxy()->var()->is_this());
  ASSERT(expr->key()->IsPropertyName());
  if (!destination().is(no_reg)) {
    Handle<String> name =
        Handle<String>::cast(expr->key()->AsLiteral()->handle());

    Comment cmnt(masm(), ";; Load from this");
    if (FLAG_print_ir) {
      SmartPointer<char> name_string = name->ToCString();
      PrintF("%d: t%d = Load(this, \"%s\")\n",
             expr->num(), expr->num(), *name_string);
    }
    EmitThisPropertyLoad(name);
  }
}


void FastCodeGenerator::VisitCall(Call* expr) {
  UNREACHABLE();
}


void FastCodeGenerator::VisitCallNew(CallNew* expr) {
  UNREACHABLE();
}


void FastCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
  UNREACHABLE();
}


void FastCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
  UNREACHABLE();
}


void FastCodeGenerator::VisitCountOperation(CountOperation* expr) {
  UNREACHABLE();
}


void FastCodeGenerator::VisitBinaryOperation(BinaryOperation* expr) {
  // We support limited binary operations: only bitwise OR, which may be
  // nested only on the left.
  ASSERT(expr->op() == Token::BIT_OR);
  ASSERT(expr->right()->IsLeaf());

  { Register my_destination = destination();
    set_destination(accumulator1());
    Visit(expr->left());
    set_destination(accumulator0());
    Visit(expr->right());
    set_destination(my_destination);
  }

  Comment cmnt(masm(), ";; BIT_OR");
  if (FLAG_print_ir) {
    PrintF("%d: ", expr->num());
    if (!destination().is(no_reg)) PrintF("t%d = ", expr->num());
    PrintF("BIT_OR(t%d, t%d)\n", expr->left()->num(), expr->right()->num());
  }
  EmitBitOr();
}


void FastCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
  UNREACHABLE();
}


void FastCodeGenerator::VisitThisFunction(ThisFunction* expr) {
  UNREACHABLE();
}

#undef __


} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_IA32
@ -1,155 +0,0 @@
// Copyright 2010 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_FAST_CODEGEN_IA32_H_
#define V8_FAST_CODEGEN_IA32_H_

#include "v8.h"

#include "ast.h"
#include "compiler.h"
#include "list.h"

namespace v8 {
namespace internal {

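// Checks a function's AST against the small subset of syntax the fast code
// generator supports; any unsupported construct clears
// has_supported_syntax_ so that compilation can fall back to the full code
// generator.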
class FastCodeGenSyntaxChecker: public AstVisitor {
 public:
  explicit FastCodeGenSyntaxChecker()
      : info_(NULL), has_supported_syntax_(true) {
  }

  void Check(CompilationInfo* info);

  CompilationInfo* info() { return info_; }
  bool has_supported_syntax() { return has_supported_syntax_; }

 private:
  void VisitDeclarations(ZoneList<Declaration*>* decls);
  void VisitStatements(ZoneList<Statement*>* stmts);

  // AST node visit functions.
#define DECLARE_VISIT(type) virtual void Visit##type(type* node);
  AST_NODE_LIST(DECLARE_VISIT)
#undef DECLARE_VISIT

  CompilationInfo* info_;
  bool has_supported_syntax_;

  DISALLOW_COPY_AND_ASSIGN(FastCodeGenSyntaxChecker);
};


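// Emits the fast-path code for a function that passed the syntax check
// above.  The full code is emitted afterwards with the same macro assembler
// (in SECONDARY mode), and the fast path bails out to it whenever a runtime
// assumption such as a map check fails.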
class FastCodeGenerator: public AstVisitor {
 public:
  explicit FastCodeGenerator(MacroAssembler* masm)
      : masm_(masm), info_(NULL), destination_(no_reg), smi_bits_(0) {
  }

  static Handle<Code> MakeCode(CompilationInfo* info);

  void Generate(CompilationInfo* compilation_info);

 private:
  MacroAssembler* masm() { return masm_; }
  CompilationInfo* info() { return info_; }

  Register destination() { return destination_; }
  void set_destination(Register reg) { destination_ = reg; }

  FunctionLiteral* function() { return info_->function(); }
  Scope* scope() { return info_->scope(); }

  // Platform-specific fixed registers, all guaranteed distinct.
  Register accumulator0();
  Register accumulator1();
  Register scratch0();
  Register scratch1();
  Register receiver_reg();
  Register context_reg();

  Register other_accumulator(Register reg) {
    ASSERT(reg.is(accumulator0()) || reg.is(accumulator1()));
    return (reg.is(accumulator0())) ? accumulator1() : accumulator0();
  }

  // Flags are true if the respective register is statically known to hold a
  // smi.  We do not track every register, only the accumulator registers.
  bool is_smi(Register reg) {
    ASSERT(!reg.is(no_reg));
    return (smi_bits_ & reg.bit()) != 0;
  }
  void set_as_smi(Register reg) {
    ASSERT(!reg.is(no_reg));
    smi_bits_ = smi_bits_ | reg.bit();
  }
  void clear_as_smi(Register reg) {
    ASSERT(!reg.is(no_reg));
    smi_bits_ = smi_bits_ & ~reg.bit();
  }

  // AST node visit functions.
#define DECLARE_VISIT(type) virtual void Visit##type(type* node);
  AST_NODE_LIST(DECLARE_VISIT)
#undef DECLARE_VISIT

  // Emit code to load the receiver from the stack into receiver_reg.
  void EmitLoadReceiver();

  // Emit code to load a global variable directly from a global property
  // cell into the destination register.
  void EmitGlobalVariableLoad(Handle<Object> cell);

  // Emit a store to an own property of this.  The stored value is expected
  // in accumulator0 and the receiver in receiver_reg.  The receiver
  // register is preserved and the result (the stored value) is left in the
  // destination register.
  void EmitThisPropertyStore(Handle<String> name);

  // Emit a load from an own property of this.  The receiver is expected in
  // receiver_reg.  The receiver register is preserved and the result is
  // left in the destination register.
  void EmitThisPropertyLoad(Handle<String> name);

  // Emit a bitwise or operation.  The left operand is in accumulator1 and
  // the right is in accumulator0.  The result should be left in the
  // destination register.
  void EmitBitOr();

  MacroAssembler* masm_;
  CompilationInfo* info_;

  Register destination_;
  uint32_t smi_bits_;

  DISALLOW_COPY_AND_ASSIGN(FastCodeGenerator);
};


} }  // namespace v8::internal

#endif  // V8_FAST_CODEGEN_IA32_H_
@ -0,0 +1,139 @@
// Copyright 2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#include "ic-inl.h"
#include "objects-visiting.h"

namespace v8 {
namespace internal {


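// A string type is a shortcut candidate if it is a cons string whose
// instance type permits the GC to replace it by its first component once
// the second component becomes empty.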
static inline bool IsShortcutCandidate(int type) {
  return ((type & kShortcutTypeMask) == kShortcutTypeTag);
}


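// Strings dispatch on their representation (sequential, cons, external) and
// encoding; every other instance type maps either to a dedicated visitor or
// to a size-specialized data-object, JS-object, or struct visitor.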
StaticVisitorBase::VisitorId StaticVisitorBase::GetVisitorId(
    int instance_type,
    int instance_size) {
  if (instance_type < FIRST_NONSTRING_TYPE) {
    switch (instance_type & kStringRepresentationMask) {
      case kSeqStringTag:
        if ((instance_type & kStringEncodingMask) == kAsciiStringTag) {
          return kVisitSeqAsciiString;
        } else {
          return kVisitSeqTwoByteString;
        }

      case kConsStringTag:
        if (IsShortcutCandidate(instance_type)) {
          return kVisitShortcutCandidate;
        } else {
          return kVisitConsString;
        }

      case kExternalStringTag:
        return GetVisitorIdForSize(kVisitDataObject,
                                   kVisitDataObjectGeneric,
                                   ExternalString::kSize);
    }
    UNREACHABLE();
  }

  switch (instance_type) {
    case BYTE_ARRAY_TYPE:
      return kVisitByteArray;

    case FIXED_ARRAY_TYPE:
      return kVisitFixedArray;

    case ODDBALL_TYPE:
      return kVisitOddball;

    case MAP_TYPE:
      return kVisitMap;

    case CODE_TYPE:
      return kVisitCode;

    case JS_GLOBAL_PROPERTY_CELL_TYPE:
      return kVisitPropertyCell;

    case SHARED_FUNCTION_INFO_TYPE:
      return kVisitSharedFunctionInfo;

    case PROXY_TYPE:
      return GetVisitorIdForSize(kVisitDataObject,
                                 kVisitDataObjectGeneric,
                                 Proxy::kSize);

    case FILLER_TYPE:
      return kVisitDataObjectGeneric;

    case JS_OBJECT_TYPE:
    case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
    case JS_VALUE_TYPE:
    case JS_ARRAY_TYPE:
    case JS_REGEXP_TYPE:
    case JS_FUNCTION_TYPE:
    case JS_GLOBAL_PROXY_TYPE:
    case JS_GLOBAL_OBJECT_TYPE:
    case JS_BUILTINS_OBJECT_TYPE:
      return GetVisitorIdForSize(kVisitJSObject,
                                 kVisitJSObjectGeneric,
                                 instance_size);

    case HEAP_NUMBER_TYPE:
    case PIXEL_ARRAY_TYPE:
    case EXTERNAL_BYTE_ARRAY_TYPE:
    case EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE:
    case EXTERNAL_SHORT_ARRAY_TYPE:
    case EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE:
    case EXTERNAL_INT_ARRAY_TYPE:
    case EXTERNAL_UNSIGNED_INT_ARRAY_TYPE:
    case EXTERNAL_FLOAT_ARRAY_TYPE:
      return GetVisitorIdForSize(kVisitDataObject,
                                 kVisitDataObjectGeneric,
                                 instance_size);

#define MAKE_STRUCT_CASE(NAME, Name, name) \
        case NAME##_TYPE:
      STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
      return GetVisitorIdForSize(kVisitStruct,
                                 kVisitStructGeneric,
                                 instance_size);

    default:
      UNREACHABLE();
      return kVisitorIdCount;
  }
}

} }  // namespace v8::internal
@ -0,0 +1,382 @@
// Copyright 2006-2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_OBJECTS_ITERATION_H_
#define V8_OBJECTS_ITERATION_H_

// This file provides base classes and auxiliary methods for defining
// static object visitors used during GC.
// Visiting a HeapObject body with a normal ObjectVisitor requires performing
// two switches on the object's instance type to determine object size and
// layout, plus one or more virtual method calls on the visitor itself.
// A static visitor is different: it provides a dispatch table which contains
// pointers to specialized visit functions.  Each map has a visitor_id
// field which contains the index of the specialized visitor to use.

namespace v8 {
namespace internal {


// Base class for all static visitors.
class StaticVisitorBase : public AllStatic {
 public:
  enum VisitorId {
    kVisitSeqAsciiString = 0,
    kVisitSeqTwoByteString,
    kVisitShortcutCandidate,
    kVisitByteArray,
    kVisitFixedArray,

    // For data objects, JS objects, and structs, along with the generic
    // visitor, which can visit an object of any size, we provide visitors
    // specialized by object size in words.
    // Ids of specialized visitors are declared in a linear order (without
    // holes), starting from the id of the visitor specialized for 2-word
    // objects (the base visitor id) and ending with the id of the generic
    // visitor.
    // Method GetVisitorIdForSize depends on this ordering to calculate the
    // id of a specialized visitor from a given instance size, the base
    // visitor id, and the generic visitor's id.

    kVisitDataObject,
    kVisitDataObject2 = kVisitDataObject,
    kVisitDataObject3,
    kVisitDataObject4,
    kVisitDataObject5,
    kVisitDataObject6,
    kVisitDataObject7,
    kVisitDataObject8,
    kVisitDataObject9,
    kVisitDataObjectGeneric,

    kVisitJSObject,
    kVisitJSObject2 = kVisitJSObject,
    kVisitJSObject3,
    kVisitJSObject4,
    kVisitJSObject5,
    kVisitJSObject6,
    kVisitJSObject7,
    kVisitJSObject8,
    kVisitJSObject9,
    kVisitJSObjectGeneric,

    kVisitStruct,
    kVisitStruct2 = kVisitStruct,
    kVisitStruct3,
    kVisitStruct4,
    kVisitStruct5,
    kVisitStruct6,
    kVisitStruct7,
    kVisitStruct8,
    kVisitStruct9,
    kVisitStructGeneric,

    kVisitConsString,
    kVisitOddball,
    kVisitCode,
    kVisitMap,
    kVisitPropertyCell,
    kVisitSharedFunctionInfo,

    kVisitorIdCount,
    kMinObjectSizeInWords = 2
  };

  // Determine which specialized visitor should be used for the given
  // instance type and instance size.
  static VisitorId GetVisitorId(int instance_type, int instance_size);

  static VisitorId GetVisitorId(Map* map) {
    return GetVisitorId(map->instance_type(), map->instance_size());
  }

  // For visitors that allow specialization by size, calculate the VisitorId
  // from the object size, the base visitor id, and the generic visitor id.
  static VisitorId GetVisitorIdForSize(VisitorId base,
                                       VisitorId generic,
                                       int object_size) {
    ASSERT((base == kVisitDataObject) ||
           (base == kVisitStruct) ||
           (base == kVisitJSObject));
    ASSERT(IsAligned(object_size, kPointerSize));
    ASSERT(kMinObjectSizeInWords * kPointerSize <= object_size);
    ASSERT(object_size < Page::kMaxHeapObjectSize);

    const VisitorId specialization = static_cast<VisitorId>(
        base + (object_size >> kPointerSizeLog2) - kMinObjectSizeInWords);

    return Min(specialization, generic);
  }
};


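// Table of visit callbacks indexed by VisitorId.  GetVisitor dispatches on
// the visitor id cached in the object's map.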
template<typename Callback>
class VisitorDispatchTable {
 public:
  inline Callback GetVisitor(Map* map) {
    return callbacks_[map->visitor_id()];
  }

  void Register(StaticVisitorBase::VisitorId id, Callback callback) {
    ASSERT((0 <= id) && (id < StaticVisitorBase::kVisitorIdCount));
    callbacks_[id] = callback;
  }

  template<typename Visitor,
           StaticVisitorBase::VisitorId base,
           StaticVisitorBase::VisitorId generic,
           int object_size_in_words>
  void RegisterSpecialization() {
    static const int size = object_size_in_words * kPointerSize;
    Register(StaticVisitorBase::GetVisitorIdForSize(base, generic, size),
             &Visitor::template VisitSpecialized<size>);
  }


  template<typename Visitor,
           StaticVisitorBase::VisitorId base,
           StaticVisitorBase::VisitorId generic>
  void RegisterSpecializations() {
    STATIC_ASSERT(
        (generic - base + StaticVisitorBase::kMinObjectSizeInWords) == 10);
    RegisterSpecialization<Visitor, base, generic, 2>();
    RegisterSpecialization<Visitor, base, generic, 3>();
    RegisterSpecialization<Visitor, base, generic, 4>();
    RegisterSpecialization<Visitor, base, generic, 5>();
    RegisterSpecialization<Visitor, base, generic, 6>();
    RegisterSpecialization<Visitor, base, generic, 7>();
    RegisterSpecialization<Visitor, base, generic, 8>();
    RegisterSpecialization<Visitor, base, generic, 9>();
    Register(generic, &Visitor::Visit);
  }

 private:
  Callback callbacks_[StaticVisitorBase::kVisitorIdCount];
};


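// Shared helper for the body visitors below: visits all pointer fields in
// the half-open offset range [start_offset, end_offset) of an object.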
template<typename StaticVisitor>
class BodyVisitorBase : public AllStatic {
 public:
  static inline void IteratePointers(HeapObject* object,
                                     int start_offset,
                                     int end_offset) {
    Object** start_slot = reinterpret_cast<Object**>(object->address() +
                                                     start_offset);
    Object** end_slot = reinterpret_cast<Object**>(object->address() +
                                                   end_offset);
    StaticVisitor::VisitPointers(start_slot, end_slot);
  }
};


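// Body visitor for objects whose size must be computed from the map
// (BodyDescriptor::SizeOf); also provides the size-specialized variant
// registered through RegisterSpecialization.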
template<typename StaticVisitor, typename BodyDescriptor, typename ReturnType>
class FlexibleBodyVisitor : public BodyVisitorBase<StaticVisitor> {
 public:
  static inline ReturnType Visit(Map* map, HeapObject* object) {
    int object_size = BodyDescriptor::SizeOf(map, object);
    IteratePointers(object, BodyDescriptor::kStartOffset, object_size);
    return static_cast<ReturnType>(object_size);
  }

  template<int object_size>
  static inline ReturnType VisitSpecialized(Map* map, HeapObject* object) {
    IteratePointers(object, BodyDescriptor::kStartOffset, object_size);
    return static_cast<ReturnType>(object_size);
  }
};


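// Body visitor for objects with a fixed layout: the body occupies
// [kStartOffset, kEndOffset) and the size is the constant
// BodyDescriptor::kSize.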
template<typename StaticVisitor, typename BodyDescriptor, typename ReturnType>
class FixedBodyVisitor : public BodyVisitorBase<StaticVisitor> {
 public:
  static inline ReturnType Visit(Map* map, HeapObject* object) {
    IteratePointers(object,
                    BodyDescriptor::kStartOffset,
                    BodyDescriptor::kEndOffset);
    return static_cast<ReturnType>(BodyDescriptor::kSize);
  }
};


// Base class for visitors used for a linear new space iteration.
// IterateBody returns the size of the visited object.
// Certain types of objects (e.g. Code objects) are not handled
// by this visitor's dispatch table because they cannot appear
// in the new space.
//
// This class is intended to be used in the following way:
//
// class SomeVisitor : public StaticNewSpaceVisitor<SomeVisitor> {
//   ...
// }
//
// This is an example of the curiously recurring template pattern
// (see http://en.wikipedia.org/wiki/Curiously_recurring_template_pattern).
// We use CRTP to guarantee aggressive compile time optimizations (i.e.
// inlining and specialization of StaticVisitor::VisitPointers methods).
template<typename StaticVisitor>
class StaticNewSpaceVisitor : public StaticVisitorBase {
 public:
  static void Initialize() {
    table_.Register(kVisitShortcutCandidate,
                    &FixedBodyVisitor<StaticVisitor,
                                      ConsString::BodyDescriptor,
                                      int>::Visit);

    table_.Register(kVisitConsString,
                    &FixedBodyVisitor<StaticVisitor,
                                      ConsString::BodyDescriptor,
                                      int>::Visit);

    table_.Register(kVisitFixedArray,
                    &FlexibleBodyVisitor<StaticVisitor,
                                         FixedArray::BodyDescriptor,
                                         int>::Visit);

    table_.Register(kVisitByteArray, &VisitByteArray);

    table_.Register(kVisitSharedFunctionInfo,
                    &FixedBodyVisitor<StaticVisitor,
                                      SharedFunctionInfo::BodyDescriptor,
                                      int>::Visit);

    table_.Register(kVisitSeqAsciiString, &VisitSeqAsciiString);

    table_.Register(kVisitSeqTwoByteString, &VisitSeqTwoByteString);

    table_.RegisterSpecializations<DataObjectVisitor,
                                   kVisitDataObject,
                                   kVisitDataObjectGeneric>();
    table_.RegisterSpecializations<JSObjectVisitor,
                                   kVisitJSObject,
                                   kVisitJSObjectGeneric>();
    table_.RegisterSpecializations<StructVisitor,
                                   kVisitStruct,
                                   kVisitStructGeneric>();
  }

  static inline int IterateBody(Map* map, HeapObject* obj) {
    return table_.GetVisitor(map)(map, obj);
  }

  static inline void VisitPointers(Object** start, Object** end) {
    for (Object** p = start; p < end; p++) StaticVisitor::VisitPointer(p);
  }

 private:
  static inline int VisitByteArray(Map* map, HeapObject* object) {
    return reinterpret_cast<ByteArray*>(object)->ByteArraySize();
  }

  static inline int VisitSeqAsciiString(Map* map, HeapObject* object) {
    return SeqAsciiString::cast(object)->
        SeqAsciiStringSize(map->instance_type());
  }

  static inline int VisitSeqTwoByteString(Map* map, HeapObject* object) {
    return SeqTwoByteString::cast(object)->
        SeqTwoByteStringSize(map->instance_type());
  }

  class DataObjectVisitor {
   public:
    template<int object_size>
    static inline int VisitSpecialized(Map* map, HeapObject* object) {
      return object_size;
    }

    static inline int Visit(Map* map, HeapObject* object) {
      return map->instance_size();
    }
  };

  typedef FlexibleBodyVisitor<StaticVisitor,
                              StructBodyDescriptor,
                              int> StructVisitor;

  typedef FlexibleBodyVisitor<StaticVisitor,
                              JSObject::BodyDescriptor,
                              int> JSObjectVisitor;

  typedef int (*Callback)(Map* map, HeapObject* object);

  static VisitorDispatchTable<Callback> table_;
};


template<typename StaticVisitor>
VisitorDispatchTable<typename StaticNewSpaceVisitor<StaticVisitor>::Callback>
    StaticNewSpaceVisitor<StaticVisitor>::table_;


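// Dynamic (ObjectVisitor-based) and static variants of code-object body
// iteration: both visit the relocation info pointer and then every pointer
// embedded in the relocation information.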
void Code::CodeIterateBody(ObjectVisitor* v) {
  int mode_mask = RelocInfo::kCodeTargetMask |
                  RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
                  RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
                  RelocInfo::ModeMask(RelocInfo::JS_RETURN) |
                  RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
                  RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);

  // Use the relocation info pointer before it is visited by
  // the heap compaction in the next statement.
  RelocIterator it(this, mode_mask);

  IteratePointers(v,
                  kRelocationInfoOffset,
                  kRelocationInfoOffset + kPointerSize);

  for (; !it.done(); it.next()) {
    it.rinfo()->Visit(v);
  }
}


template<typename StaticVisitor>
void Code::CodeIterateBody() {
  int mode_mask = RelocInfo::kCodeTargetMask |
                  RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
                  RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
                  RelocInfo::ModeMask(RelocInfo::JS_RETURN) |
                  RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
                  RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);

  // Use the relocation info pointer before it is visited by
  // the heap compaction in the next statement.
  RelocIterator it(this, mode_mask);

  StaticVisitor::VisitPointer(
      reinterpret_cast<Object**>(this->address() + kRelocationInfoOffset));

  for (; !it.done(); it.next()) {
    it.rinfo()->template Visit<StaticVisitor>();
  }
}


} }  // namespace v8::internal

#endif  // V8_OBJECTS_ITERATION_H_
@ -0,0 +1,580 @@
// Copyright 2006-2008 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

// This module contains the platform-specific code.  This makes the rest of
// the code less dependent on operating system, compilers and runtime
// libraries.  This module specifically does not deal with differences
// between different processor architectures.
// The platform classes have the same definition for all platforms.  The
// implementation for a particular platform is put in platform_<os>.cc.
// The build system then uses the implementation for the target platform.
//
// This design has been chosen because it is simple and fast.  Alternatively,
// the platform dependent classes could have been implemented using abstract
// superclasses with virtual methods and having specializations for each
// platform.  This design was rejected because it was more complicated and
// slower.  It would require factory methods for selecting the right
// implementation and the overhead of virtual methods for performance
// sensitive operations like mutex locking/unlocking.

#ifndef V8_PLATFORM_H_ |
|||
#define V8_PLATFORM_H_ |
|||
|
|||
#define V8_INFINITY INFINITY |
|||
|
|||
// Windows specific stuff.
|
|||
#ifdef WIN32 |
|||
|
|||
// Microsoft Visual C++ specific stuff.
|
|||
#ifdef _MSC_VER |
|||
|
|||
enum { |
|||
FP_NAN, |
|||
FP_INFINITE, |
|||
FP_ZERO, |
|||
FP_SUBNORMAL, |
|||
FP_NORMAL |
|||
}; |
|||
|
|||
#undef V8_INFINITY |
|||
#define V8_INFINITY HUGE_VAL |
|||
|
|||
namespace v8 { |
|||
namespace internal { |
|||
int isfinite(double x); |
|||
} } |
|||
int isnan(double x); |
|||
int isinf(double x); |
|||
int isless(double x, double y); |
|||
int isgreater(double x, double y); |
|||
int fpclassify(double x); |
|||
int signbit(double x); |
|||
|
|||
int strncasecmp(const char* s1, const char* s2, int n); |
|||
|
|||
#endif // _MSC_VER
|
|||
|
|||
// Random is missing on both Visual Studio and MinGW.
|
|||
int random(); |
|||
|
|||
#endif // WIN32
|
|||
|
|||
|
|||
#ifdef __sun |
|||
# ifndef signbit |
|||
int signbit(double x); |
|||
# endif |
|||
#endif |
|||
|
|||
|
|||
// GCC specific stuff
|
|||
#ifdef __GNUC__ |
|||
|
|||
// Needed for va_list on at least MinGW and Android.
|
|||
#include <stdarg.h> |
|||
|
|||
#define __GNUC_VERSION__ (__GNUC__ * 10000 + __GNUC_MINOR__ * 100) |
|||
|
|||
// Unfortunately, the INFINITY macro cannot be used with the '-pedantic'
|
|||
// warning flag and certain versions of GCC due to a bug:
|
|||
// http://gcc.gnu.org/bugzilla/show_bug.cgi?id=11931
|
|||
// For now, we use the more involved template-based version from <limits>, but
|
|||
// only when compiling with GCC versions affected by the bug (2.96.x - 4.0.x)
|
|||
// __GNUC_PREREQ is not defined in GCC for Mac OS X, so we define our own macro
|
|||
#if __GNUC_VERSION__ >= 29600 && __GNUC_VERSION__ < 40100 |
|||
#include <limits> |
|||
#undef V8_INFINITY |
|||
#define V8_INFINITY std::numeric_limits<double>::infinity() |
|||
#endif |
|||
|
|||
#endif // __GNUC__
|
|||
|
|||
namespace v8 { |
|||
namespace internal { |
|||
|
|||
// Use AtomicWord for a machine-sized pointer. It is assumed that
|
|||
// reads and writes of naturally aligned values of this type are atomic.
|
|||
typedef intptr_t AtomicWord; |
|||
|
|||
class Semaphore; |
|||
|
|||
double ceiling(double x); |
|||
double modulo(double x, double y); |
|||
|
|||
// Forward declarations.
|
|||
class Socket; |
|||
|
|||
// ----------------------------------------------------------------------------
|
|||
// OS
|
|||
//
|
|||
// This class has static methods for the different platform specific
|
|||
// functions. Add methods here to cope with differences between the
|
|||
// supported platforms.
|
|||
|
|||
class OS { |
|||
public: |
|||
// Initializes the platform OS support. Called once at VM startup.
|
|||
static void Setup(); |
|||
|
|||
// Returns the accumulated user time for thread. This routine
|
|||
// can be used for profiling. The implementation should
|
|||
// strive for high-precision timer resolution, preferable
|
|||
// micro-second resolution.
|
|||
static int GetUserTime(uint32_t* secs, uint32_t* usecs); |
|||
|
|||
// Get a tick counter normalized to one tick per microsecond.
|
|||
// Used for calculating time intervals.
|
|||
static int64_t Ticks(); |
|||
|
|||
// Returns current time as the number of milliseconds since
|
|||
// 00:00:00 UTC, January 1, 1970.
|
|||
static double TimeCurrentMillis(); |
|||
|
|||
// Returns a string identifying the current time zone. The
|
|||
// timestamp is used for determining if DST is in effect.
|
|||
static const char* LocalTimezone(double time); |
|||
|
|||
// Returns the local time offset in milliseconds east of UTC without
|
|||
// taking daylight savings time into account.
|
|||
static double LocalTimeOffset(); |
|||
|
|||
// Returns the daylight savings offset for the given time.
|
|||
static double DaylightSavingsOffset(double time); |
|||
|
|||
// Returns last OS error.
|
|||
static int GetLastError(); |
|||
|
|||
static FILE* FOpen(const char* path, const char* mode); |
|||
|
|||
// Log file open mode is platform-dependent due to line ends issues.
|
|||
static const char* LogFileOpenMode; |
|||
|
|||
// Print output to console. This is mostly used for debugging output.
|
|||
// On platforms that has standard terminal output, the output
|
|||
// should go to stdout.
|
|||
static void Print(const char* format, ...); |
|||
static void VPrint(const char* format, va_list args); |
|||
|
|||
// Print error output to console. This is mostly used for error message
|
|||
// output. On platforms that has standard terminal output, the output
|
|||
// should go to stderr.
|
|||
static void PrintError(const char* format, ...); |
|||
static void VPrintError(const char* format, va_list args); |
|||
|
|||
// Allocate/Free memory used by JS heap. Pages are readable/writable, but
|
|||
// they are not guaranteed to be executable unless 'executable' is true.
|
|||
// Returns the address of allocated memory, or NULL if failed.
|
|||
static void* Allocate(const size_t requested, |
|||
size_t* allocated, |
|||
bool is_executable); |
|||
static void Free(void* address, const size_t size); |
|||
// Get the Alignment guaranteed by Allocate().
|
|||
static size_t AllocateAlignment(); |
|||

#ifdef ENABLE_HEAP_PROTECTION
  // Protect/unprotect a block of memory by marking it read-only/writable.
  static void Protect(void* address, size_t size);
  static void Unprotect(void* address, size_t size, bool is_executable);
#endif

  // Returns an indication of whether a pointer is in a space that
  // has been allocated by Allocate(). This method may conservatively
  // always return false, but giving more accurate information may
  // improve the robustness of the stack dump code in the presence of
  // heap corruption.
  static bool IsOutsideAllocatedSpace(void* pointer);

  // Sleep for a number of milliseconds.
  static void Sleep(const int milliseconds);

  // Abort the current process.
  static void Abort();

  // Debug break.
  static void DebugBreak();

  // Walk the stack.
  static const int kStackWalkError = -1;
  static const int kStackWalkMaxNameLen = 256;
  static const int kStackWalkMaxTextLen = 256;
  struct StackFrame {
    void* address;
    char text[kStackWalkMaxTextLen];
  };

  static int StackWalk(Vector<StackFrame> frames);

  // Factory method for creating a platform-dependent Mutex.
  // Please use delete to reclaim the storage for the returned Mutex.
  static Mutex* CreateMutex();

  // Factory method for creating a platform-dependent Semaphore.
  // Please use delete to reclaim the storage for the returned Semaphore.
  static Semaphore* CreateSemaphore(int count);

  // Factory method for creating a platform-dependent Socket.
  // Please use delete to reclaim the storage for the returned Socket.
  static Socket* CreateSocket();

  class MemoryMappedFile {
   public:
    static MemoryMappedFile* create(const char* name, int size, void* initial);
    virtual ~MemoryMappedFile() { }
    virtual void* memory() = 0;
  };

  // Safe formatting print. Ensures that str is always null-terminated.
  // Returns the number of chars written, or -1 if output was truncated.
  static int SNPrintF(Vector<char> str, const char* format, ...);
  static int VSNPrintF(Vector<char> str,
                       const char* format,
                       va_list args);
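
  // Usage sketch (the buffer name and contents are hypothetical; Vector is
  // assumed to wrap a char array together with its length, as elsewhere in
  // V8):
  //
  //   char buffer[128];
  //   int written = OS::SNPrintF(Vector<char>(buffer, sizeof(buffer)),
  //                              "pid=%d", 42);
  //   // 'written' is -1 if the output was truncated; the buffer is
  //   // null-terminated either way.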

  static char* StrChr(char* str, int c);
  static void StrNCpy(Vector<char> dest, const char* src, size_t n);

  // Support for the profiler. Can do nothing, in which case ticks
  // occurring in shared libraries will not be properly accounted for.
  static void LogSharedLibraryAddresses();

  // The return value indicates the CPU features we are sure of because of
  // the OS. For example, MacOSX doesn't run on any x86 CPUs that don't have
  // SSE2 instructions.
  // This is a little messy because the interpretation depends on the
  // combination of CPU and OS. The bits in the answer correspond to the bit
  // positions indicated by the members of the CpuFeature enum from globals.h.
  static uint64_t CpuFeaturesImpliedByPlatform();

  // Returns the double constant NAN.
  static double nan_value();

  // Support runtime detection of VFP3 on ARM CPUs.
  static bool ArmCpuHasFeature(CpuFeature feature);

  // Returns the activation frame alignment constraint, or zero if
  // the platform doesn't care. Guaranteed to be a power of two.
  static int ActivationFrameAlignment();

  static void ReleaseStore(volatile AtomicWord* ptr, AtomicWord value);

 private:
  static const int msPerSecond = 1000;

  DISALLOW_IMPLICIT_CONSTRUCTORS(OS);
};


class VirtualMemory {
 public:
  // Reserves virtual memory with the given size.
  explicit VirtualMemory(size_t size);
  ~VirtualMemory();

  // Returns whether the memory has been reserved.
  bool IsReserved();

  // Returns the start address of the reserved memory.
  void* address() {
    ASSERT(IsReserved());
    return address_;
  }

  // Returns the size of the reserved memory.
  size_t size() { return size_; }

  // Commits real memory. Returns whether the operation succeeded.
  bool Commit(void* address, size_t size, bool is_executable);

  // Uncommits real memory. Returns whether the operation succeeded.
  bool Uncommit(void* address, size_t size);

 private:
  void* address_;  // Start address of the virtual memory.
  size_t size_;    // Size of the virtual memory.
};
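
// A sketch of the intended reserve-then-commit lifecycle (sizes are
// illustrative; real callers keep commits aligned with what the OS granted):
//
//   VirtualMemory reservation(1024 * 1024);  // Reserve 1 MB of addresses.
//   if (reservation.IsReserved()) {
//     void* start = reservation.address();
//     if (reservation.Commit(start, 64 * 1024, false)) {
//       // The first 64 KB is now backed by real, non-executable memory.
//       reservation.Uncommit(start, 64 * 1024);
//     }
//   }  // The destructor releases the whole reservation.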


// ----------------------------------------------------------------------------
// ThreadHandle
//
// A ThreadHandle represents an identifier for a thread. The ThreadHandle
// does not own the underlying OS handle. Thread handles can be used for
// referring to threads and testing equality.

class ThreadHandle {
 public:
  enum Kind { SELF, INVALID };
  explicit ThreadHandle(Kind kind);

  // Destructor.
  ~ThreadHandle();

  // Test for thread running.
  bool IsSelf() const;

  // Test for valid thread handle.
  bool IsValid() const;

  // Get platform-specific data.
  class PlatformData;
  PlatformData* thread_handle_data() { return data_; }

  // Initialize the handle to the given kind.
  void Initialize(Kind kind);

 private:
  PlatformData* data_;  // Captures platform dependent data.
};


// ----------------------------------------------------------------------------
// Thread
//
// Thread objects are used for creating and running threads. When the Start()
// method is called the new thread starts running the Run() method in the new
// thread. The Thread object should not be deallocated before the thread has
// terminated.

class Thread: public ThreadHandle {
 public:
  // Opaque data type for thread-local storage keys.
  enum LocalStorageKey {};

  // Create new thread.
  Thread();
  virtual ~Thread();

  // Start new thread by calling the Run() method in the new thread.
  void Start();

  // Wait until thread terminates.
  void Join();

  // Abstract method for run handler.
  virtual void Run() = 0;

  // Thread-local storage.
  static LocalStorageKey CreateThreadLocalKey();
  static void DeleteThreadLocalKey(LocalStorageKey key);
  static void* GetThreadLocal(LocalStorageKey key);
  static int GetThreadLocalInt(LocalStorageKey key) {
    return static_cast<int>(reinterpret_cast<intptr_t>(GetThreadLocal(key)));
  }
  static void SetThreadLocal(LocalStorageKey key, void* value);
  static void SetThreadLocalInt(LocalStorageKey key, int value) {
    SetThreadLocal(key, reinterpret_cast<void*>(static_cast<intptr_t>(value)));
  }
  static bool HasThreadLocal(LocalStorageKey key) {
    return GetThreadLocal(key) != NULL;
  }
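
  // Usage sketch for the thread-local accessors (the key variable is
  // hypothetical): a key is created once and can then be read and written
  // independently from any thread.
  //
  //   static Thread::LocalStorageKey key = Thread::CreateThreadLocalKey();
  //   Thread::SetThreadLocalInt(key, 7);
  //   int seven = Thread::GetThreadLocalInt(key);  // In the same thread.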

  // A hint to the scheduler to let another thread run.
  static void YieldCPU();

 private:
  class PlatformData;
  PlatformData* data_;
  DISALLOW_COPY_AND_ASSIGN(Thread);
};
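
// A minimal subclass sketch (WorkerThread is hypothetical): Run() is the
// thread body, and the creator must Join() before the object is destroyed.
//
//   class WorkerThread : public Thread {
//    public:
//     virtual void Run() {
//       OS::Print("worker running\n");
//     }
//   };
//
//   WorkerThread worker;
//   worker.Start();  // Run() executes in the new thread.
//   worker.Join();   // Wait for termination before 'worker' goes away.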


// ----------------------------------------------------------------------------
// Mutex
//
// Mutexes are used for serializing access to non-reentrant sections of code.
// The implementations of mutex should allow for nested/recursive locking.

class Mutex {
 public:
  virtual ~Mutex() {}

  // Locks the given mutex. If the mutex is currently unlocked, it becomes
  // locked and owned by the calling thread, and the call returns
  // immediately. If the mutex is already locked by another thread, the
  // calling thread is suspended until the mutex is unlocked.
  virtual int Lock() = 0;

  // Unlocks the given mutex. The mutex is assumed to be locked and owned by
  // the calling thread on entrance.
  virtual int Unlock() = 0;
};
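
// Usage sketch: mutexes are created through the OS factory and must be
// deleted by the caller.
//
//   Mutex* mutex = OS::CreateMutex();
//   mutex->Lock();
//   // ... non-reentrant section ...
//   mutex->Unlock();
//   delete mutex;
//
// Where a lexical scope matches the critical section, prefer the ScopedLock
// below, which cannot leak a held lock.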


// ----------------------------------------------------------------------------
// ScopedLock
//
// Stack-allocated ScopedLocks provide block-scoped locking and unlocking
// of a mutex.

class ScopedLock {
 public:
  explicit ScopedLock(Mutex* mutex): mutex_(mutex) {
    mutex_->Lock();
  }
  ~ScopedLock() {
    mutex_->Unlock();
  }

 private:
  Mutex* mutex_;
  DISALLOW_COPY_AND_ASSIGN(ScopedLock);
};
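
// RAII usage sketch (the function and the guarded list are hypothetical):
// the mutex is released on every exit path, including early returns.
//
//   void AppendSafely(Item* item) {
//     ScopedLock lock(list_mutex);  // list_mutex: a Mutex* created earlier.
//     if (item == NULL) return;     // Unlocks here too.
//     list->Append(item);
//   }                               // Unlocks when 'lock' leaves scope.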


// ----------------------------------------------------------------------------
// Semaphore
//
// A semaphore object is a synchronization object that maintains a count. The
// count is decremented each time a thread completes a wait for the semaphore
// object and incremented each time a thread signals the semaphore. When the
// count reaches zero, threads waiting for the semaphore block until the
// count becomes non-zero.

class Semaphore {
 public:
  virtual ~Semaphore() {}

  // Suspends the calling thread until the semaphore counter is non-zero
  // and then decrements the semaphore counter.
  virtual void Wait() = 0;

  // Suspends the calling thread until the counter is non-zero or the timeout
  // time has passed. If a timeout happens the return value is false and the
  // counter is unchanged. Otherwise the semaphore counter is decremented and
  // true is returned. The timeout value is specified in microseconds.
  virtual bool Wait(int timeout) = 0;

  // Increments the semaphore counter.
  virtual void Signal() = 0;
};
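
// Sketch of one-shot completion signalling between two threads (names are
// illustrative). The counter starts at zero, so the waiter blocks until the
// worker signals:
//
//   Semaphore* done = OS::CreateSemaphore(0);
//   // Worker thread, when finished:
//   //   done->Signal();
//   // Coordinating thread, either blocking indefinitely:
//   done->Wait();
//   // ... or giving up after 100 ms (the timeout is in microseconds):
//   //   bool finished = done->Wait(100 * 1000);
//   delete done;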


// ----------------------------------------------------------------------------
// Socket
//

class Socket {
 public:
  virtual ~Socket() {}

  // Server initialization.
  virtual bool Bind(const int port) = 0;
  virtual bool Listen(int backlog) const = 0;
  virtual Socket* Accept() const = 0;

  // Client initialization.
  virtual bool Connect(const char* host, const char* port) = 0;

  // Shut down the socket for both reading and writing. This causes blocking
  // Send and Receive calls to exit. After Shutdown the Socket object cannot
  // be used for any communication.
  virtual bool Shutdown() = 0;

  // Data transmission.
  virtual int Send(const char* data, int len) const = 0;
  virtual int Receive(char* data, int len) const = 0;

  // Set the value of the SO_REUSEADDR socket option.
  virtual bool SetReuseAddress(bool reuse_address) = 0;

  virtual bool IsValid() const = 0;

  static bool Setup();
  static int LastError();
  static uint16_t HToN(uint16_t value);
  static uint16_t NToH(uint16_t value);
  static uint32_t HToN(uint32_t value);
  static uint32_t NToH(uint32_t value);
};
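
// Client-side usage sketch (host and port are illustrative). Note the
// asymmetry in the interface: Connect() takes the port as a string while
// Bind() takes an int.
//
//   Socket* socket = OS::CreateSocket();
//   if (socket->IsValid() && socket->Connect("localhost", "5858")) {
//     socket->Send("hello", 5);
//     socket->Shutdown();
//   }
//   delete socket;
//
// HToN/NToH convert 16- and 32-bit values between host and network byte
// order, e.g. when filling in socket address structures by hand.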


// ----------------------------------------------------------------------------
// Sampler
//
// A sampler periodically samples the state of the VM and optionally
// (if used for profiling) the program counter and stack pointer for
// the thread that created it.

// TickSample captures the information collected for each sample.
class TickSample {
 public:
  TickSample()
      : state(OTHER),
        pc(NULL),
        sp(NULL),
        fp(NULL),
        function(NULL),
        frames_count(0) {}
  StateTag state;    // The state of the VM.
  Address pc;        // Instruction pointer.
  Address sp;        // Stack pointer.
  Address fp;        // Frame pointer.
  Address function;  // The last called JS function.
  static const int kMaxFramesCount = 64;
  Address stack[kMaxFramesCount];  // Call stack.
  int frames_count;                // Number of captured frames.
};

#ifdef ENABLE_LOGGING_AND_PROFILING
class Sampler {
 public:
  // Initialize sampler.
  explicit Sampler(int interval, bool profiling);
  virtual ~Sampler();

  // Performs stack sampling.
  virtual void SampleStack(TickSample* sample) = 0;

  // This method is called for each sampling period with the current
  // program counter.
  virtual void Tick(TickSample* sample) = 0;

  // Start and stop the sampler.
  void Start();
  void Stop();

  // Whether the sampler is used for profiling.
  inline bool IsProfiling() { return profiling_; }

  // Whether the sampler is running (that is, consumes resources).
  inline bool IsActive() { return active_; }

  class PlatformData;

 private:
  const int interval_;
  const bool profiling_;
  bool active_;
  PlatformData* data_;  // Platform specific data.
  DISALLOW_IMPLICIT_CONSTRUCTORS(Sampler);
};
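
// Subclass sketch (ProfilerSampler and the interval value are hypothetical):
// a concrete sampler implements SampleStack() and consumes each periodic
// Tick().
//
//   class ProfilerSampler : public Sampler {
//    public:
//     ProfilerSampler() : Sampler(1, true) {}  // Profiling enabled.
//     virtual void SampleStack(TickSample* sample) {
//       // Capture walkable frames into sample->stack and set
//       // sample->frames_count (at most kMaxFramesCount).
//     }
//     virtual void Tick(TickSample* sample) {
//       // Record the sample, e.g. forward it to the profiler log.
//     }
//   };
//
//   ProfilerSampler sampler;
//   sampler.Start();  // Begins periodic sampling.
//   // ...
//   sampler.Stop();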

#endif  // ENABLE_LOGGING_AND_PROFILING

} }  // namespace v8::internal

#endif  // V8_PLATFORM_H_