
Upgrade v8 to 1.3.13

v0.7.4-release
Ryan Dahl 15 years ago
parent
commit 2df13c7310
  1. deps/v8/AUTHORS (1)
  2. deps/v8/ChangeLog (24)
  3. deps/v8/LICENSE (13)
  4. deps/v8/include/v8.h (17)
  5. deps/v8/src/SConscript (17)
  6. deps/v8/src/api.cc (9)
  7. deps/v8/src/arm/builtins-arm.cc (16)
  8. deps/v8/src/arm/codegen-arm.cc (2)
  9. deps/v8/src/arm/constants-arm.h (24)
  10. deps/v8/src/arm/macro-assembler-arm.cc (16)
  11. deps/v8/src/bootstrapper.cc (4)
  12. deps/v8/src/builtins.cc (6)
  13. deps/v8/src/builtins.h (11)
  14. deps/v8/src/handles.cc (19)
  15. deps/v8/src/handles.h (6)
  16. deps/v8/src/heap-profiler.cc (548)
  17. deps/v8/src/heap-profiler.h (266)
  18. deps/v8/src/heap.cc (176)
  19. deps/v8/src/heap.h (14)
  20. deps/v8/src/ia32/builtins-ia32.cc (460)
  21. deps/v8/src/log-utils.cc (14)
  22. deps/v8/src/log-utils.h (9)
  23. deps/v8/src/log.cc (43)
  24. deps/v8/src/log.h (2)
  25. deps/v8/src/mark-compact.cc (49)
  26. deps/v8/src/messages.js (3)
  27. deps/v8/src/objects-debug.cc (1)
  28. deps/v8/src/objects-inl.h (10)
  29. deps/v8/src/objects.cc (98)
  30. deps/v8/src/objects.h (30)
  31. deps/v8/src/runtime.cc (79)
  32. deps/v8/src/runtime.h (3)
  33. deps/v8/src/serialize.cc (38)
  34. deps/v8/src/serialize.h (3)
  35. deps/v8/src/spaces.cc (12)
  36. deps/v8/src/string-stream.cc (2)
  37. deps/v8/src/string-stream.h (2)
  38. deps/v8/src/v8-counters.h (2)
  39. deps/v8/src/v8natives.js (18)
  40. deps/v8/src/version.cc (4)
  41. deps/v8/src/x64/builtins-x64.cc (16)
  42. deps/v8/src/zone-inl.h (17)
  43. deps/v8/src/zone.h (8)
  44. deps/v8/test/cctest/SConscript (1)
  45. deps/v8/test/cctest/test-heap-profiler.cc (330)
  46. deps/v8/test/es5conform/README (14)
  47. deps/v8/test/es5conform/es5conform.status (68)
  48. deps/v8/test/es5conform/harness-adapt.js (78)
  49. deps/v8/test/es5conform/testcfg.py (108)
  50. deps/v8/test/mjsunit/arguments-enum.js (14)
  51. deps/v8/test/mjsunit/array-constructor.js (119)
  52. deps/v8/test/mjsunit/mjsunit.status (1)
  53. deps/v8/test/mjsunit/testcfg.py (3)
  54. deps/v8/test/mjsunit/third_party/array-splice-webkit.js (62)
  55. deps/v8/test/mjsunit/third_party/object-keys.js (66)
  56. deps/v8/test/mjsunit/third_party/regexp-pcre.js (0)
  57. deps/v8/test/mozilla/mozilla.status (7)
  58. deps/v8/tools/gyp/v8.gyp (2)
  59. deps/v8/tools/run-valgrind.py (2)
  60. deps/v8/tools/v8.xcodeproj/project.pbxproj (10)
  61. deps/v8/tools/visual_studio/v8_base.vcproj (8)
  62. deps/v8/tools/visual_studio/v8_base_arm.vcproj (8)
  63. deps/v8/tools/visual_studio/v8_base_x64.vcproj (8)
  64. deps/v8/tools/visual_studio/v8_cctest.vcproj (4)
  65. deps/v8/tools/visual_studio/v8_cctest_arm.vcproj (4)
  66. deps/v8/tools/visual_studio/v8_cctest_x64.vcproj (4)

deps/v8/AUTHORS (1)

@@ -10,6 +10,7 @@ Alexandre Vassalotti <avassalotti@gmail.com>
Craig Schlenter <craig.schlenter@gmail.com>
Daniel Andersson <kodandersson@gmail.com>
Daniel James <dnljms@gmail.com>
Jan de Mooij <jandemooij@gmail.com>
Jay Freeman <saurik@saurik.com>
Joel Stanley <joel.stan@gmail.com>
Matt Hanselman <mjhanselman@gmail.com>

deps/v8/ChangeLog (24)

@@ -1,3 +1,27 @@
2009-09-23: Version 1.3.13
Fixed uninitialized memory problem.
Improved heap profiler support.
2009-09-22: Version 1.3.12
Changed behavior of |function|.toString() on built-in functions to
be compatible with other implementations. Patch by Jan de Mooij.
Added Object::IsDirty in the API.
Optimized array construction; it is now handled purely in native
code.
[ES5] Made properties of the arguments array enumerable.
[ES5] Added test suite adapter for the es5conform test suite.
[ES5] Added Object.keys function.
2009-09-15: Version 1.3.11
Fixed crash in error reporting during bootstrapping.
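The 1.3.12 entries above add Object.keys and make arguments properties enumerable. As a rough, illustrative sketch only (not part of this commit), an embedder using the v8 1.3-era C++ API (Context::New, Script::Compile, and friends) could exercise the new function like this:

#include <v8.h>
#include <cstdio>
using namespace v8;

// Minimal sketch: run Object.keys through the embedding API of that era.
int main() {
  HandleScope handle_scope;
  Persistent<Context> context = Context::New();
  Context::Scope context_scope(context);

  // Object.keys returns only the object's own enumerable property names.
  Local<Value> result = Script::Compile(
      String::New("Object.keys({a: 1, b: 2}).join(',')"))->Run();
  String::AsciiValue keys(result);
  printf("%s\n", *keys);  // expected to print "a,b"

  context.Dispose();
  return 0;
}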

deps/v8/LICENSE (13)

@@ -2,10 +2,15 @@ This license applies to all parts of V8 that are not externally
maintained libraries. The externally maintained libraries used by V8
are:
- PCRE test suite, located in test/mjsunit/regexp-pcre.js. This is
based on the test suite from PCRE-7.3, which is copyrighted by the
University of Cambridge and Google, Inc. The copyright notice and
license are embedded in regexp-pcre.js.
- PCRE test suite, located in
test/mjsunit/third_party/regexp-pcre.js. This is based on the
test suite from PCRE-7.3, which is copyrighted by the University
of Cambridge and Google, Inc. The copyright notice and license
are embedded in regexp-pcre.js.
- Layout tests, located in test/mjsunit/third_party. These are
based on layout tests from webkit.org which are copyrighted by
Apple Computer, Inc. and released under a 3-clause BSD license.
- Dtoa, located under third_party/dtoa. This code is copyrighted by
David M. Gay and released under an MIT license.

deps/v8/include/v8.h (17)

@@ -1239,6 +1239,15 @@ class V8EXPORT Object : public Value {
Local<Value> GetHiddenValue(Handle<String> key);
bool DeleteHiddenValue(Handle<String> key);
/**
* Returns true if this is an instance of an api function (one
* created from a function created from a function template) and has
* been modified since it was created. Note that this method is
* conservative and may return true for objects that haven't actually
* been modified.
*/
bool IsDirty();
/**
* Clone this object with a fast but shallow copy. Values will point
* to the same values as the original object.
@@ -1537,9 +1546,9 @@ enum AccessType {
/**
* Returns true if cross-context access should be allowed to the named
* property with the given key on the global object.
* property with the given key on the host object.
*/
typedef bool (*NamedSecurityCallback)(Local<Object> global,
typedef bool (*NamedSecurityCallback)(Local<Object> host,
Local<Value> key,
AccessType type,
Local<Value> data);
@@ -1547,9 +1556,9 @@ typedef bool (*NamedSecurityCallback)(Local<Object> global,
/**
* Returns true if cross-context access should be allowed to the indexed
* property with the given index on the global object.
* property with the given index on the host object.
*/
typedef bool (*IndexedSecurityCallback)(Local<Object> global,
typedef bool (*IndexedSecurityCallback)(Local<Object> host,
uint32_t index,
AccessType type,
Local<Value> data);
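To make the two API changes above concrete, here is a hedged embedder-side sketch (not from this patch): a permissive named security callback written against the renamed host parameter, and a probe of the new Object::IsDirty(). It assumes a context is already entered; the function names are invented for illustration.

#include <v8.h>
using namespace v8;

// Illustrative only; assumes a v8::Context is already entered.
static bool AllowNamedAccess(Local<Object> host, Local<Value> key,
                             AccessType type, Local<Value> data) {
  return true;  // allow all cross-context named access in this sketch
}

void ProbeDirtyBit() {
  HandleScope handle_scope;
  Local<ObjectTemplate> templ = ObjectTemplate::New();
  // A callback like AllowNamedAccess would be registered on templ, e.g. via
  // ObjectTemplate::SetAccessCheckCallbacks(...).
  Local<Object> obj = templ->NewInstance();
  bool before = obj->IsDirty();                  // freshly created api object
  obj->Set(String::New("x"), Integer::New(42));  // modify it
  bool after = obj->IsDirty();                   // conservative; may report true
  (void) before;
  (void) after;
}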

deps/v8/src/SConscript (17)

@@ -42,14 +42,15 @@ SOURCES = {
'debug.cc', 'debug-agent.cc', 'disassembler.cc', 'execution.cc',
'factory.cc', 'flags.cc', 'frame-element.cc', 'frames.cc',
'func-name-inferrer.cc', 'global-handles.cc', 'handles.cc',
'hashmap.cc', 'heap.cc', 'ic.cc', 'interpreter-irregexp.cc',
'jsregexp.cc', 'jump-target.cc', 'log.cc', 'log-utils.cc',
'mark-compact.cc', 'messages.cc', 'objects.cc', 'oprofile-agent.cc',
'parser.cc', 'property.cc', 'regexp-macro-assembler.cc',
'regexp-macro-assembler-irregexp.cc', 'regexp-stack.cc',
'register-allocator.cc', 'rewriter.cc', 'runtime.cc', 'scanner.cc',
'scopeinfo.cc', 'scopes.cc', 'serialize.cc', 'snapshot-common.cc',
'spaces.cc', 'string-stream.cc', 'stub-cache.cc', 'token.cc', 'top.cc',
'hashmap.cc', 'heap.cc', 'heap-profiler.cc', 'ic.cc',
'interpreter-irregexp.cc', 'jsregexp.cc', 'jump-target.cc',
'log.cc', 'log-utils.cc', 'mark-compact.cc', 'messages.cc',
'objects.cc', 'oprofile-agent.cc', 'parser.cc', 'property.cc',
'regexp-macro-assembler.cc', 'regexp-macro-assembler-irregexp.cc',
'regexp-stack.cc', 'register-allocator.cc', 'rewriter.cc',
'runtime.cc', 'scanner.cc', 'scopeinfo.cc', 'scopes.cc',
'serialize.cc', 'snapshot-common.cc', 'spaces.cc',
'string-stream.cc', 'stub-cache.cc', 'token.cc', 'top.cc',
'unicode.cc', 'usage-analyzer.cc', 'utils.cc', 'v8-counters.cc',
'v8.cc', 'v8threads.cc', 'variables.cc', 'version.cc',
'virtual-frame.cc', 'zone.cc'

deps/v8/src/api.cc (9)

@@ -1191,6 +1191,7 @@ v8::TryCatch::TryCatch()
exception_(i::Heap::the_hole_value()),
message_(i::Smi::FromInt(0)),
is_verbose_(false),
can_continue_(true),
capture_message_(true),
js_handler_(NULL) {
i::Top::RegisterTryCatchHandler(this);
@@ -1988,7 +1989,8 @@ Local<Array> v8::Object::GetPropertyNames() {
ENTER_V8;
v8::HandleScope scope;
i::Handle<i::JSObject> self = Utils::OpenHandle(this);
i::Handle<i::FixedArray> value = i::GetKeysInFixedArrayFor(self);
i::Handle<i::FixedArray> value =
i::GetKeysInFixedArrayFor(self, i::INCLUDE_PROTOS);
// Because we use caching to speed up enumeration it is important
// to never change the result of the basic enumeration function so
// we clone the result.
@@ -2155,6 +2157,11 @@ void v8::Object::TurnOnAccessCheck() {
}
bool v8::Object::IsDirty() {
return Utils::OpenHandle(this)->IsDirty();
}
Local<v8::Object> v8::Object::Clone() {
ON_BAILOUT("v8::Object::Clone()", return Local<Object>());
ENTER_V8;

deps/v8/src/arm/builtins-arm.cc (16)

@@ -51,6 +51,22 @@ void Builtins::Generate_Adaptor(MacroAssembler* masm, CFunctionId id) {
}
void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
// Just jump to the generic array code.
Code* code = Builtins::builtin(Builtins::ArrayCodeGeneric);
Handle<Code> array_code(code);
__ Jump(array_code, RelocInfo::CODE_TARGET);
}
void Builtins::Generate_ArrayConstructCode(MacroAssembler* masm) {
// Just jump to the generic construct code.
Code* code = Builtins::builtin(Builtins::JSConstructStubGeneric);
Handle<Code> generic_construct_stub(code);
__ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
}
void Builtins::Generate_JSConstructCall(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- r0 : number of arguments

deps/v8/src/arm/codegen-arm.cc (2)

@@ -4335,7 +4335,7 @@ static void CountLeadingZeros(
Register source,
Register scratch,
Register zeros) {
#ifdef __ARM_ARCH_5__
#ifdef CAN_USE_ARMV5_INSTRUCTIONS
__ clz(zeros, source); // This instruction is only supported after ARM5.
#else
__ mov(zeros, Operand(0));

deps/v8/src/arm/constants-arm.h (24)

@@ -43,10 +43,30 @@
# define USE_THUMB_INTERWORK 1
#endif
#if defined(__ARM_ARCH_5T__) || \
defined(__ARM_ARCH_5TE__) || \
defined(__ARM_ARCH_6__) || \
defined(__ARM_ARCH_7A__) || \
defined(__ARM_ARCH_7__)
# define CAN_USE_ARMV5_INSTRUCTIONS 1
# define CAN_USE_THUMB_INSTRUCTIONS 1
#endif
#if defined(__ARM_ARCH_6__) || \
defined(__ARM_ARCH_7A__) || \
defined(__ARM_ARCH_7__)
# define CAN_USE_ARMV6_INSTRUCTIONS 1
#endif
#if defined(__ARM_ARCH_7A__) || \
defined(__ARM_ARCH_7__)
# define CAN_USE_ARMV7_INSTRUCTIONS 1
#endif
// Simulator should support ARM5 instructions.
#if !defined(__arm__)
# define __ARM_ARCH_5__ 1
# define __ARM_ARCH_5T__ 1
# define CAN_USE_ARMV5_INSTRUCTIONS 1
# define CAN_USE_THUMB_INSTRUCTIONS 1
#endif
namespace assembler {

deps/v8/src/arm/macro-assembler-arm.cc (16)

@@ -52,21 +52,15 @@ MacroAssembler::MacroAssembler(void* buffer, int size)
// We do not support thumb inter-working with an arm architecture not supporting
// the blx instruction (below v5t)
#if defined(USE_THUMB_INTERWORK)
#if !defined(__ARM_ARCH_5T__) && \
!defined(__ARM_ARCH_5TE__) && \
!defined(__ARM_ARCH_6__) && \
!defined(__ARM_ARCH_7A__) && \
!defined(__ARM_ARCH_7__)
// add tests for other versions above v5t as required
#error "for thumb inter-working we require architecture v5t or above"
#endif
// the blx instruction (below v5t). If you know what CPU you are compiling for
// you can use -march=armv7 or similar.
#if defined(USE_THUMB_INTERWORK) && !defined(CAN_USE_THUMB_INSTRUCTIONS)
# error "For thumb inter-working we require an architecture which supports blx"
#endif
// Using blx may yield better code, so use it when required or when available
#if defined(USE_THUMB_INTERWORK) || defined(__ARM_ARCH_5__)
#if defined(USE_THUMB_INTERWORK) || defined(CAN_USE_ARMV5_INSTRUCTIONS)
#define USE_BLX 1
#endif
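A condensed, illustrative restatement of how the two ARM files above now fit together (assuming a GCC-style toolchain that predefines __ARM_ARCH_5TE__ when targeting, say, -march=armv5te); the real definitions live in constants-arm.h and macro-assembler-arm.cc:

// Sketch of the macro chain only (not additional code in the patch).
#if defined(__ARM_ARCH_5TE__)            // predefined by the compiler
# define CAN_USE_ARMV5_INSTRUCTIONS 1    // from constants-arm.h
# define CAN_USE_THUMB_INSTRUCTIONS 1
#endif

#if defined(USE_THUMB_INTERWORK) || defined(CAN_USE_ARMV5_INSTRUCTIONS)
# define USE_BLX 1                       // from macro-assembler-arm.cc
#endif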

deps/v8/src/bootstrapper.cc (4)

@@ -654,6 +654,8 @@ void Genesis::CreateRoots(v8::Handle<v8::ObjectTemplate> global_template,
InstallFunction(global, "Array", JS_ARRAY_TYPE, JSArray::kSize,
Top::initial_object_prototype(), Builtins::ArrayCode,
true);
array_function->shared()->set_construct_stub(
Builtins::builtin(Builtins::ArrayConstructCode));
array_function->shared()->DontAdaptArguments();
// This seems a bit hackish, but we need to make sure Array.length
@@ -1471,7 +1473,7 @@ void Genesis::MakeFunctionInstancePrototypeWritable() {
HandleScope scope;
Handle<DescriptorArray> function_map_descriptors =
ComputeFunctionInstanceDescriptor(false, true);
ComputeFunctionInstanceDescriptor(false);
Handle<Map> fm = Factory::CopyMapDropDescriptors(Top::function_map());
fm->set_instance_descriptors(*function_map_descriptors);
Top::context()->global_context()->set_function_map(*fm);

deps/v8/src/builtins.cc (6)

@@ -135,7 +135,9 @@ BUILTIN(EmptyFunction) {
BUILTIN_END
BUILTIN(ArrayCode) {
BUILTIN(ArrayCodeGeneric) {
Counters::array_function_runtime.Increment();
JSArray* array;
if (CalledAsConstructor()) {
array = JSArray::cast(*receiver);
@@ -166,7 +168,7 @@ BUILTIN(ArrayCode) {
// Take the argument as the length.
obj = array->Initialize(0);
if (obj->IsFailure()) return obj;
if (args.length() == 2) return array->SetElementsLength(args[1]);
return array->SetElementsLength(args[1]);
}
// Optimize the case where there are no parameters passed.

deps/v8/src/builtins.h (11)

@@ -37,7 +37,7 @@ namespace internal {
\
V(EmptyFunction) \
\
V(ArrayCode) \
V(ArrayCodeGeneric) \
\
V(ArrayPush) \
V(ArrayPop) \
@@ -83,8 +83,10 @@ namespace internal {
\
/* Uses KeyedLoadIC_Initialize; must be after in list. */ \
V(FunctionCall, BUILTIN, UNINITIALIZED) \
V(FunctionApply, BUILTIN, UNINITIALIZED)
V(FunctionApply, BUILTIN, UNINITIALIZED) \
\
V(ArrayCode, BUILTIN, UNINITIALIZED) \
V(ArrayConstructCode, BUILTIN, UNINITIALIZED)
#ifdef ENABLE_DEBUGGER_SUPPORT
// Define list of builtins used by the debugger implemented in assembly.
@@ -217,6 +219,9 @@ class Builtins : public AllStatic {
static void Generate_FunctionCall(MacroAssembler* masm);
static void Generate_FunctionApply(MacroAssembler* masm);
static void Generate_ArrayCode(MacroAssembler* masm);
static void Generate_ArrayConstructCode(MacroAssembler* masm);
};
} } // namespace v8::internal

deps/v8/src/handles.cc (19)

@@ -527,17 +527,11 @@ v8::Handle<v8::Array> GetKeysForIndexedInterceptor(Handle<JSObject> receiver,
}
Handle<FixedArray> GetKeysInFixedArrayFor(Handle<JSObject> object) {
Handle<FixedArray> GetKeysInFixedArrayFor(Handle<JSObject> object,
KeyCollectionType type) {
Handle<FixedArray> content = Factory::empty_fixed_array();
JSObject* arguments_boilerplate =
Top::context()->global_context()->arguments_boilerplate();
JSFunction* arguments_function =
JSFunction::cast(arguments_boilerplate->map()->constructor());
bool allow_enumeration = (object->map()->constructor() != arguments_function);
// Only collect keys if access is permitted.
if (allow_enumeration) {
for (Handle<Object> p = object;
*p != Heap::null_value();
p = Handle<Object>(p->GetPrototype())) {
@@ -575,7 +569,11 @@ Handle<FixedArray> GetKeysInFixedArrayFor(Handle<JSObject> object) {
if (!result.IsEmpty())
content = AddKeysFromJSArray(content, v8::Utils::OpenHandle(*result));
}
}
// If we only want local properties we bail out after the first
// iteration.
if (type == LOCAL_ONLY)
break;
}
return content;
}
@@ -583,7 +581,8 @@ Handle<FixedArray> GetKeysInFixedArrayFor(Handle<JSObject> object) {
Handle<JSArray> GetKeysFor(Handle<JSObject> object) {
Counters::for_in.Increment();
Handle<FixedArray> elements = GetKeysInFixedArrayFor(object);
Handle<FixedArray> elements = GetKeysInFixedArrayFor(object,
INCLUDE_PROTOS);
return Factory::NewJSArrayWithElements(elements);
}

deps/v8/src/handles.h (6)

@@ -265,9 +265,13 @@ v8::Handle<v8::Array> GetKeysForNamedInterceptor(Handle<JSObject> receiver,
Handle<JSObject> object);
v8::Handle<v8::Array> GetKeysForIndexedInterceptor(Handle<JSObject> receiver,
Handle<JSObject> object);
enum KeyCollectionType { LOCAL_ONLY, INCLUDE_PROTOS };
// Computes the enumerable keys for a JSObject. Used for implementing
// "for (n in object) { }".
Handle<FixedArray> GetKeysInFixedArrayFor(Handle<JSObject> object);
Handle<FixedArray> GetKeysInFixedArrayFor(Handle<JSObject> object,
KeyCollectionType type);
Handle<JSArray> GetKeysFor(Handle<JSObject> object);
Handle<FixedArray> GetEnumPropertyKeys(Handle<JSObject> object);
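A minimal sketch of how v8::internal code might use the new parameter; the helper names below are invented for illustration and are not part of this patch:

// LOCAL_ONLY stops after the receiver itself (own enumerable keys, roughly
// what an Object.keys implementation needs); INCLUDE_PROTOS walks the whole
// prototype chain (what for-in enumeration needs).
static Handle<FixedArray> OwnEnumerableKeys(Handle<JSObject> object) {
  return GetKeysInFixedArrayFor(object, LOCAL_ONLY);
}

static Handle<FixedArray> AllEnumerableKeys(Handle<JSObject> object) {
  return GetKeysInFixedArrayFor(object, INCLUDE_PROTOS);
}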

deps/v8/src/heap-profiler.cc (548)

@@ -0,0 +1,548 @@
// Copyright 2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include "v8.h"
#include "heap-profiler.h"
#include "string-stream.h"
namespace v8 {
namespace internal {
#ifdef ENABLE_LOGGING_AND_PROFILING
namespace {
// Clusterizer is a set of helper functions for converting
// object references into clusters.
class Clusterizer : public AllStatic {
public:
static JSObjectsCluster Clusterize(HeapObject* obj) {
return Clusterize(obj, true);
}
static void InsertIntoTree(JSObjectsClusterTree* tree,
HeapObject* obj, bool fine_grain);
static void InsertReferenceIntoTree(JSObjectsClusterTree* tree,
const JSObjectsCluster& cluster) {
InsertIntoTree(tree, cluster, 0);
}
private:
static JSObjectsCluster Clusterize(HeapObject* obj, bool fine_grain);
static int CalculateNetworkSize(JSObject* obj);
static int GetObjectSize(HeapObject* obj) {
return obj->IsJSObject() ?
CalculateNetworkSize(JSObject::cast(obj)) : obj->Size();
}
static void InsertIntoTree(JSObjectsClusterTree* tree,
const JSObjectsCluster& cluster, int size);
};
JSObjectsCluster Clusterizer::Clusterize(HeapObject* obj, bool fine_grain) {
if (obj->IsJSObject()) {
JSObject* js_obj = JSObject::cast(obj);
String* constructor = JSObject::cast(js_obj)->constructor_name();
// Differentiate Array, Function, and Object instances.
if (fine_grain && (constructor == Heap::Object_symbol() ||
constructor == Heap::Array_symbol() ||
constructor == Heap::function_class_symbol())) {
return JSObjectsCluster(constructor, obj);
} else {
return JSObjectsCluster(constructor);
}
} else if (obj->IsString()) {
return JSObjectsCluster(Heap::String_symbol());
}
return JSObjectsCluster();
}
void Clusterizer::InsertIntoTree(JSObjectsClusterTree* tree,
HeapObject* obj, bool fine_grain) {
JSObjectsCluster cluster = Clusterize(obj, fine_grain);
if (cluster.is_null()) return;
InsertIntoTree(tree, cluster, GetObjectSize(obj));
}
void Clusterizer::InsertIntoTree(JSObjectsClusterTree* tree,
const JSObjectsCluster& cluster, int size) {
JSObjectsClusterTree::Locator loc;
tree->Insert(cluster, &loc);
NumberAndSizeInfo number_and_size = loc.value();
number_and_size.increment_number(1);
number_and_size.increment_bytes(size);
loc.set_value(number_and_size);
}
int Clusterizer::CalculateNetworkSize(JSObject* obj) {
int size = obj->Size();
// If 'properties' and 'elements' are non-empty (thus, non-shared),
// take their size into account.
if (FixedArray::cast(obj->properties())->length() != 0) {
size += obj->properties()->Size();
}
if (FixedArray::cast(obj->elements())->length() != 0) {
size += obj->elements()->Size();
}
return size;
}
// A helper class for recording back references.
class ReferencesExtractor : public ObjectVisitor {
public:
ReferencesExtractor(const JSObjectsCluster& cluster,
RetainerHeapProfile* profile)
: cluster_(cluster),
profile_(profile),
inside_array_(false) {
}
void VisitPointer(Object** o) {
if ((*o)->IsJSObject() || (*o)->IsString()) {
profile_->StoreReference(cluster_, HeapObject::cast(*o));
} else if ((*o)->IsFixedArray() && !inside_array_) {
// Traverse one level deep for data members that are fixed arrays.
// This covers the case of 'elements' and 'properties' of JSObject,
// and function contexts.
inside_array_ = true;
FixedArray::cast(*o)->Iterate(this);
inside_array_ = false;
}
}
void VisitPointers(Object** start, Object** end) {
for (Object** p = start; p < end; p++) VisitPointer(p);
}
private:
const JSObjectsCluster& cluster_;
RetainerHeapProfile* profile_;
bool inside_array_;
};
// A printer interface implementation for the Retainers profile.
class RetainersPrinter : public RetainerHeapProfile::Printer {
public:
void PrintRetainers(const JSObjectsCluster& cluster,
const StringStream& retainers) {
HeapStringAllocator allocator;
StringStream stream(&allocator);
cluster.Print(&stream);
LOG(HeapSampleJSRetainersEvent(
*(stream.ToCString()), *(retainers.ToCString())));
}
};
class RetainerTreePrinter BASE_EMBEDDED {
public:
explicit RetainerTreePrinter(StringStream* stream) : stream_(stream) {}
void Call(const JSObjectsCluster& cluster,
const NumberAndSizeInfo& number_and_size) {
Print(stream_, cluster, number_and_size);
}
static void Print(StringStream* stream,
const JSObjectsCluster& cluster,
const NumberAndSizeInfo& number_and_size);
private:
StringStream* stream_;
};
void RetainerTreePrinter::Print(StringStream* stream,
const JSObjectsCluster& cluster,
const NumberAndSizeInfo& number_and_size) {
stream->Put(',');
cluster.Print(stream);
stream->Add(";%d", number_and_size.number());
}
} // namespace
const JSObjectsClusterTreeConfig::Key JSObjectsClusterTreeConfig::kNoKey;
const JSObjectsClusterTreeConfig::Value JSObjectsClusterTreeConfig::kNoValue;
ConstructorHeapProfile::ConstructorHeapProfile()
: zscope_(DELETE_ON_EXIT) {
}
void ConstructorHeapProfile::Call(const JSObjectsCluster& cluster,
const NumberAndSizeInfo& number_and_size) {
HeapStringAllocator allocator;
StringStream stream(&allocator);
cluster.Print(&stream);
LOG(HeapSampleJSConstructorEvent(*(stream.ToCString()),
number_and_size.number(),
number_and_size.bytes()));
}
void ConstructorHeapProfile::CollectStats(HeapObject* obj) {
Clusterizer::InsertIntoTree(&js_objects_info_tree_, obj, false);
}
void ConstructorHeapProfile::PrintStats() {
js_objects_info_tree_.ForEach(this);
}
void JSObjectsCluster::Print(StringStream* accumulator) const {
ASSERT(!is_null());
if (constructor_ == FromSpecialCase(ROOTS)) {
accumulator->Add("(roots)");
} else if (constructor_ == FromSpecialCase(GLOBAL_PROPERTY)) {
accumulator->Add("(global property)");
} else {
SmartPointer<char> s_name(
constructor_->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL));
accumulator->Add("%s", (*s_name)[0] != '\0' ? *s_name : "(anonymous)");
if (instance_ != NULL) {
accumulator->Add(":%p", static_cast<void*>(instance_));
}
}
}
void JSObjectsCluster::DebugPrint(StringStream* accumulator) const {
if (!is_null()) {
Print(accumulator);
} else {
accumulator->Add("(null cluster)");
}
}
inline ClustersCoarser::ClusterBackRefs::ClusterBackRefs(
const JSObjectsCluster& cluster_)
: cluster(cluster_), refs(kInitialBackrefsListCapacity) {
}
inline ClustersCoarser::ClusterBackRefs::ClusterBackRefs(
const ClustersCoarser::ClusterBackRefs& src)
: cluster(src.cluster), refs(src.refs.capacity()) {
refs.AddAll(src.refs);
}
inline ClustersCoarser::ClusterBackRefs&
ClustersCoarser::ClusterBackRefs::operator=(
const ClustersCoarser::ClusterBackRefs& src) {
if (this == &src) return *this;
cluster = src.cluster;
refs.Clear();
refs.AddAll(src.refs);
return *this;
}
inline int ClustersCoarser::ClusterBackRefs::Compare(
const ClustersCoarser::ClusterBackRefs& a,
const ClustersCoarser::ClusterBackRefs& b) {
int cmp = JSObjectsCluster::CompareConstructors(a.cluster, b.cluster);
if (cmp != 0) return cmp;
if (a.refs.length() < b.refs.length()) return -1;
if (a.refs.length() > b.refs.length()) return 1;
for (int i = 0; i < a.refs.length(); ++i) {
int cmp = JSObjectsCluster::Compare(a.refs[i], b.refs[i]);
if (cmp != 0) return cmp;
}
return 0;
}
ClustersCoarser::ClustersCoarser()
: zscope_(DELETE_ON_EXIT),
sim_list_(ClustersCoarser::kInitialSimilarityListCapacity),
current_pair_(NULL) {
}
void ClustersCoarser::Call(const JSObjectsCluster& cluster,
JSObjectsClusterTree* tree) {
if (!cluster.can_be_coarsed()) return;
ClusterBackRefs pair(cluster);
ASSERT(current_pair_ == NULL);
current_pair_ = &pair;
current_set_ = new JSObjectsRetainerTree();
tree->ForEach(this);
sim_list_.Add(pair);
current_pair_ = NULL;
current_set_ = NULL;
}
void ClustersCoarser::Call(const JSObjectsCluster& cluster,
const NumberAndSizeInfo& number_and_size) {
ASSERT(current_pair_ != NULL);
ASSERT(current_set_ != NULL);
JSObjectsCluster eq = GetCoarseEquivalent(cluster);
JSObjectsRetainerTree::Locator loc;
if (!eq.is_null()) {
if (current_set_->Find(eq, &loc)) return;
current_pair_->refs.Add(eq);
current_set_->Insert(eq, &loc);
} else {
current_pair_->refs.Add(cluster);
}
}
void ClustersCoarser::Process(JSObjectsRetainerTree* tree) {
int last_eq_clusters = -1;
for (int i = 0; i < kMaxPassesCount; ++i) {
sim_list_.Clear();
const int curr_eq_clusters = DoProcess(tree);
// If no new cluster equivalents discovered, abort processing.
if (last_eq_clusters == curr_eq_clusters) break;
last_eq_clusters = curr_eq_clusters;
}
}
int ClustersCoarser::DoProcess(JSObjectsRetainerTree* tree) {
tree->ForEach(this);
// To sort similarity list properly, references list of a cluster is
// required to be sorted, thus 'O1 <- A, B' and 'O2 <- B, A' would
// be considered equivalent. But we don't sort them explicitly
// because we know that they come from a splay tree traversal, so
// they are already sorted.
sim_list_.Sort(ClusterBackRefsCmp);
return FillEqualityTree();
}
JSObjectsCluster ClustersCoarser::GetCoarseEquivalent(
const JSObjectsCluster& cluster) {
if (!cluster.can_be_coarsed()) return JSObjectsCluster();
EqualityTree::Locator loc;
return eq_tree_.Find(cluster, &loc) ? loc.value() : JSObjectsCluster();
}
bool ClustersCoarser::HasAnEquivalent(const JSObjectsCluster& cluster) {
// Return true for coarsible clusters that have a non-identical equivalent.
return cluster.can_be_coarsed() &&
JSObjectsCluster::Compare(cluster, GetCoarseEquivalent(cluster)) != 0;
}
int ClustersCoarser::FillEqualityTree() {
int eq_clusters_count = 0;
int eq_to = 0;
bool first_added = false;
for (int i = 1; i < sim_list_.length(); ++i) {
if (ClusterBackRefs::Compare(sim_list_[i], sim_list_[eq_to]) == 0) {
EqualityTree::Locator loc;
if (!first_added) {
// Add self-equivalence, if we have more than one item in this
// equivalence class.
eq_tree_.Insert(sim_list_[eq_to].cluster, &loc);
loc.set_value(sim_list_[eq_to].cluster);
first_added = true;
}
eq_tree_.Insert(sim_list_[i].cluster, &loc);
loc.set_value(sim_list_[eq_to].cluster);
++eq_clusters_count;
} else {
eq_to = i;
first_added = false;
}
}
return eq_clusters_count;
}
const JSObjectsCluster ClustersCoarser::ClusterEqualityConfig::kNoKey;
const JSObjectsCluster ClustersCoarser::ClusterEqualityConfig::kNoValue;
const JSObjectsRetainerTreeConfig::Key JSObjectsRetainerTreeConfig::kNoKey;
const JSObjectsRetainerTreeConfig::Value JSObjectsRetainerTreeConfig::kNoValue =
NULL;
RetainerHeapProfile::RetainerHeapProfile()
: zscope_(DELETE_ON_EXIT),
coarse_cluster_tree_(NULL),
current_printer_(NULL),
current_stream_(NULL) {
JSObjectsCluster roots(JSObjectsCluster::ROOTS);
ReferencesExtractor extractor(roots, this);
Heap::IterateRoots(&extractor);
}
void RetainerHeapProfile::StoreReference(const JSObjectsCluster& cluster,
HeapObject* ref) {
JSObjectsCluster ref_cluster = Clusterizer::Clusterize(ref);
JSObjectsRetainerTree::Locator ref_loc;
if (retainers_tree_.Insert(ref_cluster, &ref_loc)) {
ref_loc.set_value(new JSObjectsClusterTree());
}
JSObjectsClusterTree* referenced_by = ref_loc.value();
Clusterizer::InsertReferenceIntoTree(referenced_by, cluster);
}
void RetainerHeapProfile::CollectStats(HeapObject* obj) {
if (obj->IsJSObject()) {
const JSObjectsCluster cluster = Clusterizer::Clusterize(obj);
ReferencesExtractor extractor(cluster, this);
obj->Iterate(&extractor);
} else if (obj->IsJSGlobalPropertyCell()) {
JSObjectsCluster global_prop(JSObjectsCluster::GLOBAL_PROPERTY);
ReferencesExtractor extractor(global_prop, this);
obj->Iterate(&extractor);
}
}
void RetainerHeapProfile::DebugPrintStats(
RetainerHeapProfile::Printer* printer) {
coarser_.Process(&retainers_tree_);
ASSERT(current_printer_ == NULL);
current_printer_ = printer;
retainers_tree_.ForEach(this);
current_printer_ = NULL;
}
void RetainerHeapProfile::PrintStats() {
RetainersPrinter printer;
DebugPrintStats(&printer);
}
void RetainerHeapProfile::Call(const JSObjectsCluster& cluster,
JSObjectsClusterTree* tree) {
// First level of retainer graph.
if (coarser_.HasAnEquivalent(cluster)) return;
ASSERT(current_stream_ == NULL);
HeapStringAllocator allocator;
StringStream stream(&allocator);
current_stream_ = &stream;
ASSERT(coarse_cluster_tree_ == NULL);
coarse_cluster_tree_ = new JSObjectsClusterTree();
tree->ForEach(this);
// Print aggregated counts and sizes.
RetainerTreePrinter printer(current_stream_);
coarse_cluster_tree_->ForEach(&printer);
coarse_cluster_tree_ = NULL;
current_printer_->PrintRetainers(cluster, stream);
current_stream_ = NULL;
}
void RetainerHeapProfile::Call(const JSObjectsCluster& cluster,
const NumberAndSizeInfo& number_and_size) {
ASSERT(coarse_cluster_tree_ != NULL);
ASSERT(current_stream_ != NULL);
JSObjectsCluster eq = coarser_.GetCoarseEquivalent(cluster);
if (eq.is_null()) {
RetainerTreePrinter::Print(current_stream_, cluster, number_and_size);
} else {
// Aggregate counts and sizes for equivalent clusters.
JSObjectsClusterTree::Locator loc;
coarse_cluster_tree_->Insert(eq, &loc);
NumberAndSizeInfo eq_number_and_size = loc.value();
eq_number_and_size.increment_number(number_and_size.number());
loc.set_value(eq_number_and_size);
}
}
//
// HeapProfiler class implementation.
//
void HeapProfiler::CollectStats(HeapObject* obj, HistogramInfo* info) {
InstanceType type = obj->map()->instance_type();
ASSERT(0 <= type && type <= LAST_TYPE);
info[type].increment_number(1);
info[type].increment_bytes(obj->Size());
}
void HeapProfiler::WriteSample() {
LOG(HeapSampleBeginEvent("Heap", "allocated"));
LOG(HeapSampleStats(
"Heap", "allocated", Heap::Capacity(), Heap::SizeOfObjects()));
HistogramInfo info[LAST_TYPE+1];
#define DEF_TYPE_NAME(name) info[name].set_name(#name);
INSTANCE_TYPE_LIST(DEF_TYPE_NAME)
#undef DEF_TYPE_NAME
ConstructorHeapProfile js_cons_profile;
RetainerHeapProfile js_retainer_profile;
HeapIterator iterator;
while (iterator.has_next()) {
HeapObject* obj = iterator.next();
CollectStats(obj, info);
js_cons_profile.CollectStats(obj);
js_retainer_profile.CollectStats(obj);
}
// Lump all the string types together.
int string_number = 0;
int string_bytes = 0;
#define INCREMENT_SIZE(type, size, name, camel_name) \
string_number += info[type].number(); \
string_bytes += info[type].bytes();
STRING_TYPE_LIST(INCREMENT_SIZE)
#undef INCREMENT_SIZE
if (string_bytes > 0) {
LOG(HeapSampleItemEvent("STRING_TYPE", string_number, string_bytes));
}
for (int i = FIRST_NONSTRING_TYPE; i <= LAST_TYPE; ++i) {
if (info[i].bytes() > 0) {
LOG(HeapSampleItemEvent(info[i].name(), info[i].number(),
info[i].bytes()));
}
}
js_cons_profile.PrintStats();
js_retainer_profile.PrintStats();
LOG(HeapSampleEndEvent("Heap", "allocated"));
}
#endif // ENABLE_LOGGING_AND_PROFILING
} } // namespace v8::internal

deps/v8/src/heap-profiler.h (266)

@@ -0,0 +1,266 @@
// Copyright 2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#ifndef V8_HEAP_PROFILER_H_
#define V8_HEAP_PROFILER_H_
namespace v8 {
namespace internal {
#ifdef ENABLE_LOGGING_AND_PROFILING
// The HeapProfiler writes data to the log files, which can be postprocessed
// to generate .hp files for use by the GHC/Valgrind tool hp2ps.
class HeapProfiler {
public:
// Write a single heap sample to the log file.
static void WriteSample();
private:
// Update the array info with stats from obj.
static void CollectStats(HeapObject* obj, HistogramInfo* info);
};
// JSObjectsCluster describes a group of JS objects that are
// considered equivalent in terms of a particular profile.
class JSObjectsCluster BASE_EMBEDDED {
public:
// These special cases are used in retainer profile.
enum SpecialCase {
ROOTS = 1,
GLOBAL_PROPERTY = 2
};
JSObjectsCluster() : constructor_(NULL), instance_(NULL) {}
explicit JSObjectsCluster(String* constructor)
: constructor_(constructor), instance_(NULL) {}
explicit JSObjectsCluster(SpecialCase special)
: constructor_(FromSpecialCase(special)), instance_(NULL) {}
JSObjectsCluster(String* constructor, Object* instance)
: constructor_(constructor), instance_(instance) {}
static int CompareConstructors(const JSObjectsCluster& a,
const JSObjectsCluster& b) {
// Strings are unique, so it is sufficient to compare their pointers.
return a.constructor_ == b.constructor_ ? 0
: (a.constructor_ < b.constructor_ ? -1 : 1);
}
static int Compare(const JSObjectsCluster& a, const JSObjectsCluster& b) {
// Strings are unique, so it is sufficient to compare their pointers.
const int cons_cmp = CompareConstructors(a, b);
return cons_cmp == 0 ?
(a.instance_ == b.instance_ ? 0 : (a.instance_ < b.instance_ ? -1 : 1))
: cons_cmp;
}
bool is_null() const { return constructor_ == NULL; }
bool can_be_coarsed() const { return instance_ != NULL; }
String* constructor() const { return constructor_; }
void Print(StringStream* accumulator) const;
// Allows null clusters to be printed.
void DebugPrint(StringStream* accumulator) const;
private:
static String* FromSpecialCase(SpecialCase special) {
// We use symbols that are illegal JS identifiers to identify special cases.
// Their actual value is irrelevant for us.
switch (special) {
case ROOTS: return Heap::result_symbol();
case GLOBAL_PROPERTY: return Heap::code_symbol();
default:
UNREACHABLE();
return NULL;
}
}
String* constructor_;
Object* instance_;
};
struct JSObjectsClusterTreeConfig {
typedef JSObjectsCluster Key;
typedef NumberAndSizeInfo Value;
static const Key kNoKey;
static const Value kNoValue;
static int Compare(const Key& a, const Key& b) {
return Key::Compare(a, b);
}
};
typedef ZoneSplayTree<JSObjectsClusterTreeConfig> JSObjectsClusterTree;
// ConstructorHeapProfile is responsible for gathering and logging
// "constructor profile" of JS objects allocated on heap.
// It is run during garbage collection cycle, thus it doesn't need
// to use handles.
class ConstructorHeapProfile BASE_EMBEDDED {
public:
ConstructorHeapProfile();
virtual ~ConstructorHeapProfile() {}
void CollectStats(HeapObject* obj);
void PrintStats();
// Used by ZoneSplayTree::ForEach. Made virtual to allow overriding in tests.
virtual void Call(const JSObjectsCluster& cluster,
const NumberAndSizeInfo& number_and_size);
private:
ZoneScope zscope_;
JSObjectsClusterTree js_objects_info_tree_;
};
// JSObjectsRetainerTree is used to represent retainer graphs using
// adjacency list form:
//
// Cluster -> (Cluster -> NumberAndSizeInfo)
//
// Subordinate splay trees are stored by pointer. They are zone-allocated,
// so there is no need to manage their lifetime.
//
struct JSObjectsRetainerTreeConfig {
typedef JSObjectsCluster Key;
typedef JSObjectsClusterTree* Value;
static const Key kNoKey;
static const Value kNoValue;
static int Compare(const Key& a, const Key& b) {
return Key::Compare(a, b);
}
};
typedef ZoneSplayTree<JSObjectsRetainerTreeConfig> JSObjectsRetainerTree;
class ClustersCoarser BASE_EMBEDDED {
public:
ClustersCoarser();
// Processes a given retainer graph.
void Process(JSObjectsRetainerTree* tree);
// Returns an equivalent cluster (can be the cluster itself).
// If the given cluster doesn't have an equivalent, returns null cluster.
JSObjectsCluster GetCoarseEquivalent(const JSObjectsCluster& cluster);
// Returns whether a cluster can be substituted with an equivalent and thus,
// skipped in some cases.
bool HasAnEquivalent(const JSObjectsCluster& cluster);
// Used by JSObjectsRetainerTree::ForEach.
void Call(const JSObjectsCluster& cluster, JSObjectsClusterTree* tree);
void Call(const JSObjectsCluster& cluster,
const NumberAndSizeInfo& number_and_size);
private:
// Stores a list of back references for a cluster.
struct ClusterBackRefs {
explicit ClusterBackRefs(const JSObjectsCluster& cluster_);
ClusterBackRefs(const ClusterBackRefs& src);
ClusterBackRefs& operator=(const ClusterBackRefs& src);
static int Compare(const ClusterBackRefs& a, const ClusterBackRefs& b);
JSObjectsCluster cluster;
ZoneList<JSObjectsCluster> refs;
};
typedef ZoneList<ClusterBackRefs> SimilarityList;
// A tree for storing a list of equivalents for a cluster.
struct ClusterEqualityConfig {
typedef JSObjectsCluster Key;
typedef JSObjectsCluster Value;
static const Key kNoKey;
static const Value kNoValue;
static int Compare(const Key& a, const Key& b) {
return Key::Compare(a, b);
}
};
typedef ZoneSplayTree<ClusterEqualityConfig> EqualityTree;
static int ClusterBackRefsCmp(const ClusterBackRefs* a,
const ClusterBackRefs* b) {
return ClusterBackRefs::Compare(*a, *b);
}
int DoProcess(JSObjectsRetainerTree* tree);
int FillEqualityTree();
static const int kInitialBackrefsListCapacity = 2;
static const int kInitialSimilarityListCapacity = 2000;
// Number of passes for finding equivalents. Limits the length of paths
// that can be considered equivalent.
static const int kMaxPassesCount = 10;
ZoneScope zscope_;
SimilarityList sim_list_;
EqualityTree eq_tree_;
ClusterBackRefs* current_pair_;
JSObjectsRetainerTree* current_set_;
};
// RetainerHeapProfile is responsible for gathering and logging
// "retainer profile" of JS objects allocated on heap.
// It is run during garbage collection cycle, thus it doesn't need
// to use handles.
class RetainerHeapProfile BASE_EMBEDDED {
public:
class Printer {
public:
virtual ~Printer() {}
virtual void PrintRetainers(const JSObjectsCluster& cluster,
const StringStream& retainers) = 0;
};
RetainerHeapProfile();
void CollectStats(HeapObject* obj);
void PrintStats();
void DebugPrintStats(Printer* printer);
void StoreReference(const JSObjectsCluster& cluster, HeapObject* ref);
private:
// Limit on the number of retainers to be printed per cluster.
static const int kMaxRetainersToPrint = 50;
ZoneScope zscope_;
JSObjectsRetainerTree retainers_tree_;
ClustersCoarser coarser_;
// TODO(mnaganov): Use some helper class to hold these state variables.
JSObjectsClusterTree* coarse_cluster_tree_;
Printer* current_printer_;
StringStream* current_stream_;
public:
// Used by JSObjectsRetainerTree::ForEach.
void Call(const JSObjectsCluster& cluster, JSObjectsClusterTree* tree);
void Call(const JSObjectsCluster& cluster,
const NumberAndSizeInfo& number_and_size);
};
#endif // ENABLE_LOGGING_AND_PROFILING
} } // namespace v8::internal
#endif // V8_HEAP_PROFILER_H_
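For orientation only, a hypothetical illustration (the cluster names and counts are made up) of the adjacency-list form described above, as a JSObjectsRetainerTree might look after a CollectStats pass:

// JSObjectsRetainerTree maps each referenced cluster to the clusters that
// retain it, with reference counts kept in NumberAndSizeInfo:
//
//   Point              -> { (roots): 1,  Array:0x2f68a1c4: 3 }
//   Array:0x2f68a1c4   -> { (global property): 1 }
//   String             -> { Point: 2 }
//
// ClustersCoarser then folds fine-grained clusters such as Array:0x2f68a1c4
// into equivalence classes when their back-reference lists match, so the
// printed retainer profile stays compact.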

deps/v8/src/heap.cc (176)

@@ -33,6 +33,7 @@
#include "codegen-inl.h"
#include "compilation-cache.h"
#include "debug.h"
#include "heap-profiler.h"
#include "global-handles.h"
#include "mark-compact.h"
#include "natives.h"
@@ -636,15 +637,7 @@ static void VerifyNonPointerSpacePointers() {
HeapObjectIterator code_it(Heap::code_space());
while (code_it.has_next()) {
HeapObject* object = code_it.next();
if (object->IsCode()) {
Code::cast(object)->ConvertICTargetsFromAddressToObject();
object->Iterate(&v);
Code::cast(object)->ConvertICTargetsFromObjectToAddress();
} else {
// If we find non-code objects in code space (e.g., free list
// nodes) we want to verify them as well.
object->Iterate(&v);
}
}
HeapObjectIterator data_it(Heap::old_data_space());
@@ -1934,7 +1927,6 @@ Object* Heap::CreateCode(const CodeDesc& desc,
code->set_relocation_size(desc.reloc_size);
code->set_sinfo_size(sinfo_size);
code->set_flags(flags);
code->set_ic_flag(Code::IC_TARGET_IS_ADDRESS);
// Allow self references to created code object by patching the handle to
// point to the newly allocated Code object.
if (!self_reference.is_null()) {
@@ -3544,164 +3536,6 @@ void HeapIterator::reset() {
}
#ifdef ENABLE_LOGGING_AND_PROFILING
namespace {
// JSConstructorProfile is responsible for gathering and logging
// "constructor profile" of JS object allocated on heap.
// It is run during garbage collection cycle, thus it doesn't need
// to use handles.
class JSConstructorProfile BASE_EMBEDDED {
public:
JSConstructorProfile() : zscope_(DELETE_ON_EXIT) {}
void CollectStats(HeapObject* obj);
void PrintStats();
// Used by ZoneSplayTree::ForEach.
void Call(String* name, const NumberAndSizeInfo& number_and_size);
private:
struct TreeConfig {
typedef String* Key;
typedef NumberAndSizeInfo Value;
static const Key kNoKey;
static const Value kNoValue;
// Strings are unique, so it is sufficient to compare their pointers.
static int Compare(const Key& a, const Key& b) {
return a == b ? 0 : (a < b ? -1 : 1);
}
};
typedef ZoneSplayTree<TreeConfig> JSObjectsInfoTree;
static int CalculateJSObjectNetworkSize(JSObject* obj);
ZoneScope zscope_;
JSObjectsInfoTree js_objects_info_tree_;
};
const JSConstructorProfile::TreeConfig::Key
JSConstructorProfile::TreeConfig::kNoKey = NULL;
const JSConstructorProfile::TreeConfig::Value
JSConstructorProfile::TreeConfig::kNoValue;
int JSConstructorProfile::CalculateJSObjectNetworkSize(JSObject* obj) {
int size = obj->Size();
// If 'properties' and 'elements' are non-empty (thus, non-shared),
// take their size into account.
if (FixedArray::cast(obj->properties())->length() != 0) {
size += obj->properties()->Size();
}
if (FixedArray::cast(obj->elements())->length() != 0) {
size += obj->elements()->Size();
}
return size;
}
void JSConstructorProfile::Call(String* name,
const NumberAndSizeInfo& number_and_size) {
ASSERT(name != NULL);
SmartPointer<char> s_name(
name->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL));
LOG(HeapSampleJSConstructorEvent(*s_name,
number_and_size.number(),
number_and_size.bytes()));
}
void JSConstructorProfile::CollectStats(HeapObject* obj) {
String* constructor = NULL;
int size;
if (obj->IsString()) {
constructor = Heap::String_symbol();
size = obj->Size();
} else if (obj->IsJSObject()) {
JSObject* js_obj = JSObject::cast(obj);
constructor = js_obj->constructor_name();
size = CalculateJSObjectNetworkSize(js_obj);
} else {
return;
}
JSObjectsInfoTree::Locator loc;
if (!js_objects_info_tree_.Find(constructor, &loc)) {
js_objects_info_tree_.Insert(constructor, &loc);
}
NumberAndSizeInfo number_and_size = loc.value();
number_and_size.increment_number(1);
number_and_size.increment_bytes(size);
loc.set_value(number_and_size);
}
void JSConstructorProfile::PrintStats() {
js_objects_info_tree_.ForEach(this);
}
} // namespace
#endif
//
// HeapProfiler class implementation.
//
#ifdef ENABLE_LOGGING_AND_PROFILING
void HeapProfiler::CollectStats(HeapObject* obj, HistogramInfo* info) {
InstanceType type = obj->map()->instance_type();
ASSERT(0 <= type && type <= LAST_TYPE);
info[type].increment_number(1);
info[type].increment_bytes(obj->Size());
}
#endif
#ifdef ENABLE_LOGGING_AND_PROFILING
void HeapProfiler::WriteSample() {
LOG(HeapSampleBeginEvent("Heap", "allocated"));
LOG(HeapSampleStats(
"Heap", "allocated", Heap::Capacity(), Heap::SizeOfObjects()));
HistogramInfo info[LAST_TYPE+1];
#define DEF_TYPE_NAME(name) info[name].set_name(#name);
INSTANCE_TYPE_LIST(DEF_TYPE_NAME)
#undef DEF_TYPE_NAME
JSConstructorProfile js_cons_profile;
HeapIterator iterator;
while (iterator.has_next()) {
HeapObject* obj = iterator.next();
CollectStats(obj, info);
js_cons_profile.CollectStats(obj);
}
// Lump all the string types together.
int string_number = 0;
int string_bytes = 0;
#define INCREMENT_SIZE(type, size, name, camel_name) \
string_number += info[type].number(); \
string_bytes += info[type].bytes();
STRING_TYPE_LIST(INCREMENT_SIZE)
#undef INCREMENT_SIZE
if (string_bytes > 0) {
LOG(HeapSampleItemEvent("STRING_TYPE", string_number, string_bytes));
}
for (int i = FIRST_NONSTRING_TYPE; i <= LAST_TYPE; ++i) {
if (info[i].bytes() > 0) {
LOG(HeapSampleItemEvent(info[i].name(), info[i].number(),
info[i].bytes()));
}
}
js_cons_profile.PrintStats();
LOG(HeapSampleEndEvent("Heap", "allocated"));
}
#endif
#ifdef DEBUG
static bool search_for_any_global;
@@ -3744,10 +3578,6 @@ static void MarkObjectRecursively(Object** p) {
return;
}
if (obj->IsCode()) {
Code::cast(obj)->ConvertICTargetsFromAddressToObject();
}
// not visited yet
Map* map_p = reinterpret_cast<Map*>(HeapObject::cast(map));
@@ -3803,10 +3633,6 @@ static void UnmarkObjectRecursively(Object** p) {
obj->IterateBody(Map::cast(map_p)->instance_type(),
obj->SizeFromMap(Map::cast(map_p)),
&unmark_visitor);
if (obj->IsCode()) {
Code::cast(obj)->ConvertICTargetsFromObjectToAddress();
}
}

deps/v8/src/heap.h (14)

@@ -1443,20 +1443,6 @@ class DisableAssertNoAllocation {
#endif
#ifdef ENABLE_LOGGING_AND_PROFILING
// The HeapProfiler writes data to the log files, which can be postprocessed
// to generate .hp files for use by the GHC/Valgrind tool hp2ps.
class HeapProfiler {
public:
// Write a single heap sample to the log file.
static void WriteSample();
private:
// Update the array info with stats from obj.
static void CollectStats(HeapObject* obj, HistogramInfo* info);
};
#endif
// GCTracer collects and prints ONE line after each garbage collector
// invocation IFF --trace_gc is used.

deps/v8/src/ia32/builtins-ia32.cc (460)

@@ -658,6 +658,466 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
}
// Load the built-in Array function from the current context.
static void GenerateLoadArrayFunction(MacroAssembler* masm, Register result) {
// Load the global context.
__ mov(result, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
__ mov(result, FieldOperand(result, GlobalObject::kGlobalContextOffset));
// Load the Array function from the global context.
__ mov(result,
Operand(result, Context::SlotOffset(Context::ARRAY_FUNCTION_INDEX)));
}
// Number of empty elements to allocate for an empty array.
static const int kPreallocatedArrayElements = 4;
// Allocate an empty JSArray. The allocated array is put into the result
// register. If the parameter holes is larger than zero an elements backing
// store is allocated with this size and filled with the hole values. Otherwise
// the elements backing store is set to the empty FixedArray.
static void AllocateEmptyJSArray(MacroAssembler* masm,
Register array_function,
Register result,
Register scratch1,
Register scratch2,
Register scratch3,
int holes,
Label* gc_required) {
ASSERT(holes >= 0);
// Load the initial map from the array function.
__ mov(scratch1, FieldOperand(array_function,
JSFunction::kPrototypeOrInitialMapOffset));
// Allocate the JSArray object together with space for a fixed array with the
// requested elements.
int size = JSArray::kSize;
if (holes > 0) {
size += FixedArray::SizeFor(holes);
}
__ AllocateObjectInNewSpace(size,
result,
scratch2,
scratch3,
gc_required,
TAG_OBJECT);
// Allocated the JSArray. Now initialize the fields except for the elements
// array.
// result: JSObject
// scratch1: initial map
// scratch2: start of next object
__ mov(FieldOperand(result, JSObject::kMapOffset), scratch1);
__ mov(FieldOperand(result, JSArray::kPropertiesOffset),
Factory::empty_fixed_array());
// Field JSArray::kElementsOffset is initialized later.
__ mov(FieldOperand(result, JSArray::kLengthOffset), Immediate(0));
// If no storage is requested for the elements array just set the empty
// fixed array.
if (holes == 0) {
__ mov(FieldOperand(result, JSArray::kElementsOffset),
Factory::empty_fixed_array());
return;
}
// Calculate the location of the elements array and set elements array member
// of the JSArray.
// result: JSObject
// scratch2: start of next object
__ lea(scratch1, Operand(result, JSArray::kSize));
__ mov(FieldOperand(result, JSArray::kElementsOffset), scratch1);
// Initialize the FixedArray and fill it with holes. FixedArray length is not
// stored as a smi.
// result: JSObject
// scratch1: elements array
// scratch2: start of next object
__ mov(FieldOperand(scratch1, JSObject::kMapOffset),
Factory::fixed_array_map());
__ mov(FieldOperand(scratch1, Array::kLengthOffset), Immediate(holes));
// Fill the FixedArray with the hole value. Inline the code if short.
// Reconsider loop unfolding if kPreallocatedArrayElements gets changed.
static const int kLoopUnfoldLimit = 4;
ASSERT(kPreallocatedArrayElements <= kLoopUnfoldLimit);
if (holes <= kLoopUnfoldLimit) {
// Use a scratch register here to have only one reloc info when unfolding
// the loop.
__ mov(scratch3, Factory::the_hole_value());
for (int i = 0; i < holes; i++) {
__ mov(FieldOperand(scratch1,
FixedArray::kHeaderSize + i * kPointerSize),
scratch3);
}
} else {
Label loop, entry;
__ jmp(&entry);
__ bind(&loop);
__ mov(Operand(scratch1, 0), Factory::the_hole_value());
__ add(Operand(scratch1), Immediate(kPointerSize));
__ bind(&entry);
__ cmp(scratch1, Operand(scratch2));
__ j(below, &loop);
}
}
// Allocate a JSArray with the number of elements stored in a register. The
// register array_function holds the built-in Array function and the register
// array_size holds the size of the array as a smi. The allocated array is put
// into the result register and beginning and end of the FixedArray elements
// storage is put into registers elements_array and elements_array_end (see
// below for when that is not the case). If the parameter fill_with_holes is
// true the allocated elements backing store is filled with the hole values
// otherwise it is left uninitialized. When the backing store is filled the
// register elements_array is scratched.
static void AllocateJSArray(MacroAssembler* masm,
Register array_function, // Array function.
Register array_size, // As a smi.
Register result,
Register elements_array,
Register elements_array_end,
Register scratch,
bool fill_with_hole,
Label* gc_required) {
Label not_empty, allocated;
// Load the initial map from the array function.
__ mov(elements_array,
FieldOperand(array_function,
JSFunction::kPrototypeOrInitialMapOffset));
// Check whether an empty sized array is requested.
__ test(array_size, Operand(array_size));
__ j(not_zero, &not_empty);
// If an empty array is requested allocate a small elements array anyway. This
// keeps the code below free of special casing for the empty array.
int size = JSArray::kSize + FixedArray::SizeFor(kPreallocatedArrayElements);
__ AllocateObjectInNewSpace(size,
result,
elements_array_end,
scratch,
gc_required,
TAG_OBJECT);
__ jmp(&allocated);
// Allocate the JSArray object together with space for a FixedArray with the
// requested elements.
__ bind(&not_empty);
ASSERT(kSmiTagSize == 1 && kSmiTag == 0);
__ AllocateObjectInNewSpace(JSArray::kSize + FixedArray::kHeaderSize,
times_half_pointer_size, // array_size is a smi.
array_size,
result,
elements_array_end,
scratch,
gc_required,
TAG_OBJECT);
// Allocated the JSArray. Now initialize the fields except for the elements
// array.
// result: JSObject
// elements_array: initial map
// elements_array_end: start of next object
// array_size: size of array (smi)
__ bind(&allocated);
__ mov(FieldOperand(result, JSObject::kMapOffset), elements_array);
__ mov(elements_array, Factory::empty_fixed_array());
__ mov(FieldOperand(result, JSArray::kPropertiesOffset), elements_array);
// Field JSArray::kElementsOffset is initialized later.
__ mov(FieldOperand(result, JSArray::kLengthOffset), array_size);
// Calculate the location of the elements array and set elements array member
// of the JSArray.
// result: JSObject
// elements_array_end: start of next object
// array_size: size of array (smi)
__ lea(elements_array, Operand(result, JSArray::kSize));
__ mov(FieldOperand(result, JSArray::kElementsOffset), elements_array);
// Initialize the fixed array. FixedArray length is not stored as a smi.
// result: JSObject
// elements_array: elements array
// elements_array_end: start of next object
// array_size: size of array (smi)
ASSERT(kSmiTag == 0);
__ shr(array_size, kSmiTagSize); // Convert from smi to value.
__ mov(FieldOperand(elements_array, JSObject::kMapOffset),
Factory::fixed_array_map());
Label not_empty_2, fill_array;
__ test(array_size, Operand(array_size));
__ j(not_zero, &not_empty_2);
// Length of the FixedArray is the number of pre-allocated elements even
// though the actual JSArray has length 0.
__ mov(FieldOperand(elements_array, Array::kLengthOffset),
Immediate(kPreallocatedArrayElements));
__ jmp(&fill_array);
__ bind(&not_empty_2);
// For non-empty JSArrays the length of the FixedArray and the JSArray is the
// same.
__ mov(FieldOperand(elements_array, Array::kLengthOffset), array_size);
// Fill the allocated FixedArray with the hole value if requested.
// result: JSObject
// elements_array: elements array
// elements_array_end: start of next object
__ bind(&fill_array);
if (fill_with_hole) {
Label loop, entry;
__ mov(scratch, Factory::the_hole_value());
__ lea(elements_array, Operand(elements_array,
FixedArray::kHeaderSize - kHeapObjectTag));
__ jmp(&entry);
__ bind(&loop);
__ mov(Operand(elements_array, 0), scratch);
__ add(Operand(elements_array), Immediate(kPointerSize));
__ bind(&entry);
__ cmp(elements_array, Operand(elements_array_end));
__ j(below, &loop);
}
}
// Create a new array for the built-in Array function. This function allocates
// the JSArray object and the FixedArray elements array and initializes these.
// If the Array cannot be constructed in native code the runtime is called. This
// function assumes the following state:
// edi: constructor (built-in Array function)
// eax: argc
// esp[0]: return address
// esp[4]: last argument
// This function is used for both construct and normal calls of Array. Whether
// it is a construct call or not is indicated by the construct_call parameter.
// The only difference between handling a construct call and a normal call is
// that for a construct call the constructor function in edi needs to be
// preserved for entering the generic code. In both cases argc in eax needs to
// be preserved.
static void ArrayNativeCode(MacroAssembler* masm,
bool construct_call,
Label *call_generic_code) {
Label argc_one_or_more, argc_two_or_more, prepare_generic_code_call;
// Push the constructor and argc. No need to tag argc as a smi, as there will
// be no garbage collection with this on the stack.
int push_count = 0;
if (construct_call) {
push_count++;
__ push(edi);
}
push_count++;
__ push(eax);
// Check for array construction with zero arguments.
__ test(eax, Operand(eax));
__ j(not_zero, &argc_one_or_more);
// Handle construction of an empty array.
AllocateEmptyJSArray(masm,
edi,
eax,
ebx,
ecx,
edi,
kPreallocatedArrayElements,
&prepare_generic_code_call);
__ IncrementCounter(&Counters::array_function_native, 1);
__ pop(ebx);
if (construct_call) {
__ pop(edi);
}
__ ret(kPointerSize);
// Check for one argument. Bail out if the argument is not a smi or if it is
// negative.
__ bind(&argc_one_or_more);
__ cmp(eax, 1);
__ j(not_equal, &argc_two_or_more);
ASSERT(kSmiTag == 0);
__ test(Operand(esp, (push_count + 1) * kPointerSize),
Immediate(kIntptrSignBit | kSmiTagMask));
__ j(not_zero, &prepare_generic_code_call);
// Handle construction of an empty array of a certain size. Get the size from
// the stack and bail out if the size is too large to actually allocate an
// elements array.
__ mov(edx, Operand(esp, (push_count + 1) * kPointerSize));
ASSERT(kSmiTag == 0);
__ cmp(edx, JSObject::kInitialMaxFastElementArray << kSmiTagSize);
__ j(greater_equal, &prepare_generic_code_call);
// edx: array_size (smi)
// edi: constructor
// esp[0]: argc
// esp[4]: constructor (only if construct_call)
// esp[8]: return address
// esp[C]: argument
AllocateJSArray(masm,
edi,
edx,
eax,
ebx,
ecx,
edi,
true,
&prepare_generic_code_call);
__ IncrementCounter(&Counters::array_function_native, 1);
__ pop(ebx);
if (construct_call) {
__ pop(edi);
}
__ ret(2 * kPointerSize);
// Handle construction of an array from a list of arguments.
__ bind(&argc_two_or_more);
ASSERT(kSmiTag == 0);
__ shl(eax, kSmiTagSize); // Convert argc to a smi.
// eax: array_size (smi)
// edi: constructor
// esp[0] : argc
// esp[4]: constructor (only if construct_call)
// esp[8] : return address
// esp[C] : last argument
AllocateJSArray(masm,
edi,
eax,
ebx,
ecx,
edx,
edi,
false,
&prepare_generic_code_call);
__ IncrementCounter(&Counters::array_function_native, 1);
__ mov(eax, ebx);
__ pop(ebx);
if (construct_call) {
__ pop(edi);
}
__ push(eax);
// eax: JSArray
// ebx: argc
// edx: elements_array_end (untagged)
// esp[0]: JSArray
// esp[4]: return address
// esp[8]: last argument
// Location of the last argument
__ lea(edi, Operand(esp, 2 * kPointerSize));
// Location of the first array element (the fill_with_hole parameter to
// AllocateJSArray is false, so the FixedArray is returned in ecx).
__ lea(edx, Operand(ecx, FixedArray::kHeaderSize - kHeapObjectTag));
// ebx: argc
// edx: location of the first array element
// edi: location of the last argument
// esp[0]: JSArray
// esp[4]: return address
// esp[8]: last argument
Label loop, entry;
__ mov(ecx, ebx);
__ jmp(&entry);
__ bind(&loop);
__ mov(eax, Operand(edi, ecx, times_pointer_size, 0));
__ mov(Operand(edx, 0), eax);
__ add(Operand(edx), Immediate(kPointerSize));
__ bind(&entry);
__ dec(ecx);
__ j(greater_equal, &loop);
// Remove caller arguments from the stack and return.
// ebx: argc
// esp[0]: JSArray
// esp[4]: return address
// esp[8]: last argument
__ pop(eax);
__ pop(ecx);
__ lea(esp, Operand(esp, ebx, times_pointer_size, 1 * kPointerSize));
__ push(ecx);
__ ret(0);
// Restore argc and constructor before running the generic code.
__ bind(&prepare_generic_code_call);
__ pop(eax);
if (construct_call) {
__ pop(edi);
}
__ jmp(call_generic_code);
}
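A sketch, in JavaScript, of the three argc cases the native code above distinguishes (this shows the observable result, not the implementation):

var a0 = new Array();          // argc == 0: empty array, preallocated backing store
var a1 = new Array(7);         // argc == 1, non-negative smi: length 7, all holes
var a2 = new Array(1, 2, 3);   // argc >= 2: the arguments become the elements
// Negative, non-smi, or too-large sizes (>= JSObject::kInitialMaxFastElementArray)
// bail out to the generic code via prepare_generic_code_call.
var a3 = new Array(100000);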
void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- eax : argc
// -- esp[0] : return address
// -- esp[4] : last argument
// -----------------------------------
Label generic_array_code, one_or_more_arguments, two_or_more_arguments;
// Get the Array function.
GenerateLoadArrayFunction(masm, edi);
if (FLAG_debug_code) {
// Initial map for the builtin Array function should be a map.
__ mov(ebx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
// The test below will catch both a NULL pointer and a Smi.
__ test(ebx, Immediate(kSmiTagMask));
__ Assert(not_zero, "Unexpected initial map for Array function");
__ CmpObjectType(ebx, MAP_TYPE, ecx);
__ Assert(equal, "Unexpected initial map for Array function");
}
// Run the native code for the Array function called as a normal function.
ArrayNativeCode(masm, false, &generic_array_code);
// Jump to the generic array code in case the specialized code cannot handle
// the construction.
__ bind(&generic_array_code);
Code* code = Builtins::builtin(Builtins::ArrayCodeGeneric);
Handle<Code> array_code(code);
__ jmp(array_code, RelocInfo::CODE_TARGET);
}
void Builtins::Generate_ArrayConstructCode(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- eax : argc
// -- edi : constructor
// -- esp[0] : return address
// -- esp[4] : last argument
// -----------------------------------
Label generic_constructor;
if (FLAG_debug_code) {
// The array construct code is only set for the builtin Array function, which
// always has a map.
GenerateLoadArrayFunction(masm, ebx);
__ cmp(edi, Operand(ebx));
__ Assert(equal, "Unexpected Array function");
// Initial map for the builtin Array function should be a map.
__ mov(ebx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
// The test below will catch both a NULL pointer and a Smi.
__ test(ebx, Immediate(kSmiTagMask));
__ Assert(not_zero, "Unexpected initial map for Array function");
__ CmpObjectType(ebx, MAP_TYPE, ecx);
__ Assert(equal, "Unexpected initial map for Array function");
}
// Run the native code for the Array function called as constructor.
ArrayNativeCode(masm, true, &generic_constructor);
// Jump to the generic construct code in case the specialized code cannot
// handle the construction.
__ bind(&generic_constructor);
Code* code = Builtins::builtin(Builtins::JSConstructStubGeneric);
Handle<Code> generic_construct_stub(code);
__ jmp(generic_construct_stub, RelocInfo::CODE_TARGET);
}
static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
__ push(ebp);
__ mov(ebp, Operand(esp));

14
deps/v8/src/log-utils.cc

@ -310,6 +310,20 @@ void LogMessageBuilder::AppendDetailed(String* str, bool show_impl_info) {
}
void LogMessageBuilder::AppendStringPart(const char* str, int len) {
if (pos_ + len > Log::kMessageBufferSize) {
len = Log::kMessageBufferSize - pos_;
ASSERT(len >= 0);
if (len == 0) return;
}
Vector<char> buf(Log::message_buffer_ + pos_,
Log::kMessageBufferSize - pos_);
OS::StrNCpy(buf, str, len);
pos_ += len;
ASSERT(pos_ <= Log::kMessageBufferSize);
}
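The clamp in AppendStringPart guarantees a message never grows past the buffer; a small JavaScript sketch of the same arithmetic (buffer size and names mirror the C++ but are illustrative):

var kMessageBufferSize = 2048;
function appendStringPart(buffer, pos, str, len) {
  // Truncate the copy so it never runs past the end of the buffer.
  if (pos + len > kMessageBufferSize) {
    len = kMessageBufferSize - pos;
    if (len <= 0) return pos;          // buffer already full
  }
  for (var i = 0; i < len; i++) buffer[pos + i] = str.charAt(i);
  return pos + len;                    // new write position, <= kMessageBufferSize
}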
bool LogMessageBuilder::StoreInCompressor(LogRecordCompressor* compressor) {
return compressor->Store(Vector<const char>(Log::message_buffer_, pos_));
}

9
deps/v8/src/log-utils.h

@ -114,6 +114,9 @@ class Log : public AllStatic {
return !is_stopped_ && (output_handle_ != NULL || output_buffer_ != NULL);
}
// Size of buffer used for formatting log messages.
static const int kMessageBufferSize = 2048;
private:
typedef int (*WritePtr)(const char* msg, int length);
@ -162,9 +165,6 @@ class Log : public AllStatic {
// access to the formatting buffer and the log file or log memory buffer.
static Mutex* mutex_;
// Size of buffer used for formatting log messages.
static const int kMessageBufferSize = 2048;
// Buffer used for formatting log messages. This is a singleton buffer and
// mutex_ should be acquired before using it.
static char* message_buffer_;
@ -247,6 +247,9 @@ class LogMessageBuilder BASE_EMBEDDED {
void AppendDetailed(String* str, bool show_impl_info);
// Append a portion of a string.
void AppendStringPart(const char* str, int len);
// Stores log message into compressor, returns true if the message
// was stored (i.e. doesn't repeat the previous one).
bool StoreInCompressor(LogRecordCompressor* compressor);

43
deps/v8/src/log.cc

@ -889,14 +889,51 @@ void Logger::HeapSampleJSConstructorEvent(const char* constructor,
#ifdef ENABLE_LOGGING_AND_PROFILING
if (!Log::IsEnabled() || !FLAG_log_gc) return;
LogMessageBuilder msg;
msg.Append("heap-js-cons-item,%s,%d,%d\n",
constructor[0] != '\0' ? constructor : "(anonymous)",
number, bytes);
msg.Append("heap-js-cons-item,%s,%d,%d\n", constructor, number, bytes);
msg.WriteToLogFile();
#endif
}
void Logger::HeapSampleJSRetainersEvent(
const char* constructor, const char* event) {
#ifdef ENABLE_LOGGING_AND_PROFILING
if (!Log::IsEnabled() || !FLAG_log_gc) return;
// Event starts with comma, so we don't have it in the format string.
static const char* event_text = "heap-js-ret-item,%s";
// We take placeholder strings into account, but it's OK to be conservative.
static const int event_text_len = strlen(event_text);
const int cons_len = strlen(constructor), event_len = strlen(event);
int pos = 0;
// Retainer lists can be long. We may need to split them into multiple events.
do {
LogMessageBuilder msg;
msg.Append(event_text, constructor);
int to_write = event_len - pos;
if (to_write > Log::kMessageBufferSize - (cons_len + event_text_len)) {
int cut_pos = pos + Log::kMessageBufferSize - (cons_len + event_text_len);
ASSERT(cut_pos < event_len);
while (cut_pos > pos && event[cut_pos] != ',') --cut_pos;
if (event[cut_pos] != ',') {
// Crash in debug mode, skip in release mode.
ASSERT(false);
return;
}
// Append a piece of the event that fits, without a trailing comma.
msg.AppendStringPart(event + pos, cut_pos - pos);
// Start next piece with comma.
pos = cut_pos;
} else {
msg.Append("%s", event + pos);
pos += event_len;
}
msg.Append('\n');
msg.WriteToLogFile();
} while (pos < event_len);
#endif
}
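The loop above cuts the retainer list only at commas so that no single item is split across log records; a JavaScript sketch of that chunking (maxLen stands in for the buffer space left after the event prefix):

// Split a comma-separated retainer list into chunks of at most maxLen
// characters, cutting only at commas. Every chunk after the first starts
// with ',' just like the pieces appended by the C++ code.
function splitRetainers(event, maxLen) {
  var chunks = [];
  var pos = 0;
  do {
    if (event.length - pos > maxLen) {
      var cut = pos + maxLen;
      while (cut > pos && event.charAt(cut) !== ',') --cut;
      if (cut === pos || event.charAt(cut) !== ',') {
        throw new Error('single item longer than the buffer');
      }
      chunks.push(event.slice(pos, cut));
      pos = cut;                       // next chunk begins at the comma
    } else {
      chunks.push(event.slice(pos));
      pos = event.length;
    }
  } while (pos < event.length);
  return chunks;
}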
void Logger::DebugTag(const char* call_site_tag) {
#ifdef ENABLE_LOGGING_AND_PROFILING
if (!Log::IsEnabled() || !FLAG_log) return;

2
deps/v8/src/log.h

@ -221,6 +221,8 @@ class Logger {
static void HeapSampleItemEvent(const char* type, int number, int bytes);
static void HeapSampleJSConstructorEvent(const char* constructor,
int number, int bytes);
static void HeapSampleJSRetainersEvent(const char* constructor,
const char* event);
static void HeapSampleStats(const char* space, const char* kind,
int capacity, int used);

49
deps/v8/src/mark-compact.cc

@ -265,18 +265,6 @@ class MarkingVisitor : public ObjectVisitor {
for (Object** p = start; p < end; p++) MarkObjectByPointer(p);
}
void BeginCodeIteration(Code* code) {
// When iterating over a code object during marking
// ic targets are derived pointers.
ASSERT(code->ic_flag() == Code::IC_TARGET_IS_ADDRESS);
}
void EndCodeIteration(Code* code) {
// If this is a compacting collection, set ic targets
// are pointing to object headers.
if (IsCompacting()) code->set_ic_flag(Code::IC_TARGET_IS_OBJECT);
}
void VisitCodeTarget(RelocInfo* rinfo) {
ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode()));
Code* code = Code::GetCodeFromTargetAddress(rinfo->target_address());
@ -287,11 +275,6 @@ class MarkingVisitor : public ObjectVisitor {
} else {
MarkCompactCollector::MarkObject(code);
}
if (IsCompacting()) {
// When compacting we convert the target to a real object pointer.
code = Code::GetCodeFromTargetAddress(rinfo->target_address());
rinfo->set_target_object(code);
}
}
void VisitDebugTarget(RelocInfo* rinfo) {
@ -1187,12 +1170,6 @@ static void SweepSpace(PagedSpace* space, DeallocateFunction dealloc) {
if (object->IsMarked()) {
object->ClearMark();
MarkCompactCollector::tracer()->decrement_marked_count();
if (MarkCompactCollector::IsCompacting() && object->IsCode()) {
// If this is compacting collection marked code objects have had
// their IC targets converted to objects.
// They need to be converted back to addresses.
Code::cast(object)->ConvertICTargetsFromObjectToAddress();
}
if (!is_previous_alive) { // Transition from free to live.
dealloc(free_start, current - free_start);
is_previous_alive = true;
@ -1398,6 +1375,14 @@ class UpdatingVisitor: public ObjectVisitor {
for (Object** p = start; p < end; p++) UpdatePointer(p);
}
void VisitCodeTarget(RelocInfo* rinfo) {
ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode()));
Object* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
VisitPointer(&target);
rinfo->set_target_address(
reinterpret_cast<Code*>(target)->instruction_start());
}
private:
void UpdatePointer(Object** p) {
if (!(*p)->IsHeapObject()) return;
@ -1631,11 +1616,6 @@ void MarkCompactCollector::RelocateObjects() {
ASSERT(live_cells == live_cell_objects_);
ASSERT(live_news == live_young_objects_);
// Notify code object in LO to convert IC target to address
// This must happen after lo_space_->Compact
LargeObjectIterator it(Heap::lo_space());
while (it.has_next()) { ConvertCodeICTargetToAddress(it.next()); }
// Flip from and to spaces
Heap::new_space()->Flip();
@ -1654,14 +1634,6 @@ void MarkCompactCollector::RelocateObjects() {
}
int MarkCompactCollector::ConvertCodeICTargetToAddress(HeapObject* obj) {
if (obj->IsCode()) {
Code::cast(obj)->ConvertICTargetsFromObjectToAddress();
}
return obj->Size();
}
int MarkCompactCollector::RelocateMapObject(HeapObject* obj) {
// Recover map pointer.
MapWord encoding = obj->map_word();
@ -1770,11 +1742,6 @@ int MarkCompactCollector::RelocateCodeObject(HeapObject* obj) {
// Reset the map pointer.
int obj_size = RestoreMap(obj, Heap::code_space(), new_addr, map_addr);
// Convert inline cache target to address using old address.
if (obj->IsCode()) {
Code::cast(obj)->ConvertICTargetsFromObjectToAddress();
}
Address old_addr = obj->address();
if (new_addr != old_addr) {

3
deps/v8/src/messages.js

@ -167,7 +167,8 @@ function FormatMessage(message) {
no_input_to_regexp: "No input to %0",
result_not_primitive: "Result of %0 must be a primitive, was %1",
invalid_json: "String '%0' is not valid JSON",
circular_structure: "Converting circular structure to JSON"
circular_structure: "Converting circular structure to JSON",
object_keys_non_object: "Object.keys called on non-object"
};
}
var format = kMessages[message.type];

1
deps/v8/src/objects-debug.cc

@ -733,7 +733,6 @@ void Code::CodePrint() {
void Code::CodeVerify() {
CHECK(ic_flag() == IC_TARGET_IS_ADDRESS);
CHECK(IsAligned(reinterpret_cast<intptr_t>(instruction_start()),
static_cast<intptr_t>(kCodeAlignment)));
Address last_gc_pc = NULL;

10
deps/v8/src/objects-inl.h

@ -2538,16 +2538,6 @@ INT_ACCESSORS(Code, relocation_size, kRelocationSizeOffset)
INT_ACCESSORS(Code, sinfo_size, kSInfoSizeOffset)
Code::ICTargetState Code::ic_flag() {
return static_cast<ICTargetState>(READ_BYTE_FIELD(this, kICFlagOffset));
}
void Code::set_ic_flag(ICTargetState value) {
WRITE_BYTE_FIELD(this, kICFlagOffset, value);
}
byte* Code::instruction_start() {
return FIELD_ADDR(this, kHeaderSize);
}

98
deps/v8/src/objects.cc

@ -476,6 +476,21 @@ Object* JSObject::DeleteNormalizedProperty(String* name, DeleteMode mode) {
}
bool JSObject::IsDirty() {
Object* cons_obj = map()->constructor();
if (!cons_obj->IsJSFunction())
return true;
JSFunction* fun = JSFunction::cast(cons_obj);
if (!fun->shared()->function_data()->IsFunctionTemplateInfo())
return true;
// If the object is fully fast case and has the same map it was
// created with then no changes can have been made to it.
return map() != fun->initial_map()
|| !HasFastElements()
|| !HasFastProperties();
}
Object* Object::GetProperty(Object* receiver,
LookupResult* result,
String* name,
@ -4940,60 +4955,25 @@ void SharedFunctionInfo::SharedFunctionInfoIterateBody(ObjectVisitor* v) {
}
void ObjectVisitor::BeginCodeIteration(Code* code) {
ASSERT(code->ic_flag() == Code::IC_TARGET_IS_OBJECT);
}
void ObjectVisitor::VisitCodeTarget(RelocInfo* rinfo) {
ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode()));
VisitPointer(rinfo->target_object_address());
Object* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
Object* old_target = target;
VisitPointer(&target);
CHECK_EQ(target, old_target); // VisitPointer doesn't change Code* *target.
}
void ObjectVisitor::VisitDebugTarget(RelocInfo* rinfo) {
ASSERT(RelocInfo::IsJSReturn(rinfo->rmode()) && rinfo->IsCallInstruction());
VisitPointer(rinfo->call_object_address());
}
// Convert relocatable targets from address to code object address. This is
// mainly IC call targets but for debugging straight-line code can be replaced
// with a call instruction which also has to be relocated.
void Code::ConvertICTargetsFromAddressToObject() {
ASSERT(ic_flag() == IC_TARGET_IS_ADDRESS);
for (RelocIterator it(this, RelocInfo::kCodeTargetMask);
!it.done(); it.next()) {
Address ic_addr = it.rinfo()->target_address();
ASSERT(ic_addr != NULL);
HeapObject* code = Code::GetCodeFromTargetAddress(ic_addr);
ASSERT(code->IsHeapObject());
it.rinfo()->set_target_object(code);
}
#ifdef ENABLE_DEBUGGER_SUPPORT
if (Debug::has_break_points()) {
for (RelocIterator it(this, RelocInfo::ModeMask(RelocInfo::JS_RETURN));
!it.done();
it.next()) {
if (it.rinfo()->IsCallInstruction()) {
Address addr = it.rinfo()->call_address();
ASSERT(addr != NULL);
HeapObject* code = Code::GetCodeFromTargetAddress(addr);
ASSERT(code->IsHeapObject());
it.rinfo()->set_call_object(code);
}
}
}
#endif
set_ic_flag(IC_TARGET_IS_OBJECT);
Object* target = Code::GetCodeFromTargetAddress(rinfo->call_address());
Object* old_target = target;
VisitPointer(&target);
CHECK_EQ(target, old_target); // VisitPointer doesn't change Code* *target.
}
void Code::CodeIterateBody(ObjectVisitor* v) {
v->BeginCodeIteration(this);
int mode_mask = RelocInfo::kCodeTargetMask |
RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
@ -5020,38 +5000,6 @@ void Code::CodeIterateBody(ObjectVisitor* v) {
}
ScopeInfo<>::IterateScopeInfo(this, v);
v->EndCodeIteration(this);
}
void Code::ConvertICTargetsFromObjectToAddress() {
ASSERT(ic_flag() == IC_TARGET_IS_OBJECT);
for (RelocIterator it(this, RelocInfo::kCodeTargetMask);
!it.done(); it.next()) {
// We cannot use the safe cast (Code::cast) here, because we may be in
// the middle of relocating old objects during GC and the map pointer in
// the code object may be mangled
Code* code = reinterpret_cast<Code*>(it.rinfo()->target_object());
ASSERT((code != NULL) && code->IsHeapObject());
it.rinfo()->set_target_address(code->instruction_start());
}
#ifdef ENABLE_DEBUGGER_SUPPORT
if (Debug::has_break_points()) {
for (RelocIterator it(this, RelocInfo::ModeMask(RelocInfo::JS_RETURN));
!it.done();
it.next()) {
if (it.rinfo()->IsCallInstruction()) {
Code* code = reinterpret_cast<Code*>(it.rinfo()->call_object());
ASSERT((code != NULL) && code->IsHeapObject());
it.rinfo()->set_call_address(code->instruction_start());
}
}
}
#endif
set_ic_flag(IC_TARGET_IS_ADDRESS);
}

30
deps/v8/src/objects.h

@ -1428,6 +1428,10 @@ class JSObject: public HeapObject {
// Tells whether this object needs to be loaded.
inline bool IsLoaded();
// Returns true if this is an instance of an api function and has
// been modified since it was created. May give false positives.
bool IsDirty();
bool HasProperty(String* name) {
return GetPropertyAttribute(name) != ABSENT;
}
@ -2516,13 +2520,6 @@ class Code: public HeapObject {
NUMBER_OF_KINDS = KEYED_STORE_IC + 1
};
// A state indicates that inline cache in this Code object contains
// objects or relative instruction addresses.
enum ICTargetState {
IC_TARGET_IS_ADDRESS,
IC_TARGET_IS_OBJECT
};
#ifdef ENABLE_DISASSEMBLER
// Printing
static const char* Kind2String(Kind kind);
@ -2562,12 +2559,6 @@ class Code: public HeapObject {
inline bool is_keyed_store_stub() { return kind() == KEYED_STORE_IC; }
inline bool is_call_stub() { return kind() == CALL_IC; }
// [ic_flag]: State of inline cache targets. The flag is set to the
// object variant in ConvertICTargetsFromAddressToObject, and set to
// the address variant in ConvertICTargetsFromObjectToAddress.
inline ICTargetState ic_flag();
inline void set_ic_flag(ICTargetState value);
// [major_key]: For kind STUB, the major key.
inline CodeStub::Major major_key();
inline void set_major_key(CodeStub::Major major);
@ -2613,12 +2604,6 @@ class Code: public HeapObject {
// Returns the address of the scope information.
inline byte* sinfo_start();
// Convert inline cache target from address to code object before GC.
void ConvertICTargetsFromAddressToObject();
// Convert inline cache target from code object to address after GC
void ConvertICTargetsFromObjectToAddress();
// Relocate the code by delta bytes. Called to signal that this code
// object has been moved by delta bytes.
void Relocate(int delta);
@ -2674,7 +2659,6 @@ class Code: public HeapObject {
~kCodeAlignmentMask;
// Byte offsets within kKindSpecificFlagsOffset.
static const int kICFlagOffset = kKindSpecificFlagsOffset + 0;
static const int kStubMajorKeyOffset = kKindSpecificFlagsOffset + 1;
// Flags layout.
@ -4806,9 +4790,6 @@ class ObjectVisitor BASE_EMBEDDED {
// To allow lazy clearing of inline caches the visitor has
// a rich interface for iterating over Code objects.
// Called prior to visiting the body of a Code object.
virtual void BeginCodeIteration(Code* code);
// Visits a code target in the instruction stream.
virtual void VisitCodeTarget(RelocInfo* rinfo);
@ -4818,9 +4799,6 @@ class ObjectVisitor BASE_EMBEDDED {
// Visits a debug call target in the instruction stream.
virtual void VisitDebugTarget(RelocInfo* rinfo);
// Called after completing visiting the body of a Code object.
virtual void EndCodeIteration(Code* code) {}
// Handy shorthand for visiting a single pointer.
virtual void VisitPointer(Object** p) { VisitPointers(p, p + 1); }

79
deps/v8/src/runtime.cc

@ -1208,6 +1208,14 @@ static Object* Runtime_FunctionIsAPIFunction(Arguments args) {
: Heap::false_value();
}
static Object* Runtime_FunctionIsBuiltin(Arguments args) {
NoHandleAllocation ha;
ASSERT(args.length() == 1);
CONVERT_CHECKED(JSFunction, f, args[0]);
return f->IsBuiltin() ? Heap::true_value() : Heap::false_value();
}
static Object* Runtime_SetCode(Arguments args) {
HandleScope scope;
@ -2992,7 +3000,8 @@ static Object* Runtime_GetPropertyNamesFast(Arguments args) {
HandleScope scope;
Handle<JSObject> object(raw_object);
Handle<FixedArray> content = GetKeysInFixedArrayFor(object);
Handle<FixedArray> content = GetKeysInFixedArrayFor(object,
INCLUDE_PROTOS);
// Test again, since cache may have been built by preceding call.
if (object->IsSimpleEnum()) return object->map();
@ -3001,6 +3010,22 @@ static Object* Runtime_GetPropertyNamesFast(Arguments args) {
}
static Object* Runtime_LocalKeys(Arguments args) {
ASSERT_EQ(args.length(), 1);
CONVERT_CHECKED(JSObject, raw_object, args[0]);
HandleScope scope;
Handle<JSObject> object(raw_object);
Handle<FixedArray> contents = GetKeysInFixedArrayFor(object,
LOCAL_ONLY);
// Some fast paths through GetKeysInFixedArrayFor reuse a cached
// property array and since the result is mutable we have to create
// a fresh clone on each invocation.
Handle<FixedArray> copy = Factory::NewFixedArray(contents->length());
contents->CopyTo(0, *copy, 0, contents->length());
return *Factory::NewJSArrayWithElements(copy);
}
static Object* Runtime_GetArgumentsProperty(Arguments args) {
NoHandleAllocation ha;
ASSERT(args.length() == 1);
@ -5516,7 +5541,7 @@ static Object* Runtime_GetArrayKeys(Arguments args) {
if (array->elements()->IsDictionary()) {
// Create an array and get all the keys into it, then remove all the
// keys that are not integers in the range 0 to length-1.
Handle<FixedArray> keys = GetKeysInFixedArrayFor(array);
Handle<FixedArray> keys = GetKeysInFixedArrayFor(array, INCLUDE_PROTOS);
int keys_length = keys->length();
for (int i = 0; i < keys_length; i++) {
Object* key = keys->get(i);
@ -5738,37 +5763,27 @@ static Object* Runtime_DebugGetPropertyDetails(Arguments args) {
int length = LocalPrototypeChainLength(*obj);
// Try local lookup on each of the objects.
LookupResult result;
Handle<JSObject> jsproto = obj;
for (int i = 0; i < length; i++) {
LookupResult result;
jsproto->LocalLookup(*name, &result);
if (result.IsProperty()) {
break;
}
if (i < length - 1) {
jsproto = Handle<JSObject>(JSObject::cast(jsproto->GetPrototype()));
}
}
if (result.IsProperty()) {
// LookupResult is not GC safe as all its members are raw object pointers.
// When calling DebugLookupResultValue GC can happen as this might invoke
// callbacks. After the call to DebugLookupResultValue the callback object
// in the LookupResult might still be needed. Put it into a handle for later
// use.
// LookupResult is not GC safe as it holds raw object pointers.
// GC can happen later in this code so put the required fields into
// local variables using handles when required for later use.
PropertyType result_type = result.type();
Handle<Object> result_callback_obj;
if (result_type == CALLBACKS) {
result_callback_obj = Handle<Object>(result.GetCallbackObject());
}
// Find the actual value. Don't use result after this call as it's content
// can be invalid.
Smi* property_details = result.GetPropertyDetails().AsSmi();
// DebugLookupResultValue can cause GC so details from LookupResult needs
// to be copied to handles before this.
bool caught_exception = false;
Object* value = DebugLookupResultValue(*obj, *name, &result,
Object* raw_value = DebugLookupResultValue(*obj, *name, &result,
&caught_exception);
if (value->IsFailure()) return value;
Handle<Object> value_handle(value);
if (raw_value->IsFailure()) return raw_value;
Handle<Object> value(raw_value);
// If the callback object is a fixed array then it contains JavaScript
// getter and/or setter.
@ -5776,17 +5791,23 @@ static Object* Runtime_DebugGetPropertyDetails(Arguments args) {
result_callback_obj->IsFixedArray();
Handle<FixedArray> details =
Factory::NewFixedArray(hasJavaScriptAccessors ? 5 : 2);
details->set(0, *value_handle);
details->set(1, result.GetPropertyDetails().AsSmi());
details->set(0, *value);
details->set(1, property_details);
if (hasJavaScriptAccessors) {
details->set(2,
caught_exception ? Heap::true_value() : Heap::false_value());
details->set(3, FixedArray::cast(result.GetCallbackObject())->get(0));
details->set(4, FixedArray::cast(result.GetCallbackObject())->get(1));
caught_exception ? Heap::true_value()
: Heap::false_value());
details->set(3, FixedArray::cast(*result_callback_obj)->get(0));
details->set(4, FixedArray::cast(*result_callback_obj)->get(1));
}
return *Factory::NewJSArrayWithElements(details);
}
if (i < length - 1) {
jsproto = Handle<JSObject>(JSObject::cast(jsproto->GetPrototype()));
}
}
return Heap::undefined_value();
}
@ -6271,7 +6292,7 @@ static Handle<JSObject> MaterializeLocalScope(JavaScriptFrame* frame) {
if (function_context->has_extension() &&
!function_context->IsGlobalContext()) {
Handle<JSObject> ext(JSObject::cast(function_context->extension()));
Handle<FixedArray> keys = GetKeysInFixedArrayFor(ext);
Handle<FixedArray> keys = GetKeysInFixedArrayFor(ext, INCLUDE_PROTOS);
for (int i = 0; i < keys->length(); i++) {
// Names of variables introduced by eval are strings.
ASSERT(keys->get(i)->IsString());
@ -6320,7 +6341,7 @@ static Handle<JSObject> MaterializeClosure(Handle<Context> context) {
// be variables introduced by eval.
if (context->has_extension()) {
Handle<JSObject> ext(JSObject::cast(context->extension()));
Handle<FixedArray> keys = GetKeysInFixedArrayFor(ext);
Handle<FixedArray> keys = GetKeysInFixedArrayFor(ext, INCLUDE_PROTOS);
for (int i = 0; i < keys->length(); i++) {
// Names of variables introduced by eval are strings.
ASSERT(keys->get(i)->IsString());

3
deps/v8/src/runtime.h

@ -171,6 +171,7 @@ namespace internal {
F(FunctionGetScriptSourcePosition, 1, 1) \
F(FunctionGetPositionForOffset, 2, 1) \
F(FunctionIsAPIFunction, 1, 1) \
F(FunctionIsBuiltin, 1, 1) \
F(GetScript, 1, 1) \
F(CollectStackTrace, 2, 1) \
\
@ -258,6 +259,8 @@ namespace internal {
F(Abort, 2, 1) \
/* Logging */ \
F(Log, 2, 1) \
/* ES5 */ \
F(LocalKeys, 1, 1) \
\
/* Pseudo functions - handled as macros by parser */ \
F(IS_VAR, 1, 1)

38
deps/v8/src/serialize.cc

@ -935,6 +935,15 @@ class ReferenceUpdater: public ObjectVisitor {
}
}
virtual void VisitCodeTarget(RelocInfo* rinfo) {
ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode()));
Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
Address encoded_target = serializer_->GetSavedAddress(target);
offsets_.Add(rinfo->target_address_address() - obj_address_);
addresses_.Add(encoded_target);
}
virtual void VisitExternalReferences(Address* start, Address* end) {
for (Address* p = start; p < end; ++p) {
uint32_t code = reference_encoder_->Encode(*p);
@ -1093,6 +1102,14 @@ void Serializer::VisitPointers(Object** start, Object** end) {
}
void Serializer::VisitCodeTarget(RelocInfo* rinfo) {
ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode()));
Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
bool serialized;
Encode(target, &serialized);
}
class GlobalHandlesRetriever: public ObjectVisitor {
public:
explicit GlobalHandlesRetriever(List<Object**>* handles)
@ -1255,10 +1272,7 @@ Address Serializer::PutObject(HeapObject* obj) {
SaveAddress(obj, addr);
if (type == CODE_TYPE) {
Code* code = Code::cast(obj);
// Ensure Code objects contain Object pointers, not Addresses.
code->ConvertICTargetsFromAddressToObject();
LOG(CodeMoveEvent(code->address(), addr));
LOG(CodeMoveEvent(obj->address(), addr));
}
// Write out the object prologue: type, size, and simulated address of obj.
@ -1290,12 +1304,6 @@ Address Serializer::PutObject(HeapObject* obj) {
}
#endif
if (type == CODE_TYPE) {
Code* code = Code::cast(obj);
// Convert relocations from Object* to Address in Code objects
code->ConvertICTargetsFromObjectToAddress();
}
objects_++;
return addr;
}
@ -1422,6 +1430,14 @@ void Deserializer::VisitPointers(Object** start, Object** end) {
}
void Deserializer::VisitCodeTarget(RelocInfo* rinfo) {
ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode()));
Address encoded_address = reinterpret_cast<Address>(rinfo->target_object());
Code* target_object = reinterpret_cast<Code*>(Resolve(encoded_address));
rinfo->set_target_address(target_object->instruction_start());
}
void Deserializer::VisitExternalReferences(Address* start, Address* end) {
for (Address* p = start; p < end; ++p) {
uint32_t code = static_cast<uint32_t>(reinterpret_cast<uintptr_t>(*p));
@ -1617,8 +1633,6 @@ Object* Deserializer::GetObject() {
if (type == CODE_TYPE) {
Code* code = Code::cast(obj);
// Convert relocations from Object* to Address in Code objects
code->ConvertICTargetsFromObjectToAddress();
LOG(CodeMoveEvent(a, code->address()));
}
objects_++;

3
deps/v8/src/serialize.h

@ -155,7 +155,7 @@ class Serializer: public ObjectVisitor {
friend class ReferenceUpdater;
virtual void VisitPointers(Object** start, Object** end);
virtual void VisitCodeTarget(RelocInfo* rinfo);
bool IsVisited(HeapObject* obj);
Address GetSavedAddress(HeapObject* obj);
@ -289,6 +289,7 @@ class Deserializer: public ObjectVisitor {
private:
virtual void VisitPointers(Object** start, Object** end);
virtual void VisitCodeTarget(RelocInfo* rinfo);
virtual void VisitExternalReferences(Address* start, Address* end);
virtual void VisitRuntimeEntry(RelocInfo* rinfo);

12
deps/v8/src/spaces.cc

@ -827,13 +827,7 @@ void PagedSpace::Verify(ObjectVisitor* visitor) {
// have their remembered set bits set if required as determined
// by the visitor.
int size = object->Size();
if (object->IsCode()) {
Code::cast(object)->ConvertICTargetsFromAddressToObject();
object->IterateBody(map->instance_type(), size, visitor);
Code::cast(object)->ConvertICTargetsFromObjectToAddress();
} else {
object->IterateBody(map->instance_type(), size, visitor);
}
current += size;
}
@ -1906,7 +1900,7 @@ void OldSpace::ReportStatistics() {
int rset = Memory::int_at(rset_addr);
if (rset != 0) {
// Bits were set
int intoff = rset_addr - p->address();
int intoff = rset_addr - p->address() - Page::kRSetOffset;
int bitoff = 0;
for (; bitoff < kBitsPerInt; ++bitoff) {
if ((rset & (1 << bitoff)) != 0) {
@ -2171,7 +2165,7 @@ void FixedSpace::ReportStatistics() {
int rset = Memory::int_at(rset_addr);
if (rset != 0) {
// Bits were set
int intoff = rset_addr - p->address();
int intoff = rset_addr - p->address() - Page::kRSetOffset;
int bitoff = 0;
for (; bitoff < kBitsPerInt; ++bitoff) {
if ((rset & (1 << bitoff)) != 0) {
@ -2574,11 +2568,9 @@ void LargeObjectSpace::Verify() {
// Byte arrays and strings don't have interior pointers.
if (object->IsCode()) {
VerifyPointersVisitor code_visitor;
Code::cast(object)->ConvertICTargetsFromAddressToObject();
object->IterateBody(map->instance_type(),
object->Size(),
&code_visitor);
Code::cast(object)->ConvertICTargetsFromObjectToAddress();
} else if (object->IsFixedArray()) {
// We loop over fixed arrays ourselves, rather then using the visitor,
// because the visitor doesn't support the start/offset iteration

2
deps/v8/src/string-stream.cc

@ -251,7 +251,7 @@ void StringStream::Add(const char* format, FmtElm arg0, FmtElm arg1,
}
SmartPointer<const char> StringStream::ToCString() {
SmartPointer<const char> StringStream::ToCString() const {
char* str = NewArray<char>(length_ + 1);
memcpy(str, buffer_, length_);
str[length_] = '\0';

2
deps/v8/src/string-stream.h

@ -141,7 +141,7 @@ class StringStream {
void OutputToStdOut();
void Log();
Handle<String> ToString();
SmartPointer<const char> ToCString();
SmartPointer<const char> ToCString() const;
// Object printing support.
void PrintName(Object* o);

2
deps/v8/src/v8-counters.h

@ -142,6 +142,8 @@ namespace internal {
SC(constructed_objects, V8.ConstructedObjects) \
SC(constructed_objects_runtime, V8.ConstructedObjectsRuntime) \
SC(constructed_objects_stub, V8.ConstructedObjectsStub) \
SC(array_function_runtime, V8.ArrayFunctionRuntime) \
SC(array_function_native, V8.ArrayFunctionNative) \
SC(for_in, V8.ForIn) \
SC(enum_cache_hits, V8.EnumCacheHits) \
SC(enum_cache_misses, V8.EnumCacheMisses) \

18
deps/v8/src/v8natives.js

@ -276,6 +276,13 @@ function ObjectLookupSetter(name) {
}
function ObjectKeys(obj) {
if ((!IS_OBJECT(obj) || IS_NULL_OR_UNDEFINED(obj)) && !IS_FUNCTION(obj))
throw MakeTypeError('object_keys_non_object', [obj]);
return %LocalKeys(obj);
}
%SetCode($Object, function(x) {
if (%_IsConstructCall()) {
if (x == null) return this;
@ -304,6 +311,9 @@ function SetupObject() {
"__defineSetter__", ObjectDefineSetter,
"__lookupSetter__", ObjectLookupSetter
));
InstallFunctions($Object, DONT_ENUM, $Array(
"keys", ObjectKeys
));
}
SetupObject();
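ObjectKeys above is the JavaScript entry point for the new %LocalKeys runtime call; a brief usage sketch of what the addition makes available:

// Only own enumerable property names are returned; the prototype chain is ignored.
function Point(x, y) { this.x = x; this.y = y; }
Point.prototype.z = 0;
var keys = Object.keys(new Point(1, 2));   // ['x', 'y']
// Non-objects raise the new object_keys_non_object TypeError.
try {
  Object.keys(42);
} catch (e) {
  // e.message: "Object.keys called on non-object"
}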
@ -514,7 +524,7 @@ function FunctionSourceString(func) {
}
var source = %FunctionGetSourceCode(func);
if (!IS_STRING(source)) {
if (!IS_STRING(source) || %FunctionIsBuiltin(func)) {
var name = %FunctionGetName(func);
if (name) {
// Mimic what KJS does.
@ -524,12 +534,6 @@ function FunctionSourceString(func) {
}
}
// Censor occurrences of internal calls. We do that for all
// functions and don't cache under the assumption that people rarely
// convert functions to strings. Note that we (apparently) can't
// use regular expression literals in natives files.
var regexp = ORIGINAL_REGEXP("%(\\w+\\()", "gm");
if (source.match(regexp)) source = source.replace(regexp, "$1");
var name = %FunctionGetName(func);
return 'function ' + name + source;
}
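With the added %FunctionIsBuiltin check, built-in functions take the "no source" path even when a source string is available; the observable effect is roughly the following (the exact stub text mimics what KJS returns and is not spelled out in this diff):

function add(a, b) { return a + b; }
add.toString();        // user functions still stringify to their source
Math.max.toString();   // built-ins return a stub like "function max() { [native code] }"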

4
deps/v8/src/version.cc

@ -34,8 +34,8 @@
// cannot be changed without changing the SCons build script.
#define MAJOR_VERSION 1
#define MINOR_VERSION 3
#define BUILD_NUMBER 11
#define PATCH_LEVEL 1
#define BUILD_NUMBER 13
#define PATCH_LEVEL 0
#define CANDIDATE_VERSION false
// Define SONAME to have the SCons build put a specific SONAME into the

16
deps/v8/src/x64/builtins-x64.cc

@ -452,6 +452,22 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
}
void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
// Just jump to the generic array code.
Code* code = Builtins::builtin(Builtins::ArrayCodeGeneric);
Handle<Code> array_code(code);
__ Jump(array_code, RelocInfo::CODE_TARGET);
}
void Builtins::Generate_ArrayConstructCode(MacroAssembler* masm) {
// Just jump to the generic construct code.
Code* code = Builtins::builtin(Builtins::JSConstructStubGeneric);
Handle<Code> generic_construct_stub(code);
__ Jump(generic_construct_stub, RelocInfo::CODE_TARGET);
}
void Builtins::Generate_JSConstructCall(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- rax: number of arguments

17
deps/v8/src/zone-inl.h

@ -276,12 +276,19 @@ void ZoneSplayTree<C>::Splay(const Key& key) {
}
template <typename Node, class Callback>
static void DoForEach(Node* node, Callback* callback) {
if (node == NULL) return;
DoForEach<Node, Callback>(node->left(), callback);
template <typename Config> template <class Callback>
void ZoneSplayTree<Config>::ForEach(Callback* callback) {
// Pre-allocate some space for tiny trees.
ZoneList<Node*> nodes_to_visit(10);
nodes_to_visit.Add(root_);
int pos = 0;
while (pos < nodes_to_visit.length()) {
Node* node = nodes_to_visit[pos++];
if (node == NULL) continue;
callback->Call(node->key(), node->value());
DoForEach<Node, Callback>(node->right(), callback);
nodes_to_visit.Add(node->left());
nodes_to_visit.Add(node->right());
}
}
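The new ForEach replaces the recursive DoForEach with an explicit worklist, which bounds C++ stack usage but visits nodes in breadth-first order rather than in-order; a JavaScript sketch of the same traversal over a simple {key, value, left, right} node shape (illustrative):

function forEach(root, callback) {
  var toVisit = [root];                 // explicit worklist instead of recursion
  for (var pos = 0; pos < toVisit.length; pos++) {
    var node = toVisit[pos];
    if (node === null) continue;        // NULL children are simply skipped
    callback(node.key, node.value);
    toVisit.push(node.left);
    toVisit.push(node.right);
  }
}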

8
deps/v8/src/zone.h

@ -204,10 +204,6 @@ class ZoneScope BASE_EMBEDDED {
};
template <typename Node, class Callback>
static void DoForEach(Node* node, Callback* callback);
// A zone splay tree. The config type parameter encapsulates the
// different configurations of a concrete splay tree:
//
@ -297,9 +293,7 @@ class ZoneSplayTree : public ZoneObject {
};
template <class Callback>
void ForEach(Callback* c) {
DoForEach<typename ZoneSplayTree<Config>::Node, Callback>(root_, c);
}
void ForEach(Callback* callback);
private:
Node* root_;

1
deps/v8/test/cctest/SConscript

@ -45,6 +45,7 @@ SOURCES = {
'test-func-name-inference.cc',
'test-hashmap.cc',
'test-heap.cc',
'test-heap-profiler.cc',
'test-list.cc',
'test-lock.cc',
'test-log.cc',

330
deps/v8/test/cctest/test-heap-profiler.cc

@ -0,0 +1,330 @@
// Copyright 2009 the V8 project authors. All rights reserved.
//
// Tests for heap profiler
#ifdef ENABLE_LOGGING_AND_PROFILING
#include "v8.h"
#include "heap-profiler.h"
#include "string-stream.h"
#include "cctest.h"
namespace i = v8::internal;
using i::ClustersCoarser;
using i::JSObjectsCluster;
using i::JSObjectsRetainerTree;
using i::JSObjectsClusterTree;
using i::RetainerHeapProfile;
static void CompileAndRunScript(const char *src) {
v8::Script::Compile(v8::String::New(src))->Run();
}
namespace {
class ConstructorHeapProfileTestHelper : public i::ConstructorHeapProfile {
public:
ConstructorHeapProfileTestHelper()
: i::ConstructorHeapProfile(),
f_name_(i::Factory::NewStringFromAscii(i::CStrVector("F"))),
f_count_(0) {
}
void Call(const JSObjectsCluster& cluster,
const i::NumberAndSizeInfo& number_and_size) {
if (f_name_->Equals(cluster.constructor())) {
CHECK_EQ(f_count_, 0);
f_count_ = number_and_size.number();
CHECK_GT(f_count_, 0);
}
}
int f_count() { return f_count_; }
private:
i::Handle<i::String> f_name_;
int f_count_;
};
} // namespace
TEST(ConstructorProfile) {
v8::HandleScope scope;
v8::Handle<v8::Context> env = v8::Context::New();
env->Enter();
CompileAndRunScript(
"function F() {} // A constructor\n"
"var f1 = new F();\n"
"var f2 = new F();\n");
ConstructorHeapProfileTestHelper cons_profile;
i::AssertNoAllocation no_alloc;
i::HeapIterator iterator;
while (iterator.has_next()) {
i::HeapObject* obj = iterator.next();
cons_profile.CollectStats(obj);
}
CHECK_EQ(0, cons_profile.f_count());
cons_profile.PrintStats();
CHECK_EQ(2, cons_profile.f_count());
}
static JSObjectsCluster AddHeapObjectToTree(
JSObjectsRetainerTree* tree,
i::String* constructor,
int instance,
JSObjectsCluster* ref1 = NULL,
JSObjectsCluster* ref2 = NULL,
JSObjectsCluster* ref3 = NULL) {
JSObjectsCluster o(constructor, reinterpret_cast<i::Object*>(instance));
JSObjectsClusterTree* o_tree = new JSObjectsClusterTree();
JSObjectsClusterTree::Locator o_loc;
if (ref1 != NULL) o_tree->Insert(*ref1, &o_loc);
if (ref2 != NULL) o_tree->Insert(*ref2, &o_loc);
if (ref3 != NULL) o_tree->Insert(*ref3, &o_loc);
JSObjectsRetainerTree::Locator loc;
tree->Insert(o, &loc);
loc.set_value(o_tree);
return o;
}
static inline void CheckEqualsHelper(const char* file, int line,
const char* expected_source,
const JSObjectsCluster& expected,
const char* value_source,
const JSObjectsCluster& value) {
if (JSObjectsCluster::Compare(expected, value) != 0) {
i::HeapStringAllocator allocator;
i::StringStream stream(&allocator);
stream.Add("# Expected: ");
expected.DebugPrint(&stream);
stream.Add("\n# Found: ");
value.DebugPrint(&stream);
V8_Fatal(file, line, "CHECK_EQ(%s, %s) failed\n%s",
expected_source, value_source,
*stream.ToCString());
}
}
static inline void CheckNonEqualsHelper(const char* file, int line,
const char* expected_source,
const JSObjectsCluster& expected,
const char* value_source,
const JSObjectsCluster& value) {
if (JSObjectsCluster::Compare(expected, value) == 0) {
i::HeapStringAllocator allocator;
i::StringStream stream(&allocator);
stream.Add("# Expected: ");
expected.DebugPrint(&stream);
stream.Add("\n# Found: ");
value.DebugPrint(&stream);
V8_Fatal(file, line, "CHECK_NE(%s, %s) failed\n%s",
expected_source, value_source,
*stream.ToCString());
}
}
TEST(ClustersCoarserSimple) {
v8::HandleScope scope;
v8::Handle<v8::Context> env = v8::Context::New();
env->Enter();
i::ZoneScope zn_scope(i::DELETE_ON_EXIT);
JSObjectsRetainerTree tree;
JSObjectsCluster function(i::Heap::function_class_symbol());
JSObjectsCluster a(*i::Factory::NewStringFromAscii(i::CStrVector("A")));
JSObjectsCluster b(*i::Factory::NewStringFromAscii(i::CStrVector("B")));
// o1 <- Function
JSObjectsCluster o1 =
AddHeapObjectToTree(&tree, i::Heap::Object_symbol(), 0x100, &function);
// o2 <- Function
JSObjectsCluster o2 =
AddHeapObjectToTree(&tree, i::Heap::Object_symbol(), 0x200, &function);
// o3 <- A, B
JSObjectsCluster o3 =
AddHeapObjectToTree(&tree, i::Heap::Object_symbol(), 0x300, &a, &b);
// o4 <- B, A
JSObjectsCluster o4 =
AddHeapObjectToTree(&tree, i::Heap::Object_symbol(), 0x400, &b, &a);
// o5 <- A, B, Function
JSObjectsCluster o5 =
AddHeapObjectToTree(&tree, i::Heap::Object_symbol(), 0x500,
&a, &b, &function);
ClustersCoarser coarser;
coarser.Process(&tree);
CHECK_EQ(coarser.GetCoarseEquivalent(o1), coarser.GetCoarseEquivalent(o2));
CHECK_EQ(coarser.GetCoarseEquivalent(o3), coarser.GetCoarseEquivalent(o4));
CHECK_NE(coarser.GetCoarseEquivalent(o1), coarser.GetCoarseEquivalent(o3));
CHECK_EQ(JSObjectsCluster(), coarser.GetCoarseEquivalent(o5));
}
TEST(ClustersCoarserMultipleConstructors) {
v8::HandleScope scope;
v8::Handle<v8::Context> env = v8::Context::New();
env->Enter();
i::ZoneScope zn_scope(i::DELETE_ON_EXIT);
JSObjectsRetainerTree tree;
JSObjectsCluster function(i::Heap::function_class_symbol());
// o1 <- Function
JSObjectsCluster o1 =
AddHeapObjectToTree(&tree, i::Heap::Object_symbol(), 0x100, &function);
// a1 <- Function
JSObjectsCluster a1 =
AddHeapObjectToTree(&tree, i::Heap::Array_symbol(), 0x1000, &function);
// o2 <- Function
JSObjectsCluster o2 =
AddHeapObjectToTree(&tree, i::Heap::Object_symbol(), 0x200, &function);
// a2 <- Function
JSObjectsCluster a2 =
AddHeapObjectToTree(&tree, i::Heap::Array_symbol(), 0x2000, &function);
ClustersCoarser coarser;
coarser.Process(&tree);
CHECK_EQ(coarser.GetCoarseEquivalent(o1), coarser.GetCoarseEquivalent(o2));
CHECK_EQ(coarser.GetCoarseEquivalent(a1), coarser.GetCoarseEquivalent(a2));
}
TEST(ClustersCoarserPathsTraversal) {
v8::HandleScope scope;
v8::Handle<v8::Context> env = v8::Context::New();
env->Enter();
i::ZoneScope zn_scope(i::DELETE_ON_EXIT);
JSObjectsRetainerTree tree;
// On the following graph:
//
// p
// <- o21 <- o11 <-
// q o
// <- o22 <- o12 <-
// r
//
// we expect that coarser will deduce equivalences: p ~ q ~ r,
// o21 ~ o22, and o11 ~ o12.
JSObjectsCluster o =
AddHeapObjectToTree(&tree, i::Heap::Object_symbol(), 0x100);
JSObjectsCluster o11 =
AddHeapObjectToTree(&tree, i::Heap::Object_symbol(), 0x110, &o);
JSObjectsCluster o12 =
AddHeapObjectToTree(&tree, i::Heap::Object_symbol(), 0x120, &o);
JSObjectsCluster o21 =
AddHeapObjectToTree(&tree, i::Heap::Object_symbol(), 0x210, &o11);
JSObjectsCluster o22 =
AddHeapObjectToTree(&tree, i::Heap::Object_symbol(), 0x220, &o12);
JSObjectsCluster p =
AddHeapObjectToTree(&tree, i::Heap::Object_symbol(), 0x300, &o21);
JSObjectsCluster q =
AddHeapObjectToTree(&tree, i::Heap::Object_symbol(), 0x310, &o21, &o22);
JSObjectsCluster r =
AddHeapObjectToTree(&tree, i::Heap::Object_symbol(), 0x320, &o22);
ClustersCoarser coarser;
coarser.Process(&tree);
CHECK_EQ(JSObjectsCluster(), coarser.GetCoarseEquivalent(o));
CHECK_EQ(coarser.GetCoarseEquivalent(o11), coarser.GetCoarseEquivalent(o12));
CHECK_EQ(coarser.GetCoarseEquivalent(o21), coarser.GetCoarseEquivalent(o22));
CHECK_NE(coarser.GetCoarseEquivalent(o11), coarser.GetCoarseEquivalent(o21));
CHECK_EQ(coarser.GetCoarseEquivalent(p), coarser.GetCoarseEquivalent(q));
CHECK_EQ(coarser.GetCoarseEquivalent(q), coarser.GetCoarseEquivalent(r));
CHECK_NE(coarser.GetCoarseEquivalent(o11), coarser.GetCoarseEquivalent(p));
CHECK_NE(coarser.GetCoarseEquivalent(o21), coarser.GetCoarseEquivalent(p));
}
namespace {
class RetainerProfilePrinter : public RetainerHeapProfile::Printer {
public:
RetainerProfilePrinter() : stream_(&allocator_), lines_(100) {}
void PrintRetainers(const JSObjectsCluster& cluster,
const i::StringStream& retainers) {
cluster.Print(&stream_);
stream_.Add("%s", *(retainers.ToCString()));
stream_.Put('\0');
}
const char* GetRetainers(const char* constructor) {
FillLines();
const size_t cons_len = strlen(constructor);
for (int i = 0; i < lines_.length(); ++i) {
if (strncmp(constructor, lines_[i], cons_len) == 0 &&
lines_[i][cons_len] == ',') {
return lines_[i] + cons_len + 1;
}
}
return NULL;
}
private:
void FillLines() {
if (lines_.length() > 0) return;
stream_.Put('\0');
stream_str_ = stream_.ToCString();
const char* pos = *stream_str_;
while (pos != NULL && *pos != '\0') {
lines_.Add(pos);
pos = strchr(pos, '\0');
if (pos != NULL) ++pos;
}
}
i::HeapStringAllocator allocator_;
i::StringStream stream_;
i::SmartPointer<const char> stream_str_;
i::List<const char*> lines_;
};
} // namespace
TEST(RetainerProfile) {
v8::HandleScope scope;
v8::Handle<v8::Context> env = v8::Context::New();
env->Enter();
CompileAndRunScript(
"function A() {}\n"
"function B(x) { this.x = x; }\n"
"function C(x) { this.x1 = x; this.x2 = x; }\n"
"var a = new A();\n"
"var b1 = new B(a), b2 = new B(a);\n"
"var c = new C(a);");
RetainerHeapProfile ret_profile;
i::AssertNoAllocation no_alloc;
i::HeapIterator iterator;
while (iterator.has_next()) {
i::HeapObject* obj = iterator.next();
ret_profile.CollectStats(obj);
}
RetainerProfilePrinter printer;
ret_profile.DebugPrintStats(&printer);
CHECK_EQ("(global property);1,B;2,C;2", printer.GetRetainers("A"));
CHECK_EQ("(global property);2", printer.GetRetainers("B"));
CHECK_EQ("(global property);1", printer.GetRetainers("C"));
}
#endif // ENABLE_LOGGING_AND_PROFILING

14
deps/v8/test/es5conform/README

@ -0,0 +1,14 @@
This directory contains code for binding the es5conform test suite
into the v8 test harness. To use the tests, check out the es5conform
tests from
https://es5conform.svn.codeplex.com/svn
in revision 59101 as 'data' in this directory. Using a later version
may be possible, but the tests are only known to pass (and indeed run)
with that revision.
If you do update to a newer revision you may have to change the test
harness adapter code since it uses internal functionality from the
harness that comes bundled with the tests. You will most likely also
have to update the test expectation file.

68
deps/v8/test/es5conform/es5conform.status

@ -0,0 +1,68 @@
# Copyright 2009 the V8 project authors. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
prefix es5conform
def UNIMPLEMENTED = PASS || FAIL
def FAIL_OK = FAIL, OKAY
chapter07: UNIMPLEMENTED
chapter08: UNIMPLEMENTED
chapter10: UNIMPLEMENTED
chapter11: UNIMPLEMENTED
chapter12: UNIMPLEMENTED
chapter13: UNIMPLEMENTED
chapter14: UNIMPLEMENTED
chapter15/15.1: UNIMPLEMENTED
chapter15/15.2/15.2.3/15.2.3.1: UNIMPLEMENTED
chapter15/15.2/15.2.3/15.2.3.2: UNIMPLEMENTED
chapter15/15.2/15.2.3/15.2.3.3: UNIMPLEMENTED
chapter15/15.2/15.2.3/15.2.3.4: UNIMPLEMENTED
chapter15/15.2/15.2.3/15.2.3.5: UNIMPLEMENTED
chapter15/15.2/15.2.3/15.2.3.6: UNIMPLEMENTED
chapter15/15.2/15.2.3/15.2.3.7: UNIMPLEMENTED
chapter15/15.2/15.2.3/15.2.3.8: UNIMPLEMENTED
chapter15/15.2/15.2.3/15.2.3.9: UNIMPLEMENTED
chapter15/15.2/15.2.3/15.2.3.10: UNIMPLEMENTED
chapter15/15.2/15.2.3/15.2.3.11: UNIMPLEMENTED
chapter15/15.2/15.2.3/15.2.3.12: UNIMPLEMENTED
chapter15/15.2/15.2.3/15.2.3.13: UNIMPLEMENTED
# Object.keys
chapter15/15.2/15.2.3/15.2.3.14: PASS
# We fail this because Object.keys returns numbers for element indices
# rather than strings.
chapter15/15.2/15.2.3/15.2.3.14/15.2.3.14-3-3: FAIL_OK
chapter15/15.3: UNIMPLEMENTED
chapter15/15.4: UNIMPLEMENTED
chapter15/15.5: UNIMPLEMENTED
chapter15/15.6: UNIMPLEMENTED
chapter15/15.7: UNIMPLEMENTED
chapter15/15.9: UNIMPLEMENTED
chapter15/15.10: UNIMPLEMENTED
chapter15/15.12: UNIMPLEMENTED

78
deps/v8/test/mjsunit/array-splice-webkit.js → deps/v8/test/es5conform/harness-adapt.js

@ -1,4 +1,4 @@
// Copyright 2008 the V8 project authors. All rights reserved.
// Copyright 2009 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@ -25,36 +25,50 @@
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Simple splice tests based on webkit layout tests.
var arr = ['a','b','c','d'];
assertArrayEquals(['a','b','c','d'], arr);
assertArrayEquals(['c','d'], arr.splice(2));
assertArrayEquals(['a','b'], arr);
assertArrayEquals(['a','b'], arr.splice(0));
assertArrayEquals([], arr)
arr = ['a','b','c','d'];
assertEquals(undefined, arr.splice())
assertArrayEquals(['a','b','c','d'], arr);
assertArrayEquals(['a','b','c','d'], arr.splice(undefined))
assertArrayEquals([], arr);
arr = ['a','b','c','d'];
assertArrayEquals(['a','b','c','d'], arr.splice(null))
assertArrayEquals([], arr);
arr = ['a','b','c','d'];
assertArrayEquals([], arr.splice(100))
assertArrayEquals(['a','b','c','d'], arr);
assertArrayEquals(['d'], arr.splice(-1))
assertArrayEquals(['a','b','c'], arr);
assertArrayEquals([], arr.splice(2, undefined))
assertArrayEquals([], arr.splice(2, null))
assertArrayEquals([], arr.splice(2, -1))
assertArrayEquals([], arr.splice(2, 0))
assertArrayEquals(['a','b','c'], arr);
assertArrayEquals(['c'], arr.splice(2, 100))
assertArrayEquals(['a','b'], arr);
var global = this;
function ES5Error(ut) {
this.ut = ut;
}
ES5Error.prototype.toString = function () {
return this.ut.res;
};
// The harness uses the IE specific .description property of exceptions but
// that's nothing we can't hack our way around.
Error.prototype.__defineGetter__('description', function () {
return this.message;
});
function TestHarness() {
sth.call(this, global);
this._testResults = []
}
// Borrow sth's registerTest method.
TestHarness.prototype.registerTest = sth.prototype.registerTest;
// Drop the before/after stuff, just run the test.
TestHarness.prototype.startTesting = function () {
sth.prototype.run.call(this);
this.report();
};
TestHarness.prototype.report = function () {
for (var i = 0; i < this._testResults.length; i++) {
var ut = this._testResults[i];
// We don't fail on preconditions. Yet.
if (ut.res == "Precondition failed")
continue;
if (ut.res != 'pass')
throw new ES5Error(ut);
}
};
TestHarness.prototype.startingTest = function (ut) {
this.currentTest = ut;
this._testResults.push(ut);
};
var ES5Harness = new TestHarness();

108
deps/v8/test/es5conform/testcfg.py

@ -0,0 +1,108 @@
# Copyright 2008 the V8 project authors. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import test
import os
from os.path import join, exists
HARNESS_FILES = ['sth.js']
class ES5ConformTestCase(test.TestCase):
def __init__(self, filename, path, context, root, mode, framework):
super(ES5ConformTestCase, self).__init__(context, path)
self.filename = filename
self.mode = mode
self.framework = framework
self.root = root
def IsNegative(self):
return self.filename.endswith('-n.js')
def GetLabel(self):
return "%s es5conform %s" % (self.mode, self.GetName())
def IsFailureOutput(self, output):
if output.exit_code != 0:
return True
return 'FAILED!' in output.stdout
def GetCommand(self):
result = [self.context.GetVm(self.mode)]
result += ['-e', 'var window = this']
result += self.framework
result.append(self.filename)
result += ['-e', 'ES5Harness.startTesting()']
return result
def GetName(self):
return self.path[-1]
def GetSource(self):
return open(self.filename).read()
class ES5ConformTestConfiguration(test.TestConfiguration):
def __init__(self, context, root):
super(ES5ConformTestConfiguration, self).__init__(context, root)
def ListTests(self, current_path, path, mode):
tests = []
current_root = join(self.root, 'data', 'TestCases')
harness = []
harness += [join(self.root, 'data', 'SimpleTestHarness', f) for f in HARNESS_FILES]
harness += [join(self.root, 'harness-adapt.js')]
for root, dirs, files in os.walk(current_root):
for dotted in [x for x in dirs if x.startswith('.')]:
dirs.remove(dotted)
root_path = root[len(self.root):].split(os.path.sep)
root_path = current_path + [x for x in root_path if x]
for file in files:
if file.endswith('.js'):
full_path = root_path + [file[:-3]]
full_path = [x for x in full_path if not (x in ['data', 'TestCases'])]
if self.Contains(path, full_path):
test = ES5ConformTestCase(join(root, file), full_path, self.context,
self.root, mode, harness)
tests.append(test)
return tests
def GetBuildRequirements(self):
return ['sample', 'sample=shell']
def GetTestStatus(self, sections, defs):
status_file = join(self.root, 'es5conform.status')
if exists(status_file):
test.ReadConfigurationInto(status_file, sections, defs)
def GetConfiguration(context, root):
return ES5ConformTestConfiguration(context, root)

14
deps/v8/test/mjsunit/arguments-enum.js

@ -42,11 +42,11 @@ function setArgumentCount() {
}
assertEquals(0, countArguments());
assertEquals(0, countArguments(1));
assertEquals(0, countArguments(1, 2));
assertEquals(0, countArguments(1, 2, 3, 4, 5));
assertEquals(1, countArguments(1));
assertEquals(2, countArguments(1, 2));
assertEquals(5, countArguments(1, 2, 3, 4, 5));
assertEquals(0, setArgumentCount());
assertEquals(0, setArgumentCount(1));
assertEquals(0, setArgumentCount(1, 2));
assertEquals(0, setArgumentCount(1, 2, 3, 4, 5));
assertEquals(2, setArgumentCount());
assertEquals(3, setArgumentCount(1));
assertEquals(4, setArgumentCount(1, 2));
assertEquals(7, setArgumentCount(1, 2, 3, 4, 5));
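The revised expectations reflect that the indexed properties of the arguments object now show up in for-in enumeration; a sketch of what a counting helper like the ones in this test observes (the helper body is assumed from the test's names):

function countArguments() {
  var count = 0;
  for (var prop in arguments) count++;   // indices 0 .. n-1 are now enumerable
  return count;
}
countArguments();            // 0
countArguments(1, 2, 3);     // 3 -- previously 0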

119
deps/v8/test/mjsunit/array-constructor.js

@ -0,0 +1,119 @@
// Copyright 2008 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
var loop_count = 5
for (var i = 0; i < loop_count; i++) {
  var a = new Array();
  var b = Array();
  assertEquals(0, a.length);
  assertEquals(0, b.length);
  for (var k = 0; k < 10; k++) {
    assertEquals('undefined', typeof a[k]);
    assertEquals('undefined', typeof b[k]);
  }
}

for (var i = 0; i < loop_count; i++) {
  for (var j = 0; j < 100; j++) {
    var a = new Array(j);
    var b = Array(j);
    assertEquals(j, a.length);
    assertEquals(j, b.length);
    for (var k = 0; k < j; k++) {
      assertEquals('undefined', typeof a[k]);
      assertEquals('undefined', typeof b[k]);
    }
  }
}

for (var i = 0; i < loop_count; i++) {
  a = new Array(0, 1);
  assertArrayEquals([0, 1], a);
  a = new Array(0, 1, 2);
  assertArrayEquals([0, 1, 2], a);
  a = new Array(0, 1, 2, 3);
  assertArrayEquals([0, 1, 2, 3], a);
  a = new Array(0, 1, 2, 3, 4);
  assertArrayEquals([0, 1, 2, 3, 4], a);
  a = new Array(0, 1, 2, 3, 4, 5);
  assertArrayEquals([0, 1, 2, 3, 4, 5], a);
  a = new Array(0, 1, 2, 3, 4, 5, 6);
  assertArrayEquals([0, 1, 2, 3, 4, 5, 6], a);
  a = new Array(0, 1, 2, 3, 4, 5, 6, 7);
  assertArrayEquals([0, 1, 2, 3, 4, 5, 6, 7], a);
  a = new Array(0, 1, 2, 3, 4, 5, 6, 7, 8);
  assertArrayEquals([0, 1, 2, 3, 4, 5, 6, 7, 8], a);
  a = new Array(0, 1, 2, 3, 4, 5, 6, 7, 8, 9);
  assertArrayEquals([0, 1, 2, 3, 4, 5, 6, 7, 8, 9], a);
}

function innerArrayLiteral(n) {
  var a = new Array(n);
  for (var i = 0; i < n; i++) {
    a[i] = i * 2 + 7;
  }
  return a.join();
}

function testConstructOfSizeSize(n) {
  var str = innerArrayLiteral(n);
  var a = eval('[' + str + ']');
  var b = eval('new Array(' + str + ')')
  var c = eval('Array(' + str + ')')
  assertEquals(n, a.length);
  assertArrayEquals(a, b);
  assertArrayEquals(a, c);
}

for (var i = 0; i < loop_count; i++) {
  // JSObject::kInitialMaxFastElementArray is 10000.
  for (var j = 1000; j < 12000; j += 1000) {
    testConstructOfSizeSize(j);
  }
}

for (var i = 0; i < loop_count; i++) {
  assertArrayEquals(['xxx'], new Array('xxx'));
  assertArrayEquals(['xxx'], Array('xxx'));
  assertArrayEquals([true], new Array(true));
  assertArrayEquals([false], Array(false));
  assertArrayEquals([{a:1}], new Array({a:1}));
  assertArrayEquals([{b:2}], Array({b:2}));
}
assertThrows('new Array(3.14)');
assertThrows('Array(2.72)');

1
deps/v8/test/mjsunit/mjsunit.status

@@ -41,6 +41,7 @@ big-object-literal: PASS, SKIP if ($arch == arm)
# Slow tests which time out in debug mode.
try: PASS, SKIP if $mode == debug
debug-scripts-request: PASS, SKIP if $mode == debug
array-constructor: PASS, SKIP if $mode == debug
# Flaky test that can hit compilation-time stack overflow in debug mode.
unicode-test: PASS, (PASS || FAIL) if $mode == debug

3
deps/v8/test/mjsunit/testcfg.py

@@ -112,8 +112,9 @@ class MjsunitTestConfiguration(test.TestConfiguration):
mjsunit = [current_path + [t] for t in self.Ls(self.root)]
regress = [current_path + ['regress', t] for t in self.Ls(join(self.root, 'regress'))]
bugs = [current_path + ['bugs', t] for t in self.Ls(join(self.root, 'bugs'))]
third_party = [current_path + ['third_party', t] for t in self.Ls(join(self.root, 'third_party'))]
tools = [current_path + ['tools', t] for t in self.Ls(join(self.root, 'tools'))]
all_tests = mjsunit + regress + bugs + tools
all_tests = mjsunit + regress + bugs + third_party + tools
result = []
for test in all_tests:
if self.Contains(path, test):

62
deps/v8/test/mjsunit/third_party/array-splice-webkit.js

@@ -0,0 +1,62 @@
// Copyright (c) 2006 Apple Computer, Inc. All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
//
// 1. Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
//
// 3. Neither the name of the copyright holder(s) nor the names of any
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
// COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
// HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
// STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
// OF THE POSSIBILITY OF SUCH DAMAGE.
// Simple splice tests based on webkit layout tests.
var arr = ['a','b','c','d'];
assertArrayEquals(['a','b','c','d'], arr);
assertArrayEquals(['c','d'], arr.splice(2));
assertArrayEquals(['a','b'], arr);
assertArrayEquals(['a','b'], arr.splice(0));
assertArrayEquals([], arr)
arr = ['a','b','c','d'];
assertEquals(undefined, arr.splice())
assertArrayEquals(['a','b','c','d'], arr);
assertArrayEquals(['a','b','c','d'], arr.splice(undefined))
assertArrayEquals([], arr);
arr = ['a','b','c','d'];
assertArrayEquals(['a','b','c','d'], arr.splice(null))
assertArrayEquals([], arr);
arr = ['a','b','c','d'];
assertArrayEquals([], arr.splice(100))
assertArrayEquals(['a','b','c','d'], arr);
assertArrayEquals(['d'], arr.splice(-1))
assertArrayEquals(['a','b','c'], arr);
assertArrayEquals([], arr.splice(2, undefined))
assertArrayEquals([], arr.splice(2, null))
assertArrayEquals([], arr.splice(2, -1))
assertArrayEquals([], arr.splice(2, 0))
assertArrayEquals(['a','b','c'], arr);
assertArrayEquals(['c'], arr.splice(2, 100))
assertArrayEquals(['a','b'], arr);

66
deps/v8/test/mjsunit/third_party/object-keys.js

@@ -0,0 +1,66 @@
// Copyright (c) 2006 Apple Computer, Inc. All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
//
// 1. Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials provided
// with the distribution.
//
// 3. Neither the name of the copyright holder(s) nor the names of any
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
// COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
// HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
// STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
// OF THE POSSIBILITY OF SUCH DAMAGE.
// Based on LayoutTests/fast/js/Object-keys.html
assertThrows(function () { Object.keys(2) }, TypeError);
assertThrows(function () { Object.keys("foo") }, TypeError);
assertThrows(function () { Object.keys(null) }, TypeError);
assertThrows(function () { Object.keys(undefined) }, TypeError);
assertEquals(Object.keys({}), []);
assertEquals(Object.keys({a:null}), ['a']);
assertEquals(Object.keys({a:null, b:null}), ['a', 'b']);
assertEquals(Object.keys({b:null, a:null}), ['b', 'a']);
assertEquals(Object.keys([]), []);
assertEquals(Object.keys([null]), ['0']);
assertEquals(Object.keys([null,null]), ['0', '1']);
assertEquals(Object.keys([null,null,,,,null]), ['0', '1', '5']);
assertEquals(Object.keys({__proto__:{a:null}}), []);
assertEquals(Object.keys({__proto__:[1,2,3]}), []);
var x = [];
x.__proto__ = [1, 2, 3];
assertEquals(Object.keys(x), []);
assertEquals(Object.keys(function () {}), []);
function argsTest(a, b, c) {
  assertEquals([0, 1, 2], Object.keys(arguments));
}
argsTest(1, 2, 3);
var literal = {a: 1, b: 2, c: 3};
var keysBefore = Object.keys(literal);
assertEquals(['a', 'b', 'c'], keysBefore);
keysBefore[0] = 'x';
var keysAfter = Object.keys(literal);
assertEquals(['a', 'b', 'c'], keysAfter);
assertEquals(['x', 'b', 'c'], keysBefore);

0
deps/v8/test/mjsunit/regexp-pcre.js → deps/v8/test/mjsunit/third_party/regexp-pcre.js

7
deps/v8/test/mozilla/mozilla.status

@@ -217,6 +217,8 @@ js1_5/Function/regress-338121-01: FAIL_OK
js1_5/Function/regress-338121-02: FAIL_OK
js1_5/Function/regress-338121-03: FAIL_OK
# Expects 'prototype' property of functions to be enumerable.
js1_5/Function/10.1.6-01: FAIL_OK
# Length of objects whose prototype chain includes a function
ecma_3/Function/regress-313570: FAIL_OK
@@ -567,11 +569,6 @@ js1_5/Array/regress-350256-02: FAIL
ecma_3/Function/regress-137181: FAIL
# Tests that rely on specific details of function decompilation or
# print strings for errors. Non-ECMA behavior.
js1_4/Regress/function-003: FAIL
# 'export' and 'import' are not keywords in V8.
ecma_2/Exceptions/lexical-010: FAIL
ecma_2/Exceptions/lexical-022: FAIL

2
deps/v8/tools/gyp/v8.gyp

@@ -279,6 +279,8 @@
'../../src/heap-inl.h',
'../../src/heap.cc',
'../../src/heap.h',
'../../src/heap-profiler.cc',
'../../src/heap-profiler.h',
'../../src/ic-inl.h',
'../../src/ic.cc',
'../../src/ic.h',

2
deps/v8/tools/run-valgrind.py

@@ -58,7 +58,7 @@ if code != 0:
# have any definitely, indirectly, and possibly lost bytes.
LEAK_RE = r"(?:definitely|indirectly|possibly) lost: "
LEAK_LINE_MATCHER = re.compile(LEAK_RE)
LEAK_OKAY_MATCHER = re.compile(r"lost: 0 bytes in 0 blocks.")
LEAK_OKAY_MATCHER = re.compile(r"lost: 0 bytes in 0 blocks")
leaks = []
for line in errors:
if LEAK_LINE_MATCHER.search(line):
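Dropping the trailing '.' from LEAK_OKAY_MATCHER lets a clean summary be recognized whether or not anything follows the block count. A small sketch of how the two patterns classify summary lines when combined in the spirit of the loop above; the sample lines are made up, not captured valgrind output:

    import re

    LEAK_RE = r"(?:definitely|indirectly|possibly) lost: "
    LEAK_LINE_MATCHER = re.compile(LEAK_RE)
    LEAK_OKAY_MATCHER = re.compile(r"lost: 0 bytes in 0 blocks")

    samples = [
      "==123== definitely lost: 0 bytes in 0 blocks",    # clean, no trailing period
      "==123== possibly lost: 128 bytes in 2 blocks.",   # an actual leak
    ]

    leaks = [line for line in samples
             if LEAK_LINE_MATCHER.search(line)
             and not LEAK_OKAY_MATCHER.search(line)]
    print(leaks)  # only the 'possibly lost' line remains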

10
deps/v8/tools/v8.xcodeproj/project.pbxproj

@@ -207,6 +207,8 @@
89F23C9F0E78D604006B2466 /* simulator-arm.cc in Sources */ = {isa = PBXBuildFile; fileRef = 897FF17D0E719B8F00D62E90 /* simulator-arm.cc */; };
89F23CA00E78D609006B2466 /* stub-cache-arm.cc in Sources */ = {isa = PBXBuildFile; fileRef = 897FF18A0E719B8F00D62E90 /* stub-cache-arm.cc */; };
89FB0E3A0F8E533F00B04B3C /* d8-posix.cc in Sources */ = {isa = PBXBuildFile; fileRef = 89FB0E360F8E531900B04B3C /* d8-posix.cc */; };
9F11D9A0105AF0A300EBE5B2 /* heap-profiler.cc in Sources */ = {isa = PBXBuildFile; fileRef = 9F11D99E105AF0A300EBE5B2 /* heap-profiler.cc */; };
9F11D9A1105AF0A300EBE5B2 /* heap-profiler.cc in Sources */ = {isa = PBXBuildFile; fileRef = 9F11D99E105AF0A300EBE5B2 /* heap-profiler.cc */; };
9F4B7B890FCC877A00DC4117 /* log-utils.cc in Sources */ = {isa = PBXBuildFile; fileRef = 9F4B7B870FCC877A00DC4117 /* log-utils.cc */; };
9F4B7B8A0FCC877A00DC4117 /* log-utils.cc in Sources */ = {isa = PBXBuildFile; fileRef = 9F4B7B870FCC877A00DC4117 /* log-utils.cc */; };
9F92FAA90F8F28AD0089F02C /* func-name-inferrer.cc in Sources */ = {isa = PBXBuildFile; fileRef = 9F92FAA70F8F28AD0089F02C /* func-name-inferrer.cc */; };
@@ -533,6 +535,8 @@
89F23C950E78D5B6006B2466 /* v8_shell-arm */ = {isa = PBXFileReference; explicitFileType = "compiled.mach-o.executable"; includeInIndex = 0; path = "v8_shell-arm"; sourceTree = BUILT_PRODUCTS_DIR; };
89FB0E360F8E531900B04B3C /* d8-posix.cc */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; name = "d8-posix.cc"; path = "../src/d8-posix.cc"; sourceTree = "<group>"; };
89FB0E370F8E531900B04B3C /* d8-windows.cc */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; name = "d8-windows.cc"; path = "../src/d8-windows.cc"; sourceTree = "<group>"; };
9F11D99E105AF0A300EBE5B2 /* heap-profiler.cc */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = "heap-profiler.cc"; sourceTree = "<group>"; };
9F11D99F105AF0A300EBE5B2 /* heap-profiler.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = "heap-profiler.h"; sourceTree = "<group>"; };
9F4B7B870FCC877A00DC4117 /* log-utils.cc */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = "log-utils.cc"; sourceTree = "<group>"; };
9F4B7B880FCC877A00DC4117 /* log-utils.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = "log-utils.h"; sourceTree = "<group>"; };
9F92FAA70F8F28AD0089F02C /* func-name-inferrer.cc */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.cpp; path = "func-name-inferrer.cc"; sourceTree = "<group>"; };
@@ -626,7 +630,6 @@
897FF0D70E719AB300D62E90 /* C++ */ = {
isa = PBXGroup;
children = (
22A76C900FF259E600FDC694 /* log-inl.h */,
897FF0F60E719B8F00D62E90 /* accessors.cc */,
897FF0F70E719B8F00D62E90 /* accessors.h */,
897FF0F80E719B8F00D62E90 /* allocation.cc */,
@@ -725,6 +728,8 @@
897FF1460E719B8F00D62E90 /* heap-inl.h */,
897FF1470E719B8F00D62E90 /* heap.cc */,
897FF1480E719B8F00D62E90 /* heap.h */,
9F11D99E105AF0A300EBE5B2 /* heap-profiler.cc */,
9F11D99F105AF0A300EBE5B2 /* heap-profiler.h */,
897FF1490E719B8F00D62E90 /* ic-arm.cc */,
897FF14A0E719B8F00D62E90 /* ic-ia32.cc */,
897FF14B0E719B8F00D62E90 /* ic-inl.h */,
@@ -742,6 +747,7 @@
897FF1510E719B8F00D62E90 /* list.h */,
897FF1520E719B8F00D62E90 /* log.cc */,
897FF1530E719B8F00D62E90 /* log.h */,
22A76C900FF259E600FDC694 /* log-inl.h */,
9F4B7B870FCC877A00DC4117 /* log-utils.cc */,
9F4B7B880FCC877A00DC4117 /* log-utils.h */,
897FF1540E719B8F00D62E90 /* macro-assembler-arm.cc */,
@@ -1201,6 +1207,7 @@
89A88E2E0E71A6D60043BA31 /* zone.cc in Sources */,
9F4B7B890FCC877A00DC4117 /* log-utils.cc in Sources */,
8981F6001010501900D1520E /* frame-element.cc in Sources */,
9F11D9A0105AF0A300EBE5B2 /* heap-profiler.cc in Sources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
@@ -1306,6 +1313,7 @@
89F23C820E78D5B2006B2466 /* zone.cc in Sources */,
9F4B7B8A0FCC877A00DC4117 /* log-utils.cc in Sources */,
8981F6011010502800D1520E /* frame-element.cc in Sources */,
9F11D9A1105AF0A300EBE5B2 /* heap-profiler.cc in Sources */,
);
runOnlyForDeploymentPostprocessing = 0;
};

8
deps/v8/tools/visual_studio/v8_base.vcproj

@@ -488,6 +488,14 @@
RelativePath="..\..\src\heap.h"
>
</File>
<File
RelativePath="..\..\src\heap-profiler.cc"
>
</File>
<File
RelativePath="..\..\src\heap-profiler.h"
>
</File>
<File
RelativePath="..\..\src\ia32\ic-ia32.cc"
>

8
deps/v8/tools/visual_studio/v8_base_arm.vcproj

@@ -496,6 +496,14 @@
RelativePath="..\..\src\heap.h"
>
</File>
<File
RelativePath="..\..\src\heap-profiler.cc"
>
</File>
<File
RelativePath="..\..\src\heap-profiler.h"
>
</File>
<File
RelativePath="..\..\src\arm\ic-arm.cc"
>

8
deps/v8/tools/visual_studio/v8_base_x64.vcproj

@@ -488,6 +488,14 @@
RelativePath="..\..\src\heap.h"
>
</File>
<File
RelativePath="..\..\src\heap-profiler.cc"
>
</File>
<File
RelativePath="..\..\src\heap-profiler.h"
>
</File>
<File
RelativePath="..\..\src\x64\ic-x64.cc"
>

4
deps/v8/tools/visual_studio/v8_cctest.vcproj

@@ -197,6 +197,10 @@
RelativePath="..\..\test\cctest\test-heap.cc"
>
</File>
<File
RelativePath="..\..\test\cctest\test-heap-profiler.cc"
>
</File>
<File
RelativePath="..\..\test\cctest\test-lock.cc"
>

4
deps/v8/tools/visual_studio/v8_cctest_arm.vcproj

@@ -193,6 +193,10 @@
RelativePath="..\..\test\cctest\test-heap.cc"
>
</File>
<File
RelativePath="..\..\test\cctest\test-heap-profiler.cc"
>
</File>
<File
RelativePath="..\..\test\cctest\test-lock.cc"
>

4
deps/v8/tools/visual_studio/v8_cctest_x64.vcproj

@@ -199,6 +199,10 @@
RelativePath="..\..\test\cctest\test-heap.cc"
>
</File>
<File
RelativePath="..\..\test\cctest\test-heap-profiler.cc"
>
</File>
<File
RelativePath="..\..\test\cctest\test-lock.cc"
>
