mirror of https://github.com/lukechilds/node.git
310 changed files with 17278 additions and 10705 deletions
File diff suppressed because it is too large
File diff suppressed because it is too large
@ -0,0 +1,168 @@ |
|||
// Copyright 2013 the V8 project authors. All rights reserved.
|
|||
// Redistribution and use in source and binary forms, with or without
|
|||
// modification, are permitted provided that the following conditions are
|
|||
// met:
|
|||
//
|
|||
// * Redistributions of source code must retain the above copyright
|
|||
// notice, this list of conditions and the following disclaimer.
|
|||
// * Redistributions in binary form must reproduce the above
|
|||
// copyright notice, this list of conditions and the following
|
|||
// disclaimer in the documentation and/or other materials provided
|
|||
// with the distribution.
|
|||
// * Neither the name of Google Inc. nor the names of its
|
|||
// contributors may be used to endorse or promote products derived
|
|||
// from this software without specific prior written permission.
|
|||
//
|
|||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
|||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
|||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
|||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
|||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
|||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
|||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
|||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
|||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
|||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
|||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|||
|
|||
#ifndef V8_ASSERT_SCOPE_H_ |
|||
#define V8_ASSERT_SCOPE_H_ |
|||
|
|||
#include "allocation.h" |
|||
#include "platform.h" |
|||
|
|||
namespace v8 { |
|||
namespace internal { |
|||
|
|||
class Isolate; |
|||
|
|||
// Kinds of per-thread assertions that scopes below can enable or disable.
enum PerThreadAssertType {
  HEAP_ALLOCATION_ASSERT,
  HANDLE_ALLOCATION_ASSERT,
  HANDLE_DEREFERENCE_ASSERT,
  DEFERRED_HANDLE_DEREFERENCE_ASSERT,
  LAST_PER_THREAD_ASSERT_TYPE  // Count of assert types; keep this last.
};
|||
|
|||
|
|||
#ifdef DEBUG |
|||
class PerThreadAssertData { |
|||
public: |
|||
PerThreadAssertData() : nesting_level_(0) { |
|||
for (int i = 0; i < LAST_PER_THREAD_ASSERT_TYPE; i++) { |
|||
assert_states_[i] = true; |
|||
} |
|||
} |
|||
|
|||
void set(PerThreadAssertType type, bool allow) { |
|||
assert_states_[type] = allow; |
|||
} |
|||
|
|||
bool get(PerThreadAssertType type) const { |
|||
return assert_states_[type]; |
|||
} |
|||
|
|||
void increment_level() { ++nesting_level_; } |
|||
bool decrement_level() { return --nesting_level_ == 0; } |
|||
|
|||
private: |
|||
bool assert_states_[LAST_PER_THREAD_ASSERT_TYPE]; |
|||
int nesting_level_; |
|||
|
|||
DISALLOW_COPY_AND_ASSIGN(PerThreadAssertData); |
|||
}; |
|||
#endif // DEBUG
|
|||
|
|||
|
|||
// Base class for per-thread assert scopes. In DEBUG builds it lazily
// creates the thread-local PerThreadAssertData on first use and tears it
// down again — after checking that every assert type was restored to its
// default state — when the outermost scope on the thread unwinds. In
// release builds the class is empty.
class PerThreadAssertScopeBase {
#ifdef DEBUG

 protected:
  PerThreadAssertScopeBase() {
    data_ = AssertData();
    data_->increment_level();
  }

  ~PerThreadAssertScopeBase() {
    if (!data_->decrement_level()) return;
    // Outermost scope is gone; every assert type must be back at its
    // default (allowed) state before we free the per-thread data.
    for (int i = 0; i < LAST_PER_THREAD_ASSERT_TYPE; ++i) {
      ASSERT(data_->get(static_cast<PerThreadAssertType>(i)));
    }
    delete data_;
    Thread::SetThreadLocal(thread_local_key, NULL);
  }

  // Returns this thread's assert data, creating it on first use.
  static PerThreadAssertData* AssertData() {
    PerThreadAssertData* data = reinterpret_cast<PerThreadAssertData*>(
        Thread::GetThreadLocal(thread_local_key));
    if (data == NULL) {
      data = new PerThreadAssertData();
      Thread::SetThreadLocal(thread_local_key, data);
    }
    return data;
  }

  static Thread::LocalStorageKey thread_local_key;
  PerThreadAssertData* data_;
  friend class Isolate;
#endif  // DEBUG
};
|||
|
|||
|
|||
|
|||
template <PerThreadAssertType type, bool allow> |
|||
class PerThreadAssertScope : public PerThreadAssertScopeBase { |
|||
public: |
|||
#ifndef DEBUG |
|||
PerThreadAssertScope() { } |
|||
static void SetIsAllowed(bool is_allowed) { } |
|||
#else |
|||
PerThreadAssertScope() { |
|||
old_state_ = data_->get(type); |
|||
data_->set(type, allow); |
|||
} |
|||
|
|||
~PerThreadAssertScope() { data_->set(type, old_state_); } |
|||
|
|||
static bool IsAllowed() { return AssertData()->get(type); } |
|||
|
|||
private: |
|||
bool old_state_; |
|||
#endif |
|||
}; |
|||
|
|||
// Scope to document where we do not expect handles to be created.
|
|||
typedef PerThreadAssertScope<HANDLE_ALLOCATION_ASSERT, false> |
|||
DisallowHandleAllocation; |
|||
|
|||
// Scope to introduce an exception to DisallowHandleAllocation.
|
|||
typedef PerThreadAssertScope<HANDLE_ALLOCATION_ASSERT, true> |
|||
AllowHandleAllocation; |
|||
|
|||
// Scope to document where we do not expect any allocation and GC.
|
|||
typedef PerThreadAssertScope<HEAP_ALLOCATION_ASSERT, false> |
|||
DisallowHeapAllocation; |
|||
|
|||
// Scope to introduce an exception to DisallowHeapAllocation.
|
|||
typedef PerThreadAssertScope<HEAP_ALLOCATION_ASSERT, true> |
|||
AllowHeapAllocation; |
|||
|
|||
// Scope to document where we do not expect any handle dereferences.
|
|||
typedef PerThreadAssertScope<HANDLE_DEREFERENCE_ASSERT, false> |
|||
DisallowHandleDereference; |
|||
|
|||
// Scope to introduce an exception to DisallowHandleDereference.
|
|||
typedef PerThreadAssertScope<HANDLE_DEREFERENCE_ASSERT, true> |
|||
AllowHandleDereference; |
|||
|
|||
// Scope to document where we do not expect deferred handles to be dereferenced.
|
|||
typedef PerThreadAssertScope<DEFERRED_HANDLE_DEREFERENCE_ASSERT, false> |
|||
DisallowDeferredHandleDereference; |
|||
|
|||
// Scope to introduce an exception to DisallowDeferredHandleDereference.
|
|||
typedef PerThreadAssertScope<DEFERRED_HANDLE_DEREFERENCE_ASSERT, true> |
|||
AllowDeferredHandleDereference; |
|||
|
|||
} } // namespace v8::internal
|
|||
|
|||
#endif // V8_ASSERT_SCOPE_H_
|
@ -0,0 +1,267 @@ |
|||
// Copyright 2013 the V8 project authors. All rights reserved.
|
|||
// Redistribution and use in source and binary forms, with or without
|
|||
// modification, are permitted provided that the following conditions are
|
|||
// met:
|
|||
//
|
|||
// * Redistributions of source code must retain the above copyright
|
|||
// notice, this list of conditions and the following disclaimer.
|
|||
// * Redistributions in binary form must reproduce the above
|
|||
// copyright notice, this list of conditions and the following
|
|||
// disclaimer in the documentation and/or other materials provided
|
|||
// with the distribution.
|
|||
// * Neither the name of Google Inc. nor the names of its
|
|||
// contributors may be used to endorse or promote products derived
|
|||
// from this software without specific prior written permission.
|
|||
//
|
|||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
|||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
|||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
|||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
|||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
|||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
|||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
|||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
|||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
|||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
|||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|||
|
|||
|
|||
#include "hydrogen-environment-liveness.h" |
|||
|
|||
|
|||
namespace v8 { |
|||
namespace internal { |
|||
|
|||
|
|||
// Sets up all per-block and per-slot data structures in the analyzer's
// dedicated zone. When no environment in the graph has any slots there is
// nothing to analyze, so allocation is skipped entirely.
EnvironmentSlotLivenessAnalyzer::EnvironmentSlotLivenessAnalyzer(
    HGraph* graph)
    : graph_(graph),
      zone_(graph->isolate()),
      zone_scope_(&zone_, DELETE_ON_EXIT),
      block_count_(graph->blocks()->length()),
      maximum_environment_size_(graph->maximum_environment_size()),
      collect_markers_(true),
      last_simulate_(NULL) {
  if (maximum_environment_size_ == 0) return;

  live_at_block_start_ =
      new(zone()) ZoneList<BitVector*>(block_count_, zone());
  first_simulate_ = new(zone()) ZoneList<HSimulate*>(block_count_, zone());
  first_simulate_invalid_for_index_ =
      new(zone()) ZoneList<BitVector*>(block_count_, zone());
  markers_ = new(zone())
      ZoneList<HEnvironmentMarker*>(maximum_environment_size_, zone());
  went_live_since_last_simulate_ =
      new(zone()) BitVector(maximum_environment_size_, zone());

  // One liveness bit vector and one "invalid for first simulate" bit
  // vector per basic block; first_simulate_ entries start out unknown.
  for (int block = 0; block < block_count_; ++block) {
    live_at_block_start_->Add(
        new(zone()) BitVector(maximum_environment_size_, zone()), zone());
    first_simulate_->Add(NULL, zone());
    first_simulate_invalid_for_index_->Add(
        new(zone()) BitVector(maximum_environment_size_, zone()), zone());
  }
}
|||
|
|||
|
|||
void EnvironmentSlotLivenessAnalyzer::ZapEnvironmentSlot(int index, |
|||
HSimulate* simulate) { |
|||
int operand_index = simulate->ToOperandIndex(index); |
|||
if (operand_index == -1) { |
|||
simulate->AddAssignedValue(index, graph_->GetConstantUndefined()); |
|||
} else { |
|||
simulate->SetOperandAt(operand_index, graph_->GetConstantUndefined()); |
|||
} |
|||
} |
|||
|
|||
|
|||
// When a value is live in successor A but dead in B, we must explicitly
// zap it in B (at B's first simulate).
void EnvironmentSlotLivenessAnalyzer::ZapEnvironmentSlotsInSuccessors(
    HBasicBlock* block,
    BitVector* live) {
  for (HSuccessorIterator it(block->end()); !it.Done(); it.Advance()) {
    HBasicBlock* successor = it.Current();
    int successor_id = successor->block_id();
    BitVector* live_in_successor = live_at_block_start_->at(successor_id);
    // Identical liveness means no slot dies across this edge.
    if (live_in_successor->Equals(*live)) continue;
    for (int slot = 0; slot < live->length(); ++slot) {
      if (!live->Contains(slot)) continue;              // Already dead here.
      if (live_in_successor->Contains(slot)) continue;  // Still live there.
      if (first_simulate_invalid_for_index_->at(successor_id)->Contains(slot)) {
        // A new live range for this slot started before the successor's
        // first simulate, so that simulate must not be touched.
        continue;
      }
      HSimulate* simulate = first_simulate_->at(successor_id);
      if (simulate == NULL) continue;
      ASSERT(simulate->closure().is_identical_to(
          block->last_environment()->closure()));
      ZapEnvironmentSlot(slot, simulate);
    }
  }
}
|||
|
|||
|
|||
// Zaps the marker's slot in the simulate following it, but only for
// markers that end a live range and that have a simulate to rewrite.
void EnvironmentSlotLivenessAnalyzer::ZapEnvironmentSlotsForInstruction(
    HEnvironmentMarker* marker) {
  if (!marker->CheckFlag(HValue::kEndsLiveRange)) return;
  HSimulate* simulate = marker->next_simulate();
  if (simulate == NULL) return;
  ASSERT(simulate->closure().is_identical_to(marker->closure()));
  ZapEnvironmentSlot(marker->index(), simulate);
}
|||
|
|||
|
|||
// Liveness at the end of each block: union of liveness at the start of
// all its successors.
void EnvironmentSlotLivenessAnalyzer::UpdateLivenessAtBlockEnd(
    HBasicBlock* block,
    BitVector* live) {
  live->Clear();
  for (HSuccessorIterator it(block->end()); !it.Done(); it.Advance()) {
    int successor_id = it.Current()->block_id();
    live->Union(*live_at_block_start_->at(successor_id));
  }
}
|||
|
|||
|
|||
// Transfer function of the backwards liveness walk: updates |live| (and
// the analyzer's simulate-tracking state) for a single instruction.
void EnvironmentSlotLivenessAnalyzer::UpdateLivenessAtInstruction(
    HInstruction* instr,
    BitVector* live) {
  switch (instr->opcode()) {
    case HValue::kEnvironmentMarker: {
      HEnvironmentMarker* marker = HEnvironmentMarker::cast(instr);
      int index = marker->index();
      // Walking backwards: if the slot is dead below this point, this
      // marker ends its live range.
      if (live->Contains(index)) {
        marker->ClearFlag(HValue::kEndsLiveRange);
      } else {
        marker->SetFlag(HValue::kEndsLiveRange);
      }
      if (!went_live_since_last_simulate_->Contains(index)) {
        marker->set_next_simulate(last_simulate_);
      }
      if (marker->kind() == HEnvironmentMarker::LOOKUP) {
        live->Add(index);
      } else {
        ASSERT(marker->kind() == HEnvironmentMarker::BIND);
        live->Remove(index);
        went_live_since_last_simulate_->Add(index);
      }
      if (collect_markers_) {
        // Populate |markers_| list during the first pass.
        markers_->Add(marker, &zone_);
      }
      break;
    }
    case HValue::kLeaveInlined:
      // No environment values are live at the end of an inlined section.
      live->Clear();
      last_simulate_ = NULL;

      // The following ASSERTs guard the assumption used in case
      // kEnterInlined below:
      ASSERT(instr->next()->IsSimulate());
      ASSERT(instr->next()->next()->IsGoto());
      break;
    case HValue::kEnterInlined: {
      // Those environment values are live that are live at any return
      // target block. Here we make use of the fact that the end of an
      // inline sequence always looks like this: HLeaveInlined, HSimulate,
      // HGoto (to return_target block), with no environment lookups in
      // between (see ASSERTs above).
      HEnterInlined* enter = HEnterInlined::cast(instr);
      live->Clear();
      for (int i = 0; i < enter->return_targets()->length(); ++i) {
        int return_id = enter->return_targets()->at(i)->block_id();
        // When an AbnormalExit is involved, it can happen that the return
        // target block doesn't actually exist.
        if (return_id < live_at_block_start_->length()) {
          live->Union(*live_at_block_start_->at(return_id));
        }
      }
      last_simulate_ = NULL;
      break;
    }
    case HValue::kDeoptimize: {
      // Keep all environment slots alive.
      HDeoptimize* deopt = HDeoptimize::cast(instr);
      for (int i = deopt->first_local_index();
           i < deopt->first_expression_index(); ++i) {
        live->Add(i);
      }
      break;
    }
    case HValue::kSimulate:
      last_simulate_ = HSimulate::cast(instr);
      went_live_since_last_simulate_->Clear();
      break;
    default:
      break;
  }
}
|||
|
|||
|
|||
void EnvironmentSlotLivenessAnalyzer::AnalyzeAndTrim() { |
|||
HPhase phase("H_EnvironmentLivenessAnalysis", graph_); |
|||
if (maximum_environment_size_ == 0) return; |
|||
|
|||
// Main iteration. Compute liveness of environment slots, and store it
|
|||
// for each block until it doesn't change any more. For efficiency, visit
|
|||
// blocks in reverse order and walk backwards through each block. We
|
|||
// need several iterations to propagate liveness through nested loops.
|
|||
BitVector* live = new(zone()) BitVector(maximum_environment_size_, zone()); |
|||
BitVector* worklist = new(zone()) BitVector(block_count_, zone()); |
|||
for (int i = 0; i < block_count_; ++i) { |
|||
worklist->Add(i); |
|||
} |
|||
while (!worklist->IsEmpty()) { |
|||
for (int block_id = block_count_ - 1; block_id >= 0; --block_id) { |
|||
if (!worklist->Contains(block_id)) { |
|||
continue; |
|||
} |
|||
worklist->Remove(block_id); |
|||
last_simulate_ = NULL; |
|||
|
|||
HBasicBlock* block = graph_->blocks()->at(block_id); |
|||
UpdateLivenessAtBlockEnd(block, live); |
|||
|
|||
for (HInstruction* instr = block->last(); instr != NULL; |
|||
instr = instr->previous()) { |
|||
UpdateLivenessAtInstruction(instr, live); |
|||
} |
|||
|
|||
// Reached the start of the block, do necessary bookkeeping:
|
|||
// store computed information for this block and add predecessors
|
|||
// to the work list as necessary.
|
|||
first_simulate_->Set(block_id, last_simulate_); |
|||
first_simulate_invalid_for_index_->at(block_id)->CopyFrom( |
|||
*went_live_since_last_simulate_); |
|||
if (live_at_block_start_->at(block_id)->UnionIsChanged(*live)) { |
|||
for (int i = 0; i < block->predecessors()->length(); ++i) { |
|||
worklist->Add(block->predecessors()->at(i)->block_id()); |
|||
} |
|||
if (block->IsInlineReturnTarget()) { |
|||
worklist->Add(block->inlined_entry_block()->block_id()); |
|||
} |
|||
} |
|||
} |
|||
// Only collect bind/lookup instructions during the first pass.
|
|||
collect_markers_ = false; |
|||
} |
|||
|
|||
// Analysis finished. Zap dead environment slots.
|
|||
for (int i = 0; i < markers_->length(); ++i) { |
|||
ZapEnvironmentSlotsForInstruction(markers_->at(i)); |
|||
} |
|||
for (int block_id = block_count_ - 1; block_id >= 0; --block_id) { |
|||
HBasicBlock* block = graph_->blocks()->at(block_id); |
|||
UpdateLivenessAtBlockEnd(block, live); |
|||
ZapEnvironmentSlotsInSuccessors(block, live); |
|||
} |
|||
|
|||
// Finally, remove the HEnvironment{Bind,Lookup} markers.
|
|||
for (int i = 0; i < markers_->length(); ++i) { |
|||
markers_->at(i)->DeleteAndReplaceWith(NULL); |
|||
} |
|||
} |
|||
|
|||
} } // namespace v8::internal
|
@ -0,0 +1,94 @@ |
|||
// Copyright 2013 the V8 project authors. All rights reserved.
|
|||
// Redistribution and use in source and binary forms, with or without
|
|||
// modification, are permitted provided that the following conditions are
|
|||
// met:
|
|||
//
|
|||
// * Redistributions of source code must retain the above copyright
|
|||
// notice, this list of conditions and the following disclaimer.
|
|||
// * Redistributions in binary form must reproduce the above
|
|||
// copyright notice, this list of conditions and the following
|
|||
// disclaimer in the documentation and/or other materials provided
|
|||
// with the distribution.
|
|||
// * Neither the name of Google Inc. nor the names of its
|
|||
// contributors may be used to endorse or promote products derived
|
|||
// from this software without specific prior written permission.
|
|||
//
|
|||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
|||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
|||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
|||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
|||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
|||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
|||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
|||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
|||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
|||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
|||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|||
|
|||
#ifndef V8_HYDROGEN_ENVIRONMENT_LIVENESS_H_ |
|||
#define V8_HYDROGEN_ENVIRONMENT_LIVENESS_H_ |
|||
|
|||
|
|||
#include "hydrogen.h" |
|||
|
|||
namespace v8 { |
|||
namespace internal { |
|||
|
|||
|
|||
// Trims live ranges of environment slots by doing explicit liveness analysis.
|
|||
// Values in the environment are kept alive by every subsequent LInstruction
|
|||
// that is assigned an LEnvironment, which creates register pressure and
|
|||
// unnecessary spill slot moves. Therefore it is beneficial to trim the
|
|||
// live ranges of environment slots by zapping them with a constant after
|
|||
// the last lookup that refers to them.
|
|||
// Slots are identified by their index and only affected if whitelisted in
|
|||
// HOptimizedGraphBuilder::IsEligibleForEnvironmentLivenessAnalysis().
|
|||
class EnvironmentSlotLivenessAnalyzer { |
|||
public: |
|||
explicit EnvironmentSlotLivenessAnalyzer(HGraph* graph); |
|||
|
|||
void AnalyzeAndTrim(); |
|||
|
|||
private: |
|||
void ZapEnvironmentSlot(int index, HSimulate* simulate); |
|||
void ZapEnvironmentSlotsInSuccessors(HBasicBlock* block, BitVector* live); |
|||
void ZapEnvironmentSlotsForInstruction(HEnvironmentMarker* marker); |
|||
void UpdateLivenessAtBlockEnd(HBasicBlock* block, BitVector* live); |
|||
void UpdateLivenessAtInstruction(HInstruction* instr, BitVector* live); |
|||
|
|||
Zone* zone() { return &zone_; } |
|||
|
|||
HGraph* graph_; |
|||
// Use a dedicated Zone for this phase, with a ZoneScope to ensure it
|
|||
// gets freed.
|
|||
Zone zone_; |
|||
ZoneScope zone_scope_; |
|||
|
|||
int block_count_; |
|||
|
|||
// Largest number of local variables in any environment in the graph
|
|||
// (including inlined environments).
|
|||
int maximum_environment_size_; |
|||
|
|||
// Per-block data. All these lists are indexed by block_id.
|
|||
ZoneList<BitVector*>* live_at_block_start_; |
|||
ZoneList<HSimulate*>* first_simulate_; |
|||
ZoneList<BitVector*>* first_simulate_invalid_for_index_; |
|||
|
|||
// List of all HEnvironmentMarker instructions for quick iteration/deletion.
|
|||
// It is populated during the first pass over the graph, controlled by
|
|||
// |collect_markers_|.
|
|||
ZoneList<HEnvironmentMarker*>* markers_; |
|||
bool collect_markers_; |
|||
|
|||
// Keeps track of the last simulate seen, as well as the environment slots
|
|||
// for which a new live range has started since (so they must not be zapped
|
|||
// in that simulate when the end of another live range of theirs is found).
|
|||
HSimulate* last_simulate_; |
|||
BitVector* went_live_since_last_simulate_; |
|||
}; |
|||
|
|||
|
|||
} } // namespace v8::internal
|
|||
|
|||
#endif /* V8_HYDROGEN_ENVIRONMENT_LIVENESS_H_ */ |
@ -0,0 +1,855 @@ |
|||
// Copyright 2013 the V8 project authors. All rights reserved.
|
|||
// Redistribution and use in source and binary forms, with or without
|
|||
// modification, are permitted provided that the following conditions are
|
|||
// met:
|
|||
//
|
|||
// * Redistributions of source code must retain the above copyright
|
|||
// notice, this list of conditions and the following disclaimer.
|
|||
// * Redistributions in binary form must reproduce the above
|
|||
// copyright notice, this list of conditions and the following
|
|||
// disclaimer in the documentation and/or other materials provided
|
|||
// with the distribution.
|
|||
// * Neither the name of Google Inc. nor the names of its
|
|||
// contributors may be used to endorse or promote products derived
|
|||
// from this software without specific prior written permission.
|
|||
//
|
|||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
|||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
|||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
|||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
|||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
|||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
|||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
|||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
|||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
|||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
|||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|||
|
|||
#include "hydrogen.h" |
|||
#include "hydrogen-gvn.h" |
|||
#include "v8.h" |
|||
|
|||
namespace v8 { |
|||
namespace internal { |
|||
|
|||
class HValueMap: public ZoneObject { |
|||
public: |
|||
explicit HValueMap(Zone* zone) |
|||
: array_size_(0), |
|||
lists_size_(0), |
|||
count_(0), |
|||
present_flags_(0), |
|||
array_(NULL), |
|||
lists_(NULL), |
|||
free_list_head_(kNil) { |
|||
ResizeLists(kInitialSize, zone); |
|||
Resize(kInitialSize, zone); |
|||
} |
|||
|
|||
void Kill(GVNFlagSet flags); |
|||
|
|||
void Add(HValue* value, Zone* zone) { |
|||
present_flags_.Add(value->gvn_flags()); |
|||
Insert(value, zone); |
|||
} |
|||
|
|||
HValue* Lookup(HValue* value) const; |
|||
|
|||
HValueMap* Copy(Zone* zone) const { |
|||
return new(zone) HValueMap(zone, this); |
|||
} |
|||
|
|||
bool IsEmpty() const { return count_ == 0; } |
|||
|
|||
private: |
|||
// A linked list of HValue* values. Stored in arrays.
|
|||
struct HValueMapListElement { |
|||
HValue* value; |
|||
int next; // Index in the array of the next list element.
|
|||
}; |
|||
static const int kNil = -1; // The end of a linked list
|
|||
|
|||
// Must be a power of 2.
|
|||
static const int kInitialSize = 16; |
|||
|
|||
HValueMap(Zone* zone, const HValueMap* other); |
|||
|
|||
void Resize(int new_size, Zone* zone); |
|||
void ResizeLists(int new_size, Zone* zone); |
|||
void Insert(HValue* value, Zone* zone); |
|||
uint32_t Bound(uint32_t value) const { return value & (array_size_ - 1); } |
|||
|
|||
int array_size_; |
|||
int lists_size_; |
|||
int count_; // The number of values stored in the HValueMap.
|
|||
GVNFlagSet present_flags_; // All flags that are in any value in the
|
|||
// HValueMap.
|
|||
HValueMapListElement* array_; // Primary store - contains the first value
|
|||
// with a given hash. Colliding elements are stored in linked lists.
|
|||
HValueMapListElement* lists_; // The linked lists containing hash collisions.
|
|||
int free_list_head_; // Unused elements in lists_ are on the free list.
|
|||
}; |
|||
|
|||
|
|||
class HSideEffectMap BASE_EMBEDDED { |
|||
public: |
|||
HSideEffectMap(); |
|||
explicit HSideEffectMap(HSideEffectMap* other); |
|||
HSideEffectMap& operator= (const HSideEffectMap& other); |
|||
|
|||
void Kill(GVNFlagSet flags); |
|||
|
|||
void Store(GVNFlagSet flags, HInstruction* instr); |
|||
|
|||
bool IsEmpty() const { return count_ == 0; } |
|||
|
|||
inline HInstruction* operator[](int i) const { |
|||
ASSERT(0 <= i); |
|||
ASSERT(i < kNumberOfTrackedSideEffects); |
|||
return data_[i]; |
|||
} |
|||
inline HInstruction* at(int i) const { return operator[](i); } |
|||
|
|||
private: |
|||
int count_; |
|||
HInstruction* data_[kNumberOfTrackedSideEffects]; |
|||
}; |
|||
|
|||
|
|||
// Prints a printf-style formatted GVN trace message.
void TraceGVN(const char* msg, ...) {
  va_list args;
  va_start(args, msg);
  OS::VPrint(msg, args);
  va_end(args);
}
|||
|
|||
// Wrap TraceGVN in macros to avoid the expense of evaluating its arguments
// when --trace-gvn is off. Each macro expands to a single statement via
// the do { } while (false) idiom, so it composes safely with unbraced
// if/else in callers; the previous bare `if (FLAG_trace_gvn) { ... }`
// form would have captured a following `else`.
#define TRACE_GVN_1(msg, a1)                    \
  do {                                          \
    if (FLAG_trace_gvn) TraceGVN(msg, a1);      \
  } while (false)

#define TRACE_GVN_2(msg, a1, a2)                \
  do {                                          \
    if (FLAG_trace_gvn) TraceGVN(msg, a1, a2);  \
  } while (false)

#define TRACE_GVN_3(msg, a1, a2, a3)                \
  do {                                              \
    if (FLAG_trace_gvn) TraceGVN(msg, a1, a2, a3);  \
  } while (false)

#define TRACE_GVN_4(msg, a1, a2, a3, a4)                \
  do {                                                  \
    if (FLAG_trace_gvn) TraceGVN(msg, a1, a2, a3, a4);  \
  } while (false)

#define TRACE_GVN_5(msg, a1, a2, a3, a4, a5)                \
  do {                                                      \
    if (FLAG_trace_gvn) TraceGVN(msg, a1, a2, a3, a4, a5);  \
  } while (false)
|||
|
|||
|
|||
// Private copy constructor backing Copy(): duplicates both backing arrays
// into |zone| so the new map can be mutated independently of |other|.
HValueMap::HValueMap(Zone* zone, const HValueMap* other)
    : array_size_(other->array_size_),
      lists_size_(other->lists_size_),
      count_(other->count_),
      present_flags_(other->present_flags_),
      array_(zone->NewArray<HValueMapListElement>(other->array_size_)),
      lists_(zone->NewArray<HValueMapListElement>(other->lists_size_)),
      free_list_head_(other->free_list_head_) {
  OS::MemCopy(array_, other->array_,
              array_size_ * sizeof(HValueMapListElement));
  OS::MemCopy(lists_, other->lists_,
              lists_size_ * sizeof(HValueMapListElement));
}
|||
|
|||
|
|||
// Removes every value whose depends-flags intersect the (converted)
// |flags|, returning dropped collision nodes to the free list and
// rebuilding present_flags_ from the survivors.
void HValueMap::Kill(GVNFlagSet flags) {
  GVNFlagSet depends_flags = HValue::ConvertChangesToDependsFlags(flags);
  // Fast path: nothing stored depends on any of the changed flags.
  if (!present_flags_.ContainsAnyOf(depends_flags)) return;
  present_flags_.RemoveAll();
  for (int i = 0; i < array_size_; ++i) {
    if (array_[i].value == NULL) continue;

    // Clear list of collisions first, so we know if it becomes empty.
    int kept = kNil;  // List of kept elements.
    int next;
    for (int current = array_[i].next; current != kNil; current = next) {
      next = lists_[current].next;
      HValue* element = lists_[current].value;
      if (element->gvn_flags().ContainsAnyOf(depends_flags)) {
        // Drop it: push the node onto the free list.
        count_--;
        lists_[current].next = free_list_head_;
        free_list_head_ = current;
      } else {
        // Keep it: push the node onto the kept list.
        lists_[current].next = kept;
        kept = current;
        present_flags_.Add(element->gvn_flags());
      }
    }
    array_[i].next = kept;

    // Now possibly drop directly indexed element.
    HValue* value = array_[i].value;
    if (value->gvn_flags().ContainsAnyOf(depends_flags)) {  // Drop it.
      count_--;
      int head = array_[i].next;
      if (head == kNil) {
        array_[i].value = NULL;
      } else {
        // Promote the head of the collision chain into the primary slot.
        array_[i].value = lists_[head].value;
        array_[i].next = lists_[head].next;
        lists_[head].next = free_list_head_;
        free_list_head_ = head;
      }
    } else {
      present_flags_.Add(value->gvn_flags());  // Keep it.
    }
  }
}
|||
|
|||
|
|||
// Returns the stored value equal to |value| (same hash bucket, Equals()
// true), or NULL if no such value is present.
HValue* HValueMap::Lookup(HValue* value) const {
  uint32_t pos = Bound(static_cast<uint32_t>(value->Hashcode()));
  if (array_[pos].value == NULL) return NULL;
  if (array_[pos].value->Equals(value)) return array_[pos].value;
  // Walk the collision chain hanging off the primary slot.
  for (int cursor = array_[pos].next; cursor != kNil;
       cursor = lists_[cursor].next) {
    if (lists_[cursor].value->Equals(value)) return lists_[cursor].value;
  }
  return NULL;
}
|||
|
|||
|
|||
void HValueMap::Resize(int new_size, Zone* zone) { |
|||
ASSERT(new_size > count_); |
|||
// Hashing the values into the new array has no more collisions than in the
|
|||
// old hash map, so we can use the existing lists_ array, if we are careful.
|
|||
|
|||
// Make sure we have at least one free element.
|
|||
if (free_list_head_ == kNil) { |
|||
ResizeLists(lists_size_ << 1, zone); |
|||
} |
|||
|
|||
HValueMapListElement* new_array = |
|||
zone->NewArray<HValueMapListElement>(new_size); |
|||
memset(new_array, 0, sizeof(HValueMapListElement) * new_size); |
|||
|
|||
HValueMapListElement* old_array = array_; |
|||
int old_size = array_size_; |
|||
|
|||
int old_count = count_; |
|||
count_ = 0; |
|||
// Do not modify present_flags_. It is currently correct.
|
|||
array_size_ = new_size; |
|||
array_ = new_array; |
|||
|
|||
if (old_array != NULL) { |
|||
// Iterate over all the elements in lists, rehashing them.
|
|||
for (int i = 0; i < old_size; ++i) { |
|||
if (old_array[i].value != NULL) { |
|||
int current = old_array[i].next; |
|||
while (current != kNil) { |
|||
Insert(lists_[current].value, zone); |
|||
int next = lists_[current].next; |
|||
lists_[current].next = free_list_head_; |
|||
free_list_head_ = current; |
|||
current = next; |
|||
} |
|||
// Rehash the directly stored value.
|
|||
Insert(old_array[i].value, zone); |
|||
} |
|||
} |
|||
} |
|||
USE(old_count); |
|||
ASSERT(count_ == old_count); |
|||
} |
|||
|
|||
|
|||
void HValueMap::ResizeLists(int new_size, Zone* zone) { |
|||
ASSERT(new_size > lists_size_); |
|||
|
|||
HValueMapListElement* new_lists = |
|||
zone->NewArray<HValueMapListElement>(new_size); |
|||
memset(new_lists, 0, sizeof(HValueMapListElement) * new_size); |
|||
|
|||
HValueMapListElement* old_lists = lists_; |
|||
int old_size = lists_size_; |
|||
|
|||
lists_size_ = new_size; |
|||
lists_ = new_lists; |
|||
|
|||
if (old_lists != NULL) { |
|||
OS::MemCopy(lists_, old_lists, old_size * sizeof(HValueMapListElement)); |
|||
} |
|||
for (int i = old_size; i < lists_size_; ++i) { |
|||
lists_[i].next = free_list_head_; |
|||
free_list_head_ = i; |
|||
} |
|||
} |
|||
|
|||
|
|||
// Inserts |value| into the map, growing the primary array when it is
// half full and the collision array when its free list is exhausted.
void HValueMap::Insert(HValue* value, Zone* zone) {
  ASSERT(value != NULL);
  // Resizing when half of the hashtable is filled up.
  if (count_ >= array_size_ >> 1) Resize(array_size_ << 1, zone);
  ASSERT(count_ < array_size_);
  count_++;
  uint32_t pos = Bound(static_cast<uint32_t>(value->Hashcode()));
  if (array_[pos].value == NULL) {
    // Empty bucket: store directly in the primary array.
    array_[pos].value = value;
    array_[pos].next = kNil;
    return;
  }
  // Collision: take a node from the free list and push it onto the
  // bucket's chain.
  if (free_list_head_ == kNil) {
    ResizeLists(lists_size_ << 1, zone);
  }
  int new_element_pos = free_list_head_;
  ASSERT(new_element_pos != kNil);
  free_list_head_ = lists_[free_list_head_].next;
  lists_[new_element_pos].value = value;
  lists_[new_element_pos].next = array_[pos].next;
  ASSERT(array_[pos].next == kNil || lists_[array_[pos].next].value != NULL);
  array_[pos].next = new_element_pos;
}
|||
|
|||
|
|||
// Creates an empty map: no tracked side effect has a dominating instruction.
HSideEffectMap::HSideEffectMap() : count_(0) {
  for (int i = 0; i < kNumberOfTrackedSideEffects; ++i) {
    data_[i] = NULL;
  }
}
|||
|
|||
|
|||
// Copy-constructs from |other|, duplicating both the entry count and the
// per-side-effect dominator table.
HSideEffectMap::HSideEffectMap(HSideEffectMap* other) : count_(other->count_) {
  OS::MemCopy(data_, other->data_, kNumberOfTrackedSideEffects * kPointerSize);
}
|||
|
|||
|
|||
// Assigns the complete state of |other| to this map.
//
// BUG FIX: the previous version copied data_ but not count_, leaving count_
// stale after assignment. count_ is an invariant mirror of the number of
// non-NULL entries in data_ (Kill() decrements it, Store() increments it,
// and IsEmpty()-style checks read it), so an assignment that updates data_
// without count_ silently corrupts that invariant — e.g. when a dominator
// table is propagated with "dominators_ = *dominators" during the GVN
// dominator-tree traversal. Copying count_ restores the invariant; the copy
// constructor (which already copies count_ separately) is unaffected.
HSideEffectMap& HSideEffectMap::operator= (const HSideEffectMap& other) {
  if (this != &other) {
    OS::MemCopy(data_, other.data_, kNumberOfTrackedSideEffects * kPointerSize);
    count_ = other.count_;  // Keep count_ consistent with data_.
  }
  return *this;
}
|||
|
|||
// Clears the dominating instruction for every tracked side effect whose
// "changes" flag appears in |flags|, keeping count_ in sync with the number
// of non-NULL entries.
void HSideEffectMap::Kill(GVNFlagSet flags) {
  for (int index = 0; index < kNumberOfTrackedSideEffects; index++) {
    GVNFlag changes_flag = HValue::ChangesFlagFromInt(index);
    if (!flags.Contains(changes_flag)) continue;  // Effect not killed.
    if (data_[index] != NULL) count_--;
    data_[index] = NULL;
  }
}
|||
|
|||
|
|||
// Records |instr| as the dominating instruction for every tracked side
// effect whose "changes" flag appears in |flags|; count_ tracks how many
// slots are occupied.
void HSideEffectMap::Store(GVNFlagSet flags, HInstruction* instr) {
  for (int index = 0; index < kNumberOfTrackedSideEffects; index++) {
    GVNFlag changes_flag = HValue::ChangesFlagFromInt(index);
    if (!flags.Contains(changes_flag)) continue;  // Effect not produced.
    if (data_[index] == NULL) count_++;
    data_[index] = instr;
  }
}
|||
|
|||
|
|||
// Sets up the per-block and per-loop side-effect tables, sized to the number
// of basic blocks in |graph|. Handle allocation must already be disallowed
// for the duration of the GVN phase.
HGlobalValueNumberer::HGlobalValueNumberer(HGraph* graph, CompilationInfo* info)
    : graph_(graph),
      info_(info),
      removed_side_effects_(false),
      block_side_effects_(graph->blocks()->length(), graph->zone()),
      loop_side_effects_(graph->blocks()->length(), graph->zone()),
      visited_on_paths_(graph->zone(), graph->blocks()->length()) {
  ASSERT(!AllowHandleAllocation::IsAllowed());
  // Pre-populate both lists with one empty flag set per basic block so that
  // they can be indexed directly by block id later on.
  block_side_effects_.AddBlock(GVNFlagSet(), graph_->blocks()->length(),
                               graph_->zone());
  loop_side_effects_.AddBlock(GVNFlagSet(), graph_->blocks()->length(),
                              graph_->zone());
}
|||
|
|||
bool HGlobalValueNumberer::Analyze() { |
|||
removed_side_effects_ = false; |
|||
ComputeBlockSideEffects(); |
|||
if (FLAG_loop_invariant_code_motion) { |
|||
LoopInvariantCodeMotion(); |
|||
} |
|||
AnalyzeGraph(); |
|||
return removed_side_effects_; |
|||
} |
|||
|
|||
|
|||
// Computes, for every basic block, the set of side effects ("changes" flags)
// produced by its instructions, and aggregates those sets per loop.
void HGlobalValueNumberer::ComputeBlockSideEffects() {
  // The Analyze phase of GVN can be called multiple times. Clear loop side
  // effects before computing them to erase the contents from previous Analyze
  // passes.
  for (int i = 0; i < loop_side_effects_.length(); ++i) {
    loop_side_effects_[i].RemoveAll();
  }
  // Walk blocks in reverse id order.
  // NOTE(review): this appears to rely on inner blocks having larger ids than
  // their loop headers so effects accumulate inner-to-outer — confirm against
  // the block-numbering invariants of HGraph.
  for (int i = graph_->blocks()->length() - 1; i >= 0; --i) {
    // Compute side effects for the block.
    HBasicBlock* block = graph_->blocks()->at(i);
    HInstruction* instr = block->first();
    int id = block->block_id();
    GVNFlagSet side_effects;
    while (instr != NULL) {
      side_effects.Add(instr->ChangesFlags());
      if (instr->IsSoftDeoptimize()) {
        // Everything after a soft deoptimize is unreachable for GVN purposes:
        // discard the effects gathered so far for this block.
        block_side_effects_[id].RemoveAll();
        side_effects.RemoveAll();
        break;
      }
      instr = instr->next();
    }
    block_side_effects_[id].Add(side_effects);

    // Loop headers are part of their loop.
    if (block->IsLoopHeader()) {
      loop_side_effects_[id].Add(side_effects);
    }

    // Propagate loop side effects upwards.
    if (block->HasParentLoopHeader()) {
      int header_id = block->parent_loop_header()->block_id();
      // An inner loop header contributes its whole loop's effects to the
      // enclosing loop; an ordinary block contributes just its own.
      loop_side_effects_[header_id].Add(block->IsLoopHeader()
                                        ? loop_side_effects_[id]
                                        : side_effects);
    }
  }
}
|||
|
|||
|
|||
// Renders |flags| as a human-readable string for tracing. In debug builds the
// individual flag names are listed, using the negated form ("all except [...]")
// whenever that is the shorter description; in release builds only the raw bit
// pattern is printed. The returned buffer is heap-allocated and owned by the
// SmartArrayPointer.
SmartArrayPointer<char> GetGVNFlagsString(GVNFlagSet flags) {
  // Generous stack scratch space: up to 128 characters per flag.
  char underlying_buffer[kLastFlag * 128];
  Vector<char> buffer(underlying_buffer, sizeof(underlying_buffer));
#if DEBUG
  int offset = 0;
  const char* separator = "";
  const char* comma = ", ";
  buffer[0] = 0;
  uint32_t set_depends_on = 0;
  uint32_t set_changes = 0;
  // Count how many "changes" vs "depends on" bits are set.
  // NOTE(review): assumes the GVNFlag enum interleaves kChanges*/kDependsOn*
  // bits (even = changes, odd = depends) — confirm against the flag macros.
  for (int bit = 0; bit < kLastFlag; ++bit) {
    if ((flags.ToIntegral() & (1 << bit)) != 0) {
      if (bit % 2 == 0) {
        set_changes++;
      } else {
        set_depends_on++;
      }
    }
  }
  // Pick whichever listing (set bits or unset bits) is shorter.
  bool positive_changes = set_changes < (kLastFlag / 2);
  bool positive_depends_on = set_depends_on < (kLastFlag / 2);
  if (set_changes > 0) {
    if (positive_changes) {
      offset += OS::SNPrintF(buffer + offset, "changes [");
    } else {
      offset += OS::SNPrintF(buffer + offset, "changes all except [");
    }
    for (int bit = 0; bit < kLastFlag; ++bit) {
      if (((flags.ToIntegral() & (1 << bit)) != 0) == positive_changes) {
        switch (static_cast<GVNFlag>(bit)) {
// Expands to one case per flag that appends the flag's name.
#define DECLARE_FLAG(type)                                \
          case kChanges##type:                            \
            offset += OS::SNPrintF(buffer + offset, separator);  \
            offset += OS::SNPrintF(buffer + offset, #type);      \
            separator = comma;                            \
            break;
GVN_TRACKED_FLAG_LIST(DECLARE_FLAG)
GVN_UNTRACKED_FLAG_LIST(DECLARE_FLAG)
#undef DECLARE_FLAG
          default:
              break;
        }
      }
    }
    offset += OS::SNPrintF(buffer + offset, "]");
  }
  if (set_depends_on > 0) {
    separator = "";
    if (set_changes > 0) {
      offset += OS::SNPrintF(buffer + offset, ", ");
    }
    if (positive_depends_on) {
      offset += OS::SNPrintF(buffer + offset, "depends on [");
    } else {
      offset += OS::SNPrintF(buffer + offset, "depends on all except [");
    }
    for (int bit = 0; bit < kLastFlag; ++bit) {
      if (((flags.ToIntegral() & (1 << bit)) != 0) == positive_depends_on) {
        switch (static_cast<GVNFlag>(bit)) {
// Same expansion as above, but matching the kDependsOn* side of each flag.
#define DECLARE_FLAG(type)                                \
          case kDependsOn##type:                          \
            offset += OS::SNPrintF(buffer + offset, separator);  \
            offset += OS::SNPrintF(buffer + offset, #type);      \
            separator = comma;                            \
            break;
GVN_TRACKED_FLAG_LIST(DECLARE_FLAG)
GVN_UNTRACKED_FLAG_LIST(DECLARE_FLAG)
#undef DECLARE_FLAG
          default:
            break;
        }
      }
    }
    offset += OS::SNPrintF(buffer + offset, "]");
  }
#else
  // Release build: just print the raw bit pattern.
  OS::SNPrintF(buffer, "0x%08X", flags.ToIntegral());
#endif
  // Copy the scratch buffer into an exactly-sized heap allocation; the
  // SmartArrayPointer takes ownership (delete[]).
  size_t string_len = strlen(underlying_buffer) + 1;
  ASSERT(string_len <= sizeof(underlying_buffer));
  char* result = new char[strlen(underlying_buffer) + 1];
  OS::MemCopy(result, underlying_buffer, string_len);
  return SmartArrayPointer<char>(result);
}
|||
|
|||
|
|||
// Attempts to hoist loop-invariant instructions out of every loop in the
// graph. Loops are visited in reverse block-id order, and for each loop every
// block from its header up to its last back edge is processed.
void HGlobalValueNumberer::LoopInvariantCodeMotion() {
  TRACE_GVN_1("Using optimistic loop invariant code motion: %s\n",
              graph_->use_optimistic_licm() ? "yes" : "no");
  for (int i = graph_->blocks()->length() - 1; i >= 0; --i) {
    HBasicBlock* block = graph_->blocks()->at(i);
    if (block->IsLoopHeader()) {
      // Side effects produced anywhere in this loop; an instruction that
      // depends on any of them cannot be hoisted.
      GVNFlagSet side_effects = loop_side_effects_[block->block_id()];
      TRACE_GVN_2("Try loop invariant motion for block B%d %s\n",
                  block->block_id(),
                  *GetGVNFlagsString(side_effects));

      // Accumulators shared across all blocks of this loop; filled in by
      // ProcessLoopBlock as non-hoisted instructions are encountered.
      GVNFlagSet accumulated_first_time_depends;
      GVNFlagSet accumulated_first_time_changes;
      HBasicBlock* last = block->loop_information()->GetLastBackEdge();
      // NOTE(review): iterating ids [header, last back edge] assumes loop
      // bodies occupy a contiguous block-id range — confirm against HGraph's
      // block ordering.
      for (int j = block->block_id(); j <= last->block_id(); ++j) {
        ProcessLoopBlock(graph_->blocks()->at(j), block, side_effects,
                         &accumulated_first_time_depends,
                         &accumulated_first_time_changes);
      }
    }
  }
}
|||
|
|||
|
|||
// Examines one block of a loop and hoists eligible instructions to the loop
// pre-header.
//   block              - the loop block being scanned.
//   loop_header        - header of the enclosing loop; its first predecessor
//                        is used as the hoist target (pre-header).
//   loop_kills         - all "changes" flags produced anywhere in the loop.
//   first_time_depends/first_time_changes - accumulators of the depends/changes
//                        flags of instructions left inside the loop so far.
void HGlobalValueNumberer::ProcessLoopBlock(
    HBasicBlock* block,
    HBasicBlock* loop_header,
    GVNFlagSet loop_kills,
    GVNFlagSet* first_time_depends,
    GVNFlagSet* first_time_changes) {
  HBasicBlock* pre_header = loop_header->predecessors()->at(0);
  // An instruction depending on anything the loop changes must stay put.
  GVNFlagSet depends_flags = HValue::ConvertChangesToDependsFlags(loop_kills);
  TRACE_GVN_2("Loop invariant motion for B%d %s\n",
              block->block_id(),
              *GetGVNFlagsString(depends_flags));
  HInstruction* instr = block->first();
  while (instr != NULL) {
    // Grab the successor first: hoisting unlinks instr from this block.
    HInstruction* next = instr->next();
    bool hoisted = false;
    if (instr->CheckFlag(HValue::kUseGVN)) {
      TRACE_GVN_4("Checking instruction %d (%s) %s. Loop %s\n",
                  instr->id(),
                  instr->Mnemonic(),
                  *GetGVNFlagsString(instr->gvn_flags()),
                  *GetGVNFlagsString(loop_kills));
      bool can_hoist = !instr->gvn_flags().ContainsAnyOf(depends_flags);
      // Without optimistic LICM, only hoist from blocks that dominate every
      // loop exit, so the instruction is guaranteed to have executed anyway.
      if (can_hoist && !graph()->use_optimistic_licm()) {
        can_hoist = block->IsLoopSuccessorDominator();
      }

      if (can_hoist) {
        // All operands must be defined at or before the pre-header.
        bool inputs_loop_invariant = true;
        for (int i = 0; i < instr->OperandCount(); ++i) {
          if (instr->OperandAt(i)->IsDefinedAfter(pre_header)) {
            inputs_loop_invariant = false;
          }
        }

        if (inputs_loop_invariant && ShouldMove(instr, loop_header)) {
          TRACE_GVN_1("Hoisting loop invariant instruction %d\n", instr->id());
          // Move the instruction out of the loop.
          instr->Unlink();
          instr->InsertBefore(pre_header->end());
          if (instr->HasSideEffects()) removed_side_effects_ = true;
          hoisted = true;
        }
      }
    }
    if (!hoisted) {
      // If an instruction is not hoisted, we have to account for its side
      // effects when hoisting later HTransitionElementsKind instructions.
      GVNFlagSet previous_depends = *first_time_depends;
      GVNFlagSet previous_changes = *first_time_changes;
      first_time_depends->Add(instr->DependsOnFlags());
      first_time_changes->Add(instr->ChangesFlags());
      // Trace only when the accumulated sets actually grew.
      if (!(previous_depends == *first_time_depends)) {
        TRACE_GVN_1("Updated first-time accumulated %s\n",
                    *GetGVNFlagsString(*first_time_depends));
      }
      if (!(previous_changes == *first_time_changes)) {
        TRACE_GVN_1("Updated first-time accumulated %s\n",
                    *GetGVNFlagsString(*first_time_changes));
      }
    }
    instr = next;
  }
}
|||
|
|||
|
|||
bool HGlobalValueNumberer::AllowCodeMotion() { |
|||
return info()->IsStub() || info()->opt_count() + 1 < FLAG_max_opt_count; |
|||
} |
|||
|
|||
|
|||
// Decides whether |instr| may be moved. Hoisting is refused when code motion
// is globally disabled, or when the instruction's block unconditionally
// deoptimizes (moving it would execute it on a path that never ran it).
bool HGlobalValueNumberer::ShouldMove(HInstruction* instr,
                                      HBasicBlock* loop_header) {
  if (!AllowCodeMotion()) return false;
  return !instr->block()->IsDeoptimizing();
}
|||
|
|||
|
|||
// Recursively unions the side effects of every block lying on some path from
// |dominator| to |dominated| (exclusive of both endpoints), walking backwards
// through predecessors. Only blocks whose id lies strictly between the two
// endpoints' ids are considered, and visited_on_paths_ (cleared by the caller)
// guards against revisiting a block.
GVNFlagSet HGlobalValueNumberer::CollectSideEffectsOnPathsToDominatedBlock(
    HBasicBlock* dominator, HBasicBlock* dominated) {
  GVNFlagSet side_effects;
  for (int i = 0; i < dominated->predecessors()->length(); ++i) {
    HBasicBlock* block = dominated->predecessors()->at(i);
    if (dominator->block_id() < block->block_id() &&
        block->block_id() < dominated->block_id() &&
        visited_on_paths_.Add(block->block_id())) {
      side_effects.Add(block_side_effects_[block->block_id()]);
      // A loop header on the path implies the whole loop may execute.
      if (block->IsLoopHeader()) {
        side_effects.Add(loop_side_effects_[block->block_id()]);
      }
      side_effects.Add(CollectSideEffectsOnPathsToDominatedBlock(
          dominator, block));
    }
  }
  return side_effects;
}
|||
|
|||
|
|||
// Each instance of this class is like a "stack frame" for the recursive
|
|||
// traversal of the dominator tree done during GVN (the stack is handled
|
|||
// as a double linked list).
|
|||
// We reuse frames when possible so the list length is limited by the depth
|
|||
// of the dominator tree but this forces us to initialize each frame calling
|
|||
// an explicit "Initialize" method instead of a using constructor.
|
|||
class GvnBasicBlockState: public ZoneObject { |
|||
public: |
|||
static GvnBasicBlockState* CreateEntry(Zone* zone, |
|||
HBasicBlock* entry_block, |
|||
HValueMap* entry_map) { |
|||
return new(zone) |
|||
GvnBasicBlockState(NULL, entry_block, entry_map, NULL, zone); |
|||
} |
|||
|
|||
HBasicBlock* block() { return block_; } |
|||
HValueMap* map() { return map_; } |
|||
HSideEffectMap* dominators() { return &dominators_; } |
|||
|
|||
GvnBasicBlockState* next_in_dominator_tree_traversal( |
|||
Zone* zone, |
|||
HBasicBlock** dominator) { |
|||
// This assignment needs to happen before calling next_dominated() because
|
|||
// that call can reuse "this" if we are at the last dominated block.
|
|||
*dominator = block(); |
|||
GvnBasicBlockState* result = next_dominated(zone); |
|||
if (result == NULL) { |
|||
GvnBasicBlockState* dominator_state = pop(); |
|||
if (dominator_state != NULL) { |
|||
// This branch is guaranteed not to return NULL because pop() never
|
|||
// returns a state where "is_done() == true".
|
|||
*dominator = dominator_state->block(); |
|||
result = dominator_state->next_dominated(zone); |
|||
} else { |
|||
// Unnecessary (we are returning NULL) but done for cleanness.
|
|||
*dominator = NULL; |
|||
} |
|||
} |
|||
return result; |
|||
} |
|||
|
|||
private: |
|||
void Initialize(HBasicBlock* block, |
|||
HValueMap* map, |
|||
HSideEffectMap* dominators, |
|||
bool copy_map, |
|||
Zone* zone) { |
|||
block_ = block; |
|||
map_ = copy_map ? map->Copy(zone) : map; |
|||
dominated_index_ = -1; |
|||
length_ = block->dominated_blocks()->length(); |
|||
if (dominators != NULL) { |
|||
dominators_ = *dominators; |
|||
} |
|||
} |
|||
bool is_done() { return dominated_index_ >= length_; } |
|||
|
|||
GvnBasicBlockState(GvnBasicBlockState* previous, |
|||
HBasicBlock* block, |
|||
HValueMap* map, |
|||
HSideEffectMap* dominators, |
|||
Zone* zone) |
|||
: previous_(previous), next_(NULL) { |
|||
Initialize(block, map, dominators, true, zone); |
|||
} |
|||
|
|||
GvnBasicBlockState* next_dominated(Zone* zone) { |
|||
dominated_index_++; |
|||
if (dominated_index_ == length_ - 1) { |
|||
// No need to copy the map for the last child in the dominator tree.
|
|||
Initialize(block_->dominated_blocks()->at(dominated_index_), |
|||
map(), |
|||
dominators(), |
|||
false, |
|||
zone); |
|||
return this; |
|||
} else if (dominated_index_ < length_) { |
|||
return push(zone, |
|||
block_->dominated_blocks()->at(dominated_index_), |
|||
dominators()); |
|||
} else { |
|||
return NULL; |
|||
} |
|||
} |
|||
|
|||
GvnBasicBlockState* push(Zone* zone, |
|||
HBasicBlock* block, |
|||
HSideEffectMap* dominators) { |
|||
if (next_ == NULL) { |
|||
next_ = |
|||
new(zone) GvnBasicBlockState(this, block, map(), dominators, zone); |
|||
} else { |
|||
next_->Initialize(block, map(), dominators, true, zone); |
|||
} |
|||
return next_; |
|||
} |
|||
GvnBasicBlockState* pop() { |
|||
GvnBasicBlockState* result = previous_; |
|||
while (result != NULL && result->is_done()) { |
|||
TRACE_GVN_2("Backtracking from block B%d to block b%d\n", |
|||
block()->block_id(), |
|||
previous_->block()->block_id()) |
|||
result = result->previous_; |
|||
} |
|||
return result; |
|||
} |
|||
|
|||
GvnBasicBlockState* previous_; |
|||
GvnBasicBlockState* next_; |
|||
HBasicBlock* block_; |
|||
HValueMap* map_; |
|||
HSideEffectMap dominators_; |
|||
int dominated_index_; |
|||
int length_; |
|||
}; |
|||
|
|||
// This is a recursive traversal of the dominator tree but it has been turned
// into a loop to avoid stack overflows.
// The logical "stack frames" of the recursion are kept in a list of
// GvnBasicBlockState instances.
// For each block it kills values invalidated by side effects, replaces
// GVN-able instructions with previously-seen equal values, and records
// side-effect dominators.
void HGlobalValueNumberer::AnalyzeGraph() {
  HBasicBlock* entry_block = graph_->entry_block();
  HValueMap* entry_map = new(zone()) HValueMap(zone());
  GvnBasicBlockState* current =
      GvnBasicBlockState::CreateEntry(zone(), entry_block, entry_map);

  while (current != NULL) {
    HBasicBlock* block = current->block();
    HValueMap* map = current->map();
    HSideEffectMap* dominators = current->dominators();

    TRACE_GVN_2("Analyzing block B%d%s\n",
                block->block_id(),
                block->IsLoopHeader() ? " (loop header)" : "");

    // If this is a loop header kill everything killed by the loop.
    if (block->IsLoopHeader()) {
      map->Kill(loop_side_effects_[block->block_id()]);
    }

    // Go through all instructions of the current block.
    HInstruction* instr = block->first();
    while (instr != NULL) {
      // DeleteAndReplaceWith below unlinks instr, so fetch the successor now.
      HInstruction* next = instr->next();
      GVNFlagSet flags = instr->ChangesFlags();
      if (!flags.IsEmpty()) {
        // Clear all instructions in the map that are affected by side effects.
        // Store instruction as the dominating one for tracked side effects.
        map->Kill(flags);
        dominators->Store(flags, instr);
        TRACE_GVN_2("Instruction %d %s\n", instr->id(),
                    *GetGVNFlagsString(flags));
      }
      if (instr->CheckFlag(HValue::kUseGVN)) {
        ASSERT(!instr->HasObservableSideEffects());
        HValue* other = map->Lookup(instr);
        if (other != NULL) {
          // An equivalent value already exists on this dominator path:
          // replace and delete the redundant instruction.
          ASSERT(instr->Equals(other) && other->Equals(instr));
          TRACE_GVN_4("Replacing value %d (%s) with value %d (%s)\n",
                      instr->id(),
                      instr->Mnemonic(),
                      other->id(),
                      other->Mnemonic());
          if (instr->HasSideEffects()) removed_side_effects_ = true;
          instr->DeleteAndReplaceWith(other);
        } else {
          map->Add(instr, zone());
        }
      }
      // For instructions that track side-effect dominators, wire up the
      // most recent instruction producing each effect they depend on.
      if (instr->IsLinked() &&
          instr->CheckFlag(HValue::kTrackSideEffectDominators)) {
        for (int i = 0; i < kNumberOfTrackedSideEffects; i++) {
          HValue* other = dominators->at(i);
          GVNFlag changes_flag = HValue::ChangesFlagFromInt(i);
          GVNFlag depends_on_flag = HValue::DependsOnFlagFromInt(i);
          if (instr->DependsOnFlags().Contains(depends_on_flag) &&
              (other != NULL)) {
            TRACE_GVN_5("Side-effect #%d in %d (%s) is dominated by %d (%s)\n",
                        i,
                        instr->id(),
                        instr->Mnemonic(),
                        other->id(),
                        other->Mnemonic());
            instr->SetSideEffectDominator(changes_flag, other);
          }
        }
      }
      instr = next;
    }

    HBasicBlock* dominator_block;
    GvnBasicBlockState* next =
        current->next_in_dominator_tree_traversal(zone(), &dominator_block);

    if (next != NULL) {
      HBasicBlock* dominated = next->block();
      HValueMap* successor_map = next->map();
      HSideEffectMap* successor_dominators = next->dominators();

      // Kill everything killed on any path between this block and the
      // dominated block. We don't have to traverse these paths if the
      // value map and the dominators list is already empty. If the range
      // of block ids (block_id, dominated_id) is empty there are no such
      // paths.
      if ((!successor_map->IsEmpty() || !successor_dominators->IsEmpty()) &&
          dominator_block->block_id() + 1 < dominated->block_id()) {
        visited_on_paths_.Clear();
        GVNFlagSet side_effects_on_all_paths =
            CollectSideEffectsOnPathsToDominatedBlock(dominator_block,
                                                      dominated);
        successor_map->Kill(side_effects_on_all_paths);
        successor_dominators->Kill(side_effects_on_all_paths);
      }
    }
    current = next;
  }
}
|||
|
|||
} } // namespace v8::internal
|
@ -0,0 +1,123 @@ |
|||
// Copyright 2013 the V8 project authors. All rights reserved.
|
|||
// Redistribution and use in source and binary forms, with or without
|
|||
// modification, are permitted provided that the following conditions are
|
|||
// met:
|
|||
//
|
|||
// * Redistributions of source code must retain the above copyright
|
|||
// notice, this list of conditions and the following disclaimer.
|
|||
// * Redistributions in binary form must reproduce the above
|
|||
// copyright notice, this list of conditions and the following
|
|||
// disclaimer in the documentation and/or other materials provided
|
|||
// with the distribution.
|
|||
// * Neither the name of Google Inc. nor the names of its
|
|||
// contributors may be used to endorse or promote products derived
|
|||
// from this software without specific prior written permission.
|
|||
//
|
|||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
|||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
|||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
|||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
|||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
|||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
|||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
|||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
|||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
|||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
|||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|||
|
|||
#ifndef V8_HYDROGEN_GVN_H_ |
|||
#define V8_HYDROGEN_GVN_H_ |
|||
|
|||
#include "hydrogen.h" |
|||
#include "hydrogen-instructions.h" |
|||
#include "compiler.h" |
|||
#include "zone.h" |
|||
|
|||
namespace v8 { |
|||
namespace internal { |
|||
|
|||
// Simple sparse set with O(1) add, contains, and clear.
// Classic dense/sparse cross-indexed representation (cf. Briggs & Torczon,
// "An Efficient Representation for Sparse Sets"): Contains() may read an
// uninitialized sparse_ slot, which is safe because the result is validated
// against dense_ before being trusted.
class SparseSet {
 public:
  SparseSet(Zone* zone, int capacity)
      : capacity_(capacity),
        length_(0),
        dense_(zone->NewArray<int>(capacity)),
        sparse_(zone->NewArray<int>(capacity)) {
#ifndef NVALGRIND
    // Initialize the sparse array to make valgrind happy.
    memset(sparse_, 0, sizeof(sparse_[0]) * capacity);
#endif
  }

  // True iff n is in the set. sparse_[n] may hold garbage; the cross-check
  // against dense_ rejects stale or uninitialized entries.
  bool Contains(int n) const {
    ASSERT(0 <= n && n < capacity_);
    int d = sparse_[n];
    return 0 <= d && d < length_ && dense_[d] == n;
  }

  // Adds n; returns false if it was already present.
  bool Add(int n) {
    if (Contains(n)) return false;
    dense_[length_] = n;
    sparse_[n] = length_;
    ++length_;
    return true;
  }

  // O(1): simply forget the dense prefix; stale sparse_ entries are harmless.
  void Clear() { length_ = 0; }

 private:
  int capacity_;   // Largest storable value is capacity_ - 1.
  int length_;     // Number of elements currently in the set.
  int* dense_;     // dense_[0..length_) lists the members.
  int* sparse_;    // sparse_[n] is n's index in dense_ (if a member).

  DISALLOW_COPY_AND_ASSIGN(SparseSet);
};
|||
|
|||
|
|||
// Performs global value numbering over a Hydrogen graph: eliminates
// redundant, side-effect-free computations and (optionally) hoists
// loop-invariant instructions. Stack-allocated, used once per Analyze() run.
class HGlobalValueNumberer BASE_EMBEDDED {
 public:
  HGlobalValueNumberer(HGraph* graph, CompilationInfo* info);

  // Returns true if values with side effects are removed.
  bool Analyze();

 private:
  // Unions the side effects of all blocks on paths between dominator and
  // dominated (uses visited_on_paths_ as a recursion guard).
  GVNFlagSet CollectSideEffectsOnPathsToDominatedBlock(
      HBasicBlock* dominator,
      HBasicBlock* dominated);
  // Walks the dominator tree numbering values.
  void AnalyzeGraph();
  // Fills block_side_effects_ and loop_side_effects_.
  void ComputeBlockSideEffects();
  // Hoists invariant instructions out of every loop.
  void LoopInvariantCodeMotion();
  // Scans one loop block, hoisting eligible instructions to before_loop's
  // pre-header and accumulating the flags of instructions left behind.
  void ProcessLoopBlock(HBasicBlock* block,
                        HBasicBlock* before_loop,
                        GVNFlagSet loop_kills,
                        GVNFlagSet* accumulated_first_time_depends,
                        GVNFlagSet* accumulated_first_time_changes);
  // Policy checks gating loop-invariant code motion.
  bool AllowCodeMotion();
  bool ShouldMove(HInstruction* instr, HBasicBlock* loop_header);

  HGraph* graph() { return graph_; }
  CompilationInfo* info() { return info_; }
  Zone* zone() const { return graph_->zone(); }

  HGraph* graph_;
  CompilationInfo* info_;
  // Set when an instruction with side effects was eliminated or hoisted.
  bool removed_side_effects_;

  // A map of block IDs to their side effects.
  ZoneList<GVNFlagSet> block_side_effects_;

  // A map of loop header block IDs to their loop's side effects.
  ZoneList<GVNFlagSet> loop_side_effects_;

  // Used when collecting side effects on paths from dominator to
  // dominated.
  SparseSet visited_on_paths_;
};
|||
|
|||
|
|||
} } // namespace v8::internal
|
|||
|
|||
#endif // V8_HYDROGEN_GVN_H_
|
File diff suppressed because it is too large
File diff suppressed because it is too large
Some files were not shown because too many files changed in this diff
Loading…
Reference in new issue