Attention is currently required from: Vyacheslav Egorov.
Martin Kustermann would like Vyacheslav Egorov to review this change.
[vm/compiler] Assign a deopt id to StoreInstanceField iff the store can cause box allocation
If a StoreInstanceField can cause a box allocation, it can go to runtime
which can cause lazy-deopt.
Issue https://github.com/dart-lang/sdk/issues/45213
TEST=Existing test suite.
Change-Id: I30342f660d731e46dbebffe3e9111028023aaceb
---
M runtime/vm/compiler/backend/il.cc
M runtime/vm/compiler/backend/il.h
M runtime/vm/compiler/backend/inliner.cc
M runtime/vm/compiler/backend/redundancy_elimination_test.cc
M runtime/vm/compiler/backend/slot.cc
M runtime/vm/compiler/backend/slot.h
M runtime/vm/compiler/call_specializer.cc
M runtime/vm/compiler/frontend/base_flow_graph_builder.cc
M runtime/vm/compiler/graph_intrinsifier.cc
M runtime/vm/compiler/jit/jit_call_specializer.cc
10 files changed, 63 insertions(+), 49 deletions(-)
diff --git a/runtime/vm/compiler/backend/il.cc b/runtime/vm/compiler/backend/il.cc
index c155d06..e38c615 100644
--- a/runtime/vm/compiler/backend/il.cc
+++ b/runtime/vm/compiler/backend/il.cc
@@ -1028,8 +1028,9 @@
}
bool StoreInstanceFieldInstr::IsPotentialUnboxedStore() const {
- return slot().IsDartField() &&
- FlowGraphCompiler::IsPotentialUnboxedField(slot().field());
+ if (!slot().IsDartField()) return false;
+ if (!slot().can_be_unboxed_field()) return false;
+ return FlowGraphCompiler::IsPotentialUnboxedField(slot().field());
}
Representation StoreInstanceFieldInstr::RequiredInputRepresentation(
diff --git a/runtime/vm/compiler/backend/il.h b/runtime/vm/compiler/backend/il.h
index 3cf8ffc..5aade32 100644
--- a/runtime/vm/compiler/backend/il.h
+++ b/runtime/vm/compiler/backend/il.h
@@ -5288,31 +5288,20 @@
Value* value,
StoreBarrierType emit_store_barrier,
const InstructionSource& source,
+ intptr_t deopt_id,
Kind kind = Kind::kOther)
- : TemplateInstruction(source),
+ : TemplateInstruction(source, deopt_id),
slot_(slot),
emit_store_barrier_(emit_store_barrier),
token_pos_(source.token_pos),
is_initialization_(kind == Kind::kInitializing) {
+ // We need to have a deopt-id assigned to the store iff the store can cause
+ // a box allocation.
+ ASSERT(!slot.can_be_unboxed_field() || (deopt_id != DeoptId::kNone));
SetInputAt(kInstancePos, instance);
SetInputAt(kValuePos, value);
}
- // Convenience constructor that looks up an IL Slot for the given [field].
- StoreInstanceFieldInstr(const Field& field,
- Value* instance,
- Value* value,
- StoreBarrierType emit_store_barrier,
- const InstructionSource& source,
- const ParsedFunction* parsed_function,
- Kind kind = Kind::kOther)
- : StoreInstanceFieldInstr(Slot::Get(field, parsed_function),
- instance,
- value,
- emit_store_barrier,
- source,
- kind) {}
-
virtual SpeculativeMode SpeculativeModeOfInput(intptr_t index) const {
// In AOT unbox is done based on TFA, therefore it was proven to be correct
// and it can never deoptimize.
diff --git a/runtime/vm/compiler/backend/inliner.cc b/runtime/vm/compiler/backend/inliner.cc
index 95ba032..0fdec96 100644
--- a/runtime/vm/compiler/backend/inliner.cc
+++ b/runtime/vm/compiler/backend/inliner.cc
@@ -2779,9 +2779,9 @@
(*entry)->InheritDeoptTarget(Z, call);
// This is an internal method, no need to check argument types.
- StoreInstanceFieldInstr* store = new (Z)
- StoreInstanceFieldInstr(field, new (Z) Value(array), new (Z) Value(value),
- store_barrier_type, call->source());
+ StoreInstanceFieldInstr* store = new (Z) StoreInstanceFieldInstr(
+ field, new (Z) Value(array), new (Z) Value(value), store_barrier_type,
+ call->source(), DeoptId::kNone);
flow_graph->AppendTo(*entry, store, call->env(), FlowGraph::kEffect);
*last = store;
// We need a return value to replace uses of the original definition. However,
diff --git a/runtime/vm/compiler/backend/redundancy_elimination_test.cc b/runtime/vm/compiler/backend/redundancy_elimination_test.cc
index 65b6b80..f6b74ed 100644
--- a/runtime/vm/compiler/backend/redundancy_elimination_test.cc
+++ b/runtime/vm/compiler/backend/redundancy_elimination_test.cc
@@ -439,9 +439,9 @@
v5 = builder.AddDefinition(
new AllocateObjectInstr(InstructionSource(), cls));
if (!make_host_escape) {
- builder.AddInstruction(
- new StoreInstanceFieldInstr(slot, new Value(v5), new Value(v0),
- kEmitStoreBarrier, InstructionSource()));
+ builder.AddInstruction(new StoreInstanceFieldInstr(
+ slot, new Value(v5), new Value(v0), kEmitStoreBarrier,
+ InstructionSource(), S.GetNextDeoptId()));
}
v1 = builder.AddDefinition(
new LoadFieldInstr(new Value(v0), slot, InstructionSource()));
@@ -453,9 +453,9 @@
new LoadFieldInstr(new Value(v2), slot, InstructionSource()));
args->Add(new Value(v6));
} else if (make_host_escape) {
- builder.AddInstruction(
- new StoreInstanceFieldInstr(slot, new Value(v2), new Value(v0),
- kEmitStoreBarrier, InstructionSource()));
+ builder.AddInstruction(new StoreInstanceFieldInstr(
+ slot, new Value(v2), new Value(v0), kEmitStoreBarrier,
+ InstructionSource(), S.GetNextDeoptId()));
args->Add(new Value(v5));
}
call = builder.AddInstruction(new StaticCallInstr(
diff --git a/runtime/vm/compiler/backend/slot.cc b/runtime/vm/compiler/backend/slot.cc
index 5b814c5..6b3c358c 100644
--- a/runtime/vm/compiler/backend/slot.cc
+++ b/runtime/vm/compiler/backend/slot.cc
@@ -8,6 +8,7 @@
#include "vm/compiler/backend/il.h"
#include "vm/compiler/compiler_state.h"
#include "vm/hash_map.h"
+#include "vm/object_store.h"
#include "vm/parser.h"
#include "vm/scopes.h"
@@ -246,6 +247,7 @@
const ParsedFunction* parsed_function) {
Thread* thread = Thread::Current();
Zone* zone = thread->zone();
+ auto object_store = thread->isolate_group()->object_store();
Representation rep = kTagged;
intptr_t nullable_cid = kDynamicCid;
bool is_nullable = true;
@@ -296,12 +298,24 @@
}
}
+ const auto& double_type =
+ AbstractType::Handle(zone, object_store->double_type());
+ const auto& float32x4_type =
+ AbstractType::Handle(zone, object_store->float32x4_type());
+ const auto& float64x2_type =
+ AbstractType::Handle(zone, object_store->float64x2_type());
+ const bool can_be_unboxed_field =
+ double_type.IsSubtypeOf(type, Heap::kNew) ||
+ float32x4_type.IsSubtypeOf(type, Heap::kNew) ||
+ float64x2_type.IsSubtypeOf(type, Heap::kNew);
+
const Slot& slot = SlotCache::Instance(thread).Canonicalize(
Slot(Kind::kDartField,
IsImmutableBit::encode((field.is_final() && !field.is_late()) ||
field.is_const()) |
IsNullableBit::encode(is_nullable) |
- IsGuardedBit::encode(used_guarded_state),
+ IsGuardedBit::encode(used_guarded_state) |
+ CanBeUnboxedFieldBit::encode(can_be_unboxed_field),
nullable_cid, compiler::target::Field::OffsetOf(field), &field,
&type, rep));
diff --git a/runtime/vm/compiler/backend/slot.h b/runtime/vm/compiler/backend/slot.h
index 3a2a1cd..90a238b 100644
--- a/runtime/vm/compiler/backend/slot.h
+++ b/runtime/vm/compiler/backend/slot.h
@@ -240,6 +240,12 @@
// of the corresponding Dart field.
bool is_guarded_field() const { return IsGuardedBit::decode(flags_); }
// Returns true if this slot refers to a Dart field that could become unboxed
+ // (based on static type of the field).
+ bool can_be_unboxed_field() const {
+ return CanBeUnboxedFieldBit::decode(flags_);
+ }
+
// Static type of the slots if any.
//
// A value that is read from the slot is guaranteed to be assignable to its
@@ -294,6 +300,8 @@
using IsImmutableBit = BitField<int8_t, bool, 0, 1>;
using IsNullableBit = BitField<int8_t, bool, IsImmutableBit::kNextBit, 1>;
using IsGuardedBit = BitField<int8_t, bool, IsNullableBit::kNextBit, 1>;
+ using CanBeUnboxedFieldBit =
BitField<int8_t, bool, IsGuardedBit::kNextBit, 1>;
template <typename T>
const T* DataAs() const {
diff --git a/runtime/vm/compiler/call_specializer.cc b/runtime/vm/compiler/call_specializer.cc
index 4791955..7bed2d3 100644
--- a/runtime/vm/compiler/call_specializer.cc
+++ b/runtime/vm/compiler/call_specializer.cc
@@ -924,10 +924,12 @@
// Field guard was detached.
ASSERT(instr->FirstArgIndex() == 0);
+
+ const auto& slot = Slot::Get(field, &flow_graph()->parsed_function());
StoreInstanceFieldInstr* store = new (Z) StoreInstanceFieldInstr(
- field, new (Z) Value(instr->ArgumentAt(0)),
+ slot, new (Z) Value(instr->ArgumentAt(0)),
new (Z) Value(instr->ArgumentAt(1)), kEmitStoreBarrier, instr->source(),
- &flow_graph()->parsed_function());
+ instr->deopt_id());
// Discard the environment from the original instruction because the store
// can't deoptimize.
diff --git a/runtime/vm/compiler/frontend/base_flow_graph_builder.cc b/runtime/vm/compiler/frontend/base_flow_graph_builder.cc
index a106f1e..3fbe48c 100644
--- a/runtime/vm/compiler/frontend/base_flow_graph_builder.cc
+++ b/runtime/vm/compiler/frontend/base_flow_graph_builder.cc
@@ -509,7 +509,7 @@
Fragment BaseFlowGraphBuilder::StoreInstanceField(
TokenPosition position,
- const Slot& field,
+ const Slot& slot,
StoreInstanceFieldInstr::Kind
kind /* = StoreInstanceFieldInstr::Kind::kOther */,
StoreBarrierType emit_store_barrier /* = kEmitStoreBarrier */) {
@@ -517,9 +517,14 @@
if (value->BindsToConstant()) {
emit_store_barrier = kNoStoreBarrier;
}
- StoreInstanceFieldInstr* store =
- new (Z) StoreInstanceFieldInstr(field, Pop(), value, emit_store_barrier,
- InstructionSource(position), kind);
+
+ // A StoreInstanceField can deopt iff it can result in a box allocation.
+ const intptr_t deopt_id =
+ slot.can_be_unboxed_field() ? GetNextDeoptId() : DeoptId::kNone;
+
+ auto store = new (Z)
+ StoreInstanceFieldInstr(slot, Pop(), value, emit_store_barrier,
+ InstructionSource(position), deopt_id, kind);
return Fragment(store);
}
@@ -528,16 +533,10 @@
StoreInstanceFieldInstr::Kind
kind /* = StoreInstanceFieldInstr::Kind::kOther */,
StoreBarrierType emit_store_barrier) {
- Value* value = Pop();
- if (value->BindsToConstant()) {
- emit_store_barrier = kNoStoreBarrier;
- }
-
- StoreInstanceFieldInstr* store = new (Z) StoreInstanceFieldInstr(
- MayCloneField(Z, field), Pop(), value, emit_store_barrier,
- InstructionSource(), parsed_function_, kind);
-
- return Fragment(store);
+ const auto& cloned_field = MayCloneField(Z, field);
+ const auto& slot = Slot::Get(cloned_field, parsed_function_);
+ return StoreInstanceField(TokenPosition::kNoSource, slot, kind,
+ emit_store_barrier);
}
Fragment BaseFlowGraphBuilder::StoreInstanceFieldGuarded(
diff --git a/runtime/vm/compiler/graph_intrinsifier.cc b/runtime/vm/compiler/graph_intrinsifier.cc
index 61cf909..c8ac058 100644
--- a/runtime/vm/compiler/graph_intrinsifier.cc
+++ b/runtime/vm/compiler/graph_intrinsifier.cc
@@ -892,7 +892,7 @@
builder.AddInstruction(new StoreInstanceFieldInstr(
Slot::GrowableObjectArray_data(), new Value(growable_array),
- new Value(data), kEmitStoreBarrier, builder.Source()));
+ new Value(data), kEmitStoreBarrier, builder.Source(), DeoptId::kNone));
// Return null.
Definition* null_def = builder.AddNullDefinition();
builder.AddReturn(new Value(null_def));
@@ -914,7 +914,7 @@
new CheckSmiInstr(new Value(length), DeoptId::kNone, builder.Source()));
builder.AddInstruction(new StoreInstanceFieldInstr(
Slot::GrowableObjectArray_length(), new Value(growable_array),
- new Value(length), kNoStoreBarrier, builder.Source()));
+ new Value(length), kNoStoreBarrier, builder.Source(), DeoptId::kNone));
Definition* null_def = builder.AddNullDefinition();
builder.AddReturn(new Value(null_def));
return true;
@@ -1220,7 +1220,7 @@
builder.AddInstruction(new (zone) StoreInstanceFieldInstr(
slot, new (zone) Value(receiver), new (zone) Value(value), barrier_mode,
- builder.Source()));
+ builder.Source(), normal_entry->deopt_id()));
builder.AddReturn(new (zone) Value(flow_graph->constant_null()));
return true;
diff --git a/runtime/vm/compiler/jit/jit_call_specializer.cc b/runtime/vm/compiler/jit/jit_call_specializer.cc
index 40bf22e..5492c63 100644
--- a/runtime/vm/compiler/jit/jit_call_specializer.cc
+++ b/runtime/vm/compiler/jit/jit_call_specializer.cc
@@ -243,7 +243,7 @@
}
StoreInstanceFieldInstr* store = new (Z) StoreInstanceFieldInstr(
Slot::Context_parent(), new (Z) Value(replacement), initial_value,
- kNoStoreBarrier, alloc->source(),
+ kNoStoreBarrier, alloc->source(), DeoptId::kNone,
StoreInstanceFieldInstr::Kind::kInitializing);
flow_graph()->InsertAfter(cursor, store, nullptr, FlowGraph::kEffect);
cursor = replacement;
@@ -261,7 +261,8 @@
store = new (Z) StoreInstanceFieldInstr(
*slot, new (Z) Value(replacement), initial_value, kNoStoreBarrier,
- alloc->source(), StoreInstanceFieldInstr::Kind::kInitializing);
+ alloc->source(), DeoptId::kNone,
+ StoreInstanceFieldInstr::Kind::kInitializing);
flow_graph()->InsertAfter(cursor, store, nullptr, FlowGraph::kEffect);
cursor = store;
}
To view, visit change 191042. To unsubscribe, or for help writing mail filters, visit settings.
Attention is currently required from: Vyacheslav Egorov.
Attention is currently required from: Alexander Markov, Vyacheslav Egorov.
Martin Kustermann would like Alexander Markov to review this change.
To view, visit change 191042. To unsubscribe, or for help writing mail filters, visit settings.
Attention is currently required from: Alexander Markov, Vyacheslav Egorov.
1 comment:
Patchset:
Will mark it WIP again for investigating an alternative suggested by Slava.
To view, visit change 191042. To unsubscribe, or for help writing mail filters, visit settings.
Martin Kustermann abandoned this change.
To view, visit change 191042. To unsubscribe, or for help writing mail filters, visit settings.