Re: Change in sdk[master]: [VM] Use IR for code in [CatchEntryInstr]s to populate captured excep...


Vyacheslav Egorov

Jan 21, 2018, 12:43:10 PM
to Commit Bot, vm-...@dartlang.org, Martin Kustermann, Matan Lurey, Vyacheslav Egorov, Dart Reviews
FYI this CL has failed on SIMARM64: https://ci.chromium.org/p/dart/builds/b8956774519756276976

python tools/test.py -m release -c dartk -a simarm64 --strong language_2/await_future_test
Judging by the failure, it is definitely related to this CL (I think the context is not restored correctly).
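
Not part of the failing test or of the CL — a minimal, hypothetical sketch of the kind of pattern involved, assuming an async function with a captured local used inside a catch clause; the catch entry has to restore :current_context_var before the captured variable is usable again:

import 'dart:async';

// Hypothetical reduction (not the actual language_2/await_future_test):
// 'counter' is captured by the closure below, so it lives in a Context
// object. The CatchBlockEntry code must restore the current context before
// the captured variable can be read or written inside the catch clause.
Future<int> catchWithCapturedLocal() async {
  int counter = 0;
  final bump = () => counter++;
  try {
    await new Future.value();
    throw new StateError('boom');
  } on StateError {
    bump(); // relies on the context being restored correctly at catch entry
  }
  return counter;
}

main() async {
  print(await catchWithCapturedLocal()); // expected output: 1
}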


// Vyacheslav Egorov

On Sun, Jan 21, 2018 at 5:13 PM, commi...@chromium.org (Gerrit) <noreply-gerritcoderevie...@google.com> wrote:

commi...@chromium.org merged this change.

View Change

Approvals:
  Vyacheslav Egorov: Looks good to me, approved
  Martin Kustermann: Commit
[VM] Use IR for code in [CatchEntryInstr]s to populate captured exception/stacktrace variables

This fixes an issue where a program got loaded via dill, a function
with a try-catch got optimized, and the exception/stacktrace variables
got captured.

Change-Id: Ia6b62f2a0986c78b90afe7fae25025ca4e5b09db
Reviewed-on: https://dart-review.googlesource.com/35182
Commit-Queue: Martin Kustermann <kuste...@google.com>
Reviewed-by: Vyacheslav Egorov <veg...@google.com>
---
M runtime/platform/growable_array.h
M runtime/vm/compiler/backend/flow_graph.cc
M runtime/vm/compiler/backend/il.h
M runtime/vm/compiler/backend/il_arm.cc
M runtime/vm/compiler/backend/il_arm64.cc
M runtime/vm/compiler/backend/il_dbc.cc
M runtime/vm/compiler/backend/il_ia32.cc
M runtime/vm/compiler/backend/il_x64.cc
M runtime/vm/compiler/backend/linearscan.cc
M runtime/vm/compiler/backend/locations.cc
M runtime/vm/compiler/backend/locations.h
M runtime/vm/compiler/backend/type_propagator.cc
M runtime/vm/compiler/frontend/flow_graph_builder.cc
M runtime/vm/compiler/frontend/kernel_binary_flowgraph.cc
M runtime/vm/compiler/frontend/kernel_binary_flowgraph.h
M runtime/vm/compiler/frontend/kernel_to_il.cc
M runtime/vm/compiler/frontend/kernel_to_il.h
A tests/language_2/regress_31946_test.dart
18 files changed, 373 insertions(+), 256 deletions(-)
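
As a hedged illustration of the scenario described above (not code from this CL or its test suite): when the exception/stacktrace variables of a catch clause are themselves captured by a closure, the caught values have to be written into the closure context at catch entry, which is the path this change reworks to go through IR.

import 'dart:async';

// Assumed reduction of the described scenario: 'e' and 'st' are captured by
// the returned closure, so an optimized version of this function must store
// the caught exception and stack trace into the closure context rather than
// into plain frame slots.
Future<String Function()> capturedCatchVariables() async {
  try {
    await new Future.value();
    throw new StateError('caught and captured');
  } catch (e, st) {
    return () => '$e\n$st';
  }
}

main() async {
  final describe = await capturedCatchVariables();
  print(describe());
}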

diff --git a/runtime/platform/growable_array.h b/runtime/platform/growable_array.h
index 28e04f6..1c34696 100644
--- a/runtime/platform/growable_array.h
+++ b/runtime/platform/growable_array.h
@@ -70,6 +70,16 @@
}
}

+ void EnsureLength(intptr_t new_length, const T& default_value) {
+ const intptr_t old_length = length_;
+ if (old_length < new_length) {
+ Resize(new_length);
+ for (intptr_t i = old_length; i < new_length; ++i) {
+ (*this)[i] = default_value;
+ }
+ }
+ }
+
const T& At(intptr_t index) const { return operator[](index); }

T& Last() const {
diff --git a/runtime/vm/compiler/backend/flow_graph.cc b/runtime/vm/compiler/backend/flow_graph.cc
index 516db37..7376b6c 100644
--- a/runtime/vm/compiler/backend/flow_graph.cc
+++ b/runtime/vm/compiler/backend/flow_graph.cc
@@ -1080,10 +1080,34 @@
}
}
}
- } else if (block_entry->IsCatchBlockEntry()) {
+ } else if (CatchBlockEntryInstr* catch_entry =
+ block_entry->AsCatchBlockEntry()) {
+ const intptr_t raw_exception_var_envindex =
+ catch_entry->raw_exception_var() != NULL
+ ? catch_entry->raw_exception_var()->BitIndexIn(
+ num_direct_parameters_)
+ : -1;
+ const intptr_t raw_stacktrace_var_envindex =
+ catch_entry->raw_stacktrace_var() != NULL
+ ? catch_entry->raw_stacktrace_var()->BitIndexIn(
+ num_direct_parameters_)
+ : -1;
+
// Add real definitions for all locals and parameters.
for (intptr_t i = 0; i < env->length(); ++i) {
- ParameterInstr* param = new (zone()) ParameterInstr(i, block_entry);
+ // Replace usages of the raw exception/stacktrace variables with
+ // [SpecialParameterInstr]s.
+ Definition* param = NULL;
+ if (raw_exception_var_envindex == i) {
+ param = new SpecialParameterInstr(SpecialParameterInstr::kException,
+ Thread::kNoDeoptId);
+ } else if (raw_stacktrace_var_envindex == i) {
+ param = new SpecialParameterInstr(SpecialParameterInstr::kStackTrace,
+ Thread::kNoDeoptId);
+ } else {
+ param = new (zone()) ParameterInstr(i, block_entry);
+ }
+
param->set_ssa_temp_index(alloc_ssa_temp_index()); // New SSA temp.
(*env)[i] = param;
block_entry->AsCatchBlockEntry()->initial_definitions()->Add(param);
@@ -1105,6 +1129,30 @@
// Attach environment to the block entry.
AttachEnvironment(block_entry, env);

+#if defined(TARGET_ARCH_DBC)
+ // On DBC the exception/stacktrace variables are in special registers when
+ // entering the catch block. The only usage of those special registers is
+ // within the catch block. A possible lazy-deopt at the beginning of the
+ // catch does not need to move those around, since the registers will be
+ // up-to-date when arriving in the unoptimized code and unoptimized code will
+ // take care of moving them to appropriate slots.
+ if (CatchBlockEntryInstr* catch_entry = block_entry->AsCatchBlockEntry()) {
+ Environment* deopt_env = catch_entry->env();
+ const LocalVariable* raw_exception_var = catch_entry->raw_exception_var();
+ const LocalVariable* raw_stacktrace_var = catch_entry->raw_stacktrace_var();
+ if (raw_exception_var != NULL) {
+ Value* value = deopt_env->ValueAt(
+ raw_exception_var->BitIndexIn(num_direct_parameters_));
+ value->BindToEnvironment(constant_null());
+ }
+ if (raw_stacktrace_var != NULL) {
+ Value* value = deopt_env->ValueAt(
+ raw_stacktrace_var->BitIndexIn(num_direct_parameters_));
+ value->BindToEnvironment(constant_null());
+ }
+ }
+#endif // defined(TARGET_ARCH_DBC)
+
// 2. Process normal instructions.
for (ForwardInstructionIterator it(block_entry); !it.Done(); it.Advance()) {
Instruction* current = it.Current();
diff --git a/runtime/vm/compiler/backend/il.h b/runtime/vm/compiler/backend/il.h
index 1c60030..0721bba 100644
--- a/runtime/vm/compiler/backend/il.h
+++ b/runtime/vm/compiler/backend/il.h
@@ -1561,7 +1561,8 @@
const LocalVariable& stacktrace_var,
bool needs_stacktrace,
intptr_t deopt_id,
- bool should_restore_closure_context = false)
+ const LocalVariable* raw_exception_var,
+ const LocalVariable* raw_stacktrace_var)
: BlockEntryInstr(block_id, try_index, deopt_id),
graph_entry_(graph_entry),
predecessor_(NULL),
@@ -1569,8 +1570,9 @@
catch_try_index_(catch_try_index),
exception_var_(exception_var),
stacktrace_var_(stacktrace_var),
+ raw_exception_var_(raw_exception_var),
+ raw_stacktrace_var_(raw_stacktrace_var),
needs_stacktrace_(needs_stacktrace),
- should_restore_closure_context_(should_restore_closure_context),
handler_token_pos_(handler_token_pos),
is_generated_(is_generated) {}

@@ -1589,6 +1591,11 @@
const LocalVariable& exception_var() const { return exception_var_; }
const LocalVariable& stacktrace_var() const { return stacktrace_var_; }

+ const LocalVariable* raw_exception_var() const { return raw_exception_var_; }
+ const LocalVariable* raw_stacktrace_var() const {
+ return raw_stacktrace_var_;
+ }
+
bool needs_stacktrace() const { return needs_stacktrace_; }

bool is_generated() const { return is_generated_; }
@@ -1612,12 +1619,6 @@
predecessor_ = predecessor;
}

- bool should_restore_closure_context() const {
- ASSERT(exception_var_.is_captured() == stacktrace_var_.is_captured());
- ASSERT(!exception_var_.is_captured() || should_restore_closure_context_);
- return should_restore_closure_context_;
- }
-
GraphEntryInstr* graph_entry_;
BlockEntryInstr* predecessor_;
const Array& catch_handler_types_;
@@ -1625,8 +1626,9 @@
GrowableArray<Definition*> initial_definitions_;
const LocalVariable& exception_var_;
const LocalVariable& stacktrace_var_;
+ const LocalVariable* raw_exception_var_;
+ const LocalVariable* raw_stacktrace_var_;
const bool needs_stacktrace_;
- const bool should_restore_closure_context_;
TokenPosition handler_token_pos_;
bool is_generated_;

@@ -2909,7 +2911,13 @@
// the type arguments of a generic function or an arguments descriptor.
class SpecialParameterInstr : public TemplateDefinition<0, NoThrow> {
public:
- enum SpecialParameterKind { kContext, kTypeArgs, kArgDescriptor };
+ enum SpecialParameterKind {
+ kContext,
+ kTypeArgs,
+ kArgDescriptor,
+ kException,
+ kStackTrace
+ };

SpecialParameterInstr(SpecialParameterKind kind, intptr_t deopt_id)
: TemplateDefinition(deopt_id), kind_(kind) {}
@@ -2938,6 +2946,10 @@
return "kTypeArgs";
case kArgDescriptor:
return "kArgDescriptor";
+ case kException:
+ return "kException";
+ case kStackTrace:
+ return "kStackTrace";
}
UNREACHABLE();
return NULL;
diff --git a/runtime/vm/compiler/backend/il_arm.cc b/runtime/vm/compiler/backend/il_arm.cc
index 0cac243..6048552 100644
--- a/runtime/vm/compiler/backend/il_arm.cc
+++ b/runtime/vm/compiler/backend/il_arm.cc
@@ -2840,38 +2840,15 @@
ASSERT(fp_sp_dist <= 0);
__ AddImmediate(SP, FP, fp_sp_dist);

- // Auxiliary variables introduced by the try catch can be captured if we are
- // inside a function with yield/resume points. In this case we first need
- // to restore the context to match the context at entry into the closure.
- if (should_restore_closure_context()) {
- const ParsedFunction& parsed_function = compiler->parsed_function();
- ASSERT(parsed_function.function().IsClosureFunction());
- LocalScope* scope = parsed_function.node_sequence()->scope();
-
- LocalVariable* closure_parameter = scope->VariableAt(0);
- ASSERT(!closure_parameter->is_captured());
- __ LoadFromOffset(kWord, CTX, FP, closure_parameter->index() * kWordSize);
- __ LoadFieldFromOffset(kWord, CTX, CTX, Closure::context_offset());
-
- const intptr_t context_index =
- parsed_function.current_context_var()->index();
- __ StoreToOffset(kWord, CTX, FP, context_index * kWordSize);
- }
-
- // Initialize exception and stack trace variables.
- if (exception_var().is_captured()) {
- ASSERT(stacktrace_var().is_captured());
- __ StoreIntoObjectOffset(CTX,
- Context::variable_offset(exception_var().index()),
- kExceptionObjectReg);
- __ StoreIntoObjectOffset(CTX,
- Context::variable_offset(stacktrace_var().index()),
- kStackTraceObjectReg);
- } else {
- __ StoreToOffset(kWord, kExceptionObjectReg, FP,
- exception_var().index() * kWordSize);
- __ StoreToOffset(kWord, kStackTraceObjectReg, FP,
- stacktrace_var().index() * kWordSize);
+ if (!compiler->is_optimizing()) {
+ if (raw_exception_var_ != NULL) {
+ __ str(kExceptionObjectReg,
+ Address(FP, raw_exception_var_->index() * kWordSize));
+ }
+ if (raw_stacktrace_var_ != NULL) {
+ __ str(kStackTraceObjectReg,
+ Address(FP, raw_stacktrace_var_->index() * kWordSize));
+ }
}
}

diff --git a/runtime/vm/compiler/backend/il_arm64.cc b/runtime/vm/compiler/backend/il_arm64.cc
index 182d697..12be6d3 100644
--- a/runtime/vm/compiler/backend/il_arm64.cc
+++ b/runtime/vm/compiler/backend/il_arm64.cc
@@ -2574,40 +2574,15 @@
ASSERT(fp_sp_dist <= 0);
__ AddImmediate(SP, FP, fp_sp_dist);

- // Auxiliary variables introduced by the try catch can be captured if we are
- // inside a function with yield/resume points. In this case we first need
- // to restore the context to match the context at entry into the closure.
- if (should_restore_closure_context()) {
- const ParsedFunction& parsed_function = compiler->parsed_function();
- ASSERT(parsed_function.function().IsClosureFunction());
- LocalScope* scope = parsed_function.node_sequence()->scope();
-
- LocalVariable* closure_parameter = scope->VariableAt(0);
- ASSERT(!closure_parameter->is_captured());
- __ LoadFromOffset(CTX, FP, closure_parameter->index() * kWordSize);
- __ LoadFieldFromOffset(CTX, CTX, Closure::context_offset());
-
- const intptr_t context_index =
- parsed_function.current_context_var()->index();
- __ StoreToOffset(CTX, FP, context_index * kWordSize);
- }
-
- // Initialize exception and stack trace variables.
- if (exception_var().is_captured()) {
- ASSERT(stacktrace_var().is_captured());
- __ StoreIntoObjectOffset(CTX,
- Context::variable_offset(exception_var().index()),
- kExceptionObjectReg);
- __ StoreIntoObjectOffset(CTX,
- Context::variable_offset(stacktrace_var().index()),
- kStackTraceObjectReg);
- } else {
- // Restore stack and initialize the two exception variables:
- // exception and stack trace variables.
- __ StoreToOffset(kExceptionObjectReg, FP,
- exception_var().index() * kWordSize);
- __ StoreToOffset(kStackTraceObjectReg, FP,
- stacktrace_var().index() * kWordSize);
+ if (!compiler->is_optimizing()) {
+ if (raw_exception_var_ != NULL) {
+ __ str(kExceptionObjectReg,
+ Address(FP, raw_exception_var_->index() * kWordSize));
+ }
+ if (raw_stacktrace_var_ != NULL) {
+ __ str(kStackTraceObjectReg,
+ Address(FP, raw_stacktrace_var_->index() * kWordSize));
+ }
}
}

diff --git a/runtime/vm/compiler/backend/il_dbc.cc b/runtime/vm/compiler/backend/il_dbc.cc
index bb75c7c..1d13c51 100644
--- a/runtime/vm/compiler/backend/il_dbc.cc
+++ b/runtime/vm/compiler/backend/il_dbc.cc
@@ -1172,69 +1172,18 @@
if (HasParallelMove()) {
compiler->parallel_move_resolver()->EmitNativeCode(parallel_move());
}
+ __ SetFrame(compiler->StackSize());

- Register context_reg = kNoRegister;
-
- // Auxiliary variables introduced by the try catch can be captured if we are
- // inside a function with yield/resume points. In this case we first need
- // to restore the context to match the context at entry into the closure.
- if (should_restore_closure_context()) {
- const ParsedFunction& parsed_function = compiler->parsed_function();
-
- ASSERT(parsed_function.function().IsClosureFunction());
- LocalScope* scope = parsed_function.node_sequence()->scope();
-
- LocalVariable* closure_parameter = scope->VariableAt(0);
- ASSERT(!closure_parameter->is_captured());
-
- const LocalVariable& current_context_var =
- *parsed_function.current_context_var();
-
- context_reg = compiler->is_optimizing()
- ? compiler->CatchEntryRegForVariable(current_context_var)
- : LocalVarIndex(0, current_context_var.index());
-
- Register closure_reg;
- if (closure_parameter->index() > 0) {
- __ Move(context_reg, LocalVarIndex(0, closure_parameter->index()));
- closure_reg = context_reg;
- } else {
- closure_reg = LocalVarIndex(0, closure_parameter->index());
- }
-
- __ LoadField(context_reg, closure_reg,
- Closure::context_offset() / kWordSize);
- }
-
- if (exception_var().is_captured()) {
- ASSERT(stacktrace_var().is_captured());
- ASSERT(context_reg != kNoRegister);
- // This will be SP[1] register so we are free to use it as a temporary.
- const Register temp = compiler->StackSize();
- __ MoveSpecial(temp, Simulator::kExceptionSpecialIndex);
- __ StoreField(context_reg,
- Context::variable_offset(exception_var().index()) / kWordSize,
- temp);
- __ MoveSpecial(temp, Simulator::kStackTraceSpecialIndex);
- __ StoreField(
- context_reg,
- Context::variable_offset(stacktrace_var().index()) / kWordSize, temp);
- } else {
- if (compiler->is_optimizing()) {
- const intptr_t exception_reg =
- compiler->CatchEntryRegForVariable(exception_var());
- const intptr_t stacktrace_reg =
- compiler->CatchEntryRegForVariable(stacktrace_var());
- __ MoveSpecial(exception_reg, Simulator::kExceptionSpecialIndex);
- __ MoveSpecial(stacktrace_reg, Simulator::kStackTraceSpecialIndex);
- } else {
- __ MoveSpecial(LocalVarIndex(0, exception_var().index()),
+ if (!compiler->is_optimizing()) {
+ if (raw_exception_var_ != NULL) {
+ __ MoveSpecial(LocalVarIndex(0, raw_exception_var_->index()),
Simulator::kExceptionSpecialIndex);
- __ MoveSpecial(LocalVarIndex(0, stacktrace_var().index()),
+ }
+ if (raw_stacktrace_var_ != NULL) {
+ __ MoveSpecial(LocalVarIndex(0, raw_stacktrace_var_->index()),
Simulator::kStackTraceSpecialIndex);
}
}
- __ SetFrame(compiler->StackSize());
}

EMIT_NATIVE_CODE(Throw, 0, Location::NoLocation(), LocationSummary::kCall) {
diff --git a/runtime/vm/compiler/backend/il_ia32.cc b/runtime/vm/compiler/backend/il_ia32.cc
index f8bd071..46a47fc 100644
--- a/runtime/vm/compiler/backend/il_ia32.cc
+++ b/runtime/vm/compiler/backend/il_ia32.cc
@@ -2498,51 +2498,15 @@
ASSERT(fp_sp_dist <= 0);
__ leal(ESP, Address(EBP, fp_sp_dist));

- // Auxiliary variables introduced by the try catch can be captured if we are
- // inside a function with yield/resume points. In this case we first need
- // to restore the context to match the context at entry into the closure.
- if (should_restore_closure_context()) {
- const ParsedFunction& parsed_function = compiler->parsed_function();
- ASSERT(parsed_function.function().IsClosureFunction());
- LocalScope* scope = parsed_function.node_sequence()->scope();
-
- LocalVariable* closure_parameter = scope->VariableAt(0);
- ASSERT(!closure_parameter->is_captured());
- __ movl(CTX, Address(EBP, closure_parameter->index() * kWordSize));
- __ movl(CTX, FieldAddress(CTX, Closure::context_offset()));
-
-#ifdef DEBUG
- Label ok;
- __ LoadClassId(EBX, CTX);
- __ cmpl(EBX, Immediate(kContextCid));
- __ j(EQUAL, &ok, Assembler::kNearJump);
- __ Stop("Incorrect context at entry");
- __ Bind(&ok);
-#endif
-
- const intptr_t context_index =
- parsed_function.current_context_var()->index();
- __ movl(Address(EBP, context_index * kWordSize), CTX);
- }
-
- // Initialize exception and stack trace variables.
- if (exception_var().is_captured()) {
- ASSERT(stacktrace_var().is_captured());
- __ StoreIntoObject(
- CTX,
- FieldAddress(CTX, Context::variable_offset(exception_var().index())),
- kExceptionObjectReg);
- __ StoreIntoObject(
- CTX,
- FieldAddress(CTX, Context::variable_offset(stacktrace_var().index())),
- kStackTraceObjectReg);
- } else {
- // Restore stack and initialize the two exception variables:
- // exception and stack trace variables.
- __ movl(Address(EBP, exception_var().index() * kWordSize),
- kExceptionObjectReg);
- __ movl(Address(EBP, stacktrace_var().index() * kWordSize),
- kStackTraceObjectReg);
+ if (!compiler->is_optimizing()) {
+ if (raw_exception_var_ != NULL) {
+ __ movl(Address(EBP, raw_exception_var_->index() * kWordSize),
+ kExceptionObjectReg);
+ }
+ if (raw_stacktrace_var_ != NULL) {
+ __ movl(Address(EBP, raw_stacktrace_var_->index() * kWordSize),
+ kStackTraceObjectReg);
+ }
}
}

diff --git a/runtime/vm/compiler/backend/il_x64.cc b/runtime/vm/compiler/backend/il_x64.cc
index 532fec3..7c6a807 100644
--- a/runtime/vm/compiler/backend/il_x64.cc
+++ b/runtime/vm/compiler/backend/il_x64.cc
@@ -2523,49 +2523,15 @@
ASSERT(fp_sp_dist <= 0);
__ leaq(RSP, Address(RBP, fp_sp_dist));

- // Auxiliary variables introduced by the try catch can be captured if we are
- // inside a function with yield/resume points. In this case we first need
- // to restore the context to match the context at entry into the closure.
- if (should_restore_closure_context()) {
- const ParsedFunction& parsed_function = compiler->parsed_function();
- ASSERT(parsed_function.function().IsClosureFunction());
- LocalScope* scope = parsed_function.node_sequence()->scope();
-
- LocalVariable* closure_parameter = scope->VariableAt(0);
- ASSERT(!closure_parameter->is_captured());
- __ movq(CTX, Address(RBP, closure_parameter->index() * kWordSize));
- __ movq(CTX, FieldAddress(CTX, Closure::context_offset()));
-
-#ifdef DEBUG
- Label ok;
- __ LoadClassId(RBX, CTX);
- __ cmpq(RBX, Immediate(kContextCid));
- __ j(EQUAL, &ok, Assembler::kNearJump);
- __ Stop("Incorrect context at entry");
- __ Bind(&ok);
-#endif
-
- const intptr_t context_index =
- parsed_function.current_context_var()->index();
- __ movq(Address(RBP, context_index * kWordSize), CTX);
- }
-
- // Initialize exception and stack trace variables.
- if (exception_var().is_captured()) {
- ASSERT(stacktrace_var().is_captured());
- __ StoreIntoObject(
- CTX,
- FieldAddress(CTX, Context::variable_offset(exception_var().index())),
- kExceptionObjectReg);
- __ StoreIntoObject(
- CTX,
- FieldAddress(CTX, Context::variable_offset(stacktrace_var().index())),
- kStackTraceObjectReg);
- } else {
- __ movq(Address(RBP, exception_var().index() * kWordSize),
- kExceptionObjectReg);
- __ movq(Address(RBP, stacktrace_var().index() * kWordSize),
- kStackTraceObjectReg);
+ if (!compiler->is_optimizing()) {
+ if (raw_exception_var_ != NULL) {
+ __ movq(Address(RBP, raw_exception_var_->index() * kWordSize),
+ kExceptionObjectReg);
+ }
+ if (raw_stacktrace_var_ != NULL) {
+ __ movq(Address(RBP, raw_stacktrace_var_->index() * kWordSize),
+ kStackTraceObjectReg);
+ }
}
}

diff --git a/runtime/vm/compiler/backend/linearscan.cc b/runtime/vm/compiler/backend/linearscan.cc
index 9abd019..0695007 100644
--- a/runtime/vm/compiler/backend/linearscan.cc
+++ b/runtime/vm/compiler/backend/linearscan.cc
@@ -639,11 +639,42 @@
void FlowGraphAllocator::ProcessInitialDefinition(Definition* defn,
LiveRange* range,
BlockEntryInstr* block) {
-#if defined(TARGET_ARCH_DBC)
+ // Save the range end because it may change below.
+ const intptr_t range_end = range->End();
+
+ // TODO(31956): Clean up this code and factor common functionality out.
+ // Consider also making a separate [ProcessInitialDefinition] for
+ // [CatchBlockEntry]'s.
if (block->IsCatchBlockEntry()) {
- if (defn->IsParameter()) {
+ if (SpecialParameterInstr* param = defn->AsSpecialParameter()) {
+ Location loc;
+ switch (param->kind()) {
+ case SpecialParameterInstr::kException:
+ loc = Location::ExceptionLocation();
+ break;
+ case SpecialParameterInstr::kStackTrace:
+ loc = Location::StackTraceLocation();
+ break;
+ default:
+ UNREACHABLE();
+ }
+ range->set_assigned_location(loc);
+ AssignSafepoints(defn, range);
+ range->finger()->Initialize(range);
+ SplitInitialDefinitionAt(range, block->lifetime_position() + 2);
+ ConvertAllUses(range);
+
+ // On non-DBC we'll have exception/stacktrace in a register and need to
+ // ensure this register is not available for register allocation during
+ // the [CatchBlockEntry] to ensure it's not overwritten.
+ if (loc.IsRegister()) {
+ BlockLocation(loc, block->lifetime_position(),
+ block->lifetime_position() + 2);
+ }
+ return;
+#if defined(TARGET_ARCH_DBC)
+ } else if (ParameterInstr* param = defn->AsParameter()) {
// This must be in sync with FlowGraphCompiler::CatchEntryRegForVariable.
- ParameterInstr* param = defn->AsParameter();
intptr_t slot_index = param->index();
AssignSafepoints(defn, range);
range->finger()->Initialize(range);
@@ -652,8 +683,8 @@
SplitInitialDefinitionAt(range, block->lifetime_position() + 2);
ConvertAllUses(range);
BlockLocation(Location::RegisterLocation(slot_index), 0, kMaxPosition);
- } else {
- ConstantInstr* constant = defn->AsConstant();
+ return;
+ } else if (ConstantInstr* constant = defn->AsConstant()) {
ASSERT(constant != NULL);
range->set_assigned_location(Location::Constant(constant));
range->set_spill_slot(Location::Constant(constant));
@@ -668,13 +699,11 @@
CompleteRange(tail, Location::kRegister);
}
ConvertAllUses(range);
+ return;
+#endif // defined(TARGET_ARCH_DBC)
}
- return;
}
-#endif

- // Save the range end because it may change below.
- intptr_t range_end = range->End();
if (defn->IsParameter()) {
ParameterInstr* param = defn->AsParameter();
intptr_t slot_index = param->index();
@@ -705,16 +734,14 @@
Location loc;
#if defined(TARGET_ARCH_DBC)
loc = Location::ArgumentsDescriptorLocation();
- range->set_assigned_location(loc);
#else
loc = Location::RegisterLocation(ARGS_DESC_REG);
- range->set_assigned_location(loc);
#endif // defined(TARGET_ARCH_DBC)
+ range->set_assigned_location(loc);
if (loc.IsRegister()) {
AssignSafepoints(defn, range);
if (range->End() > kNormalEntryPos) {
- LiveRange* tail = range->SplitAt(kNormalEntryPos);
- CompleteRange(tail, Location::kRegister);
+ SplitInitialDefinitionAt(range, kNormalEntryPos);
}
ConvertAllUses(range);
return;
@@ -1963,7 +1990,15 @@
}
}

+ while (idx > spill_slots_.length()) {
+ spill_slots_.Add(kMaxPosition);
+ quad_spill_slots_.Add(false);
+ untagged_spill_slots_.Add(false);
+ }
+
if (idx == spill_slots_.length()) {
+ idx = spill_slots_.length();
+
// No free spill slot found. Allocate a new one.
spill_slots_.Add(0);
quad_spill_slots_.Add(need_quad);
diff --git a/runtime/vm/compiler/backend/locations.cc b/runtime/vm/compiler/backend/locations.cc
index a01abb2..44c88ee 100644
--- a/runtime/vm/compiler/backend/locations.cc
+++ b/runtime/vm/compiler/backend/locations.cc
@@ -180,8 +180,17 @@
}
UNREACHABLE();
#if TARGET_ARCH_DBC
- case kArgsDescRegister:
- return "ArgDesc";
+ case kSpecialDbcRegister:
+ switch (payload()) {
+ case kArgsDescriptorReg:
+ return "ArgDescReg";
+ case kExceptionReg:
+ return "ExceptionReg";
+ case kStackTraceReg:
+ return "StackTraceReg";
+ default:
+ UNREACHABLE();
+ }
#endif
default:
if (IsConstant()) {
diff --git a/runtime/vm/compiler/backend/locations.h b/runtime/vm/compiler/backend/locations.h
index 2cf9a45..562dcf8 100644
--- a/runtime/vm/compiler/backend/locations.h
+++ b/runtime/vm/compiler/backend/locations.h
@@ -62,6 +62,14 @@
static const uword kLocationTagMask = 0x3;

public:
+#if defined(TARGET_ARCH_DBC)
+ enum SpecialDbcRegister{
+ kArgsDescriptorReg,
+ kExceptionReg,
+ kStackTraceReg,
+ };
+#endif
+
// Constant payload can overlap with kind field so Kind values
// have to be chosen in a way that their last 2 bits are never
// the same as kConstantTag or kPairLocationTag.
@@ -99,9 +107,9 @@
kFpuRegister = 12,

#ifdef TARGET_ARCH_DBC
- // We use this to signify a special `Location` where the arguments
- // descriptor can be found on DBC.
- kArgsDescRegister = 15,
+ // We use this to signify a special `Location` where the different
+ // [SpecialDbcRegister]s can be found on DBC.
+ kSpecialDbcRegister = 15,
#endif
};

@@ -130,8 +138,9 @@
COMPILE_ASSERT((kFpuRegister & kLocationTagMask) != kPairLocationTag);

#ifdef TARGET_ARCH_DBC
- COMPILE_ASSERT((kArgsDescRegister & kLocationTagMask) != kConstantTag);
- COMPILE_ASSERT((kArgsDescRegister & kLocationTagMask) != kPairLocationTag);
+ COMPILE_ASSERT((kSpecialDbcRegister & kLocationTagMask) != kConstantTag);
+ COMPILE_ASSERT((kSpecialDbcRegister & kLocationTagMask) !=
+ kPairLocationTag);
#endif

// Verify tags and tagmask.
@@ -247,12 +256,44 @@

bool IsFpuRegister() const { return kind() == kFpuRegister; }

-#ifdef TARGET_ARCH_DBC
static Location ArgumentsDescriptorLocation() {
- return Location(kArgsDescRegister, 0);
+#ifdef TARGET_ARCH_DBC
+ return Location(kSpecialDbcRegister, kArgsDescriptorReg);
+#else
+ return Location::RegisterLocation(ARGS_DESC_REG);
+#endif
}

- bool IsArgsDescRegister() const { return kind() == kArgsDescRegister; }
+ static Location ExceptionLocation() {
+#ifdef TARGET_ARCH_DBC
+ return Location(kSpecialDbcRegister, kExceptionReg);
+#else
+ return Location::RegisterLocation(kExceptionObjectReg);
+#endif
+ }
+
+ static Location StackTraceLocation() {
+#ifdef TARGET_ARCH_DBC
+ return Location(kSpecialDbcRegister, kStackTraceReg);
+#else
+ return Location::RegisterLocation(kStackTraceObjectReg);
+#endif
+ }
+
+#ifdef TARGET_ARCH_DBC
+ bool IsArgsDescRegister() const {
+ return IsSpecialDbcRegister(kArgsDescriptorReg);
+ }
+ bool IsExceptionRegister() const {
+ return IsSpecialDbcRegister(kExceptionReg);
+ }
+ bool IsStackTraceRegister() const {
+ return IsSpecialDbcRegister(kStackTraceReg);
+ }
+
+ bool IsSpecialDbcRegister(SpecialDbcRegister reg) const {
+ return kind() == kSpecialDbcRegister && payload() == reg;
+ }
#endif

FpuRegister fpu_reg() const {
diff --git a/runtime/vm/compiler/backend/type_propagator.cc b/runtime/vm/compiler/backend/type_propagator.cc
index 2520f4b..c1281ed 100644
--- a/runtime/vm/compiler/backend/type_propagator.cc
+++ b/runtime/vm/compiler/backend/type_propagator.cc
@@ -1027,6 +1027,10 @@
return CompileType::FromCid(kTypeArgumentsCid);
case kArgDescriptor:
return CompileType::FromCid(kImmutableArrayCid);
+ case kException:
+ return CompileType::Dynamic();
+ case kStackTrace:
+ return CompileType::FromCid(kStackTraceCid);
}
UNREACHABLE();
return CompileType::Dynamic();
diff --git a/runtime/vm/compiler/frontend/flow_graph_builder.cc b/runtime/vm/compiler/frontend/flow_graph_builder.cc
index 628bd55..72982d3 100644
--- a/runtime/vm/compiler/frontend/flow_graph_builder.cc
+++ b/runtime/vm/compiler/frontend/flow_graph_builder.cc
@@ -4105,7 +4105,8 @@
owner()->AllocateBlockId(), catch_handler_index, owner()->graph_entry(),
catch_block->handler_types(), try_handler_index,
catch_block->exception_var(), catch_block->stacktrace_var(),
- catch_block->needs_stacktrace(), owner()->GetNextDeoptId());
+ catch_block->needs_stacktrace(), owner()->GetNextDeoptId(),
+ &catch_block->exception_var(), &catch_block->stacktrace_var());
owner()->AddCatchEntry(catch_entry);
AppendFragment(catch_entry, for_catch);

@@ -4151,7 +4152,8 @@
owner()->AllocateBlockId(), original_handler_index,
owner()->graph_entry(), types, catch_handler_index,
catch_block->exception_var(), catch_block->stacktrace_var(),
- catch_block->needs_stacktrace(), owner()->GetNextDeoptId());
+ catch_block->needs_stacktrace(), owner()->GetNextDeoptId(),
+ &catch_block->exception_var(), &catch_block->stacktrace_var());
owner()->AddCatchEntry(finally_entry);
AppendFragment(finally_entry, for_finally);
}
diff --git a/runtime/vm/compiler/frontend/kernel_binary_flowgraph.cc b/runtime/vm/compiler/frontend/kernel_binary_flowgraph.cc
index 52d5650..516da9d 100644
--- a/runtime/vm/compiler/frontend/kernel_binary_flowgraph.cc
+++ b/runtime/vm/compiler/frontend/kernel_binary_flowgraph.cc
@@ -1681,6 +1681,9 @@
ExitScope(builder_->reader_->min_position(),
builder_->reader_->max_position());
}
+
+ FinalizeCatchVariables();
+
--depth_.catch_;
return;
}
@@ -1698,6 +1701,8 @@

VisitStatement(); // read finalizer.

+ FinalizeCatchVariables();
+
--depth_.catch_;
return;
}
@@ -2066,6 +2071,30 @@
variables->Add(v);
}

+void StreamingScopeBuilder::FinalizeExceptionVariable(
+ GrowableArray<LocalVariable*>* variables,
+ GrowableArray<LocalVariable*>* raw_variables,
+ const String& symbol,
+ intptr_t nesting_depth) {
+ // No need to create variables for try/catch-statements inside
+ // nested functions.
+ if (depth_.function_ > 0) return;
+
+ LocalVariable* variable = (*variables)[nesting_depth - 1];
+ LocalVariable* raw_variable;
+ if (variable->is_captured()) {
+ raw_variable =
+ new LocalVariable(TokenPosition::kNoSource, TokenPosition::kNoSource,
+ symbol, AbstractType::dynamic_type());
+ const bool ok = scope_->AddVariable(raw_variable);
+ ASSERT(ok);
+ } else {
+ raw_variable = variable;
+ }
+ raw_variables->EnsureLength(nesting_depth, NULL);
+ (*raw_variables)[nesting_depth - 1] = raw_variable;
+}
+
void StreamingScopeBuilder::AddTryVariables() {
AddExceptionVariable(&result_->catch_context_variables,
":saved_try_context_var", depth_.try_);
@@ -2078,6 +2107,16 @@
depth_.catch_);
}

+void StreamingScopeBuilder::FinalizeCatchVariables() {
+ const intptr_t unique_id = result_->raw_variable_counter_++;
+ FinalizeExceptionVariable(
+ &result_->exception_variables, &result_->raw_exception_variables,
+ GenerateName(":raw_exception", unique_id), depth_.catch_);
+ FinalizeExceptionVariable(
+ &result_->stack_trace_variables, &result_->raw_stack_trace_variables,
+ GenerateName(":raw_stacktrace", unique_id), depth_.catch_);
+}
+
void StreamingScopeBuilder::AddIteratorVariable() {
if (depth_.function_ > 0) return;
if (result_->iterator_variables.length() >= depth_.for_in_) return;
diff --git a/runtime/vm/compiler/frontend/kernel_binary_flowgraph.h b/runtime/vm/compiler/frontend/kernel_binary_flowgraph.h
index 7a2ea01..e1791d8 100644
--- a/runtime/vm/compiler/frontend/kernel_binary_flowgraph.h
+++ b/runtime/vm/compiler/frontend/kernel_binary_flowgraph.h
@@ -712,8 +712,14 @@
const char* prefix,
intptr_t nesting_depth);

+ void FinalizeExceptionVariable(GrowableArray<LocalVariable*>* variables,
+ GrowableArray<LocalVariable*>* raw_variables,
+ const String& symbol,
+ intptr_t nesting_depth);
+
void AddTryVariables();
void AddCatchVariables();
+ void FinalizeCatchVariables();
void AddIteratorVariable();
void AddSwitchVariable();

diff --git a/runtime/vm/compiler/frontend/kernel_to_il.cc b/runtime/vm/compiler/frontend/kernel_to_il.cc
index f004628..acaf032 100644
--- a/runtime/vm/compiler/frontend/kernel_to_il.cc
+++ b/runtime/vm/compiler/frontend/kernel_to_il.cc
@@ -1124,19 +1124,54 @@
Fragment FlowGraphBuilder::CatchBlockEntry(const Array& handler_types,
intptr_t handler_index,
bool needs_stacktrace) {
- ASSERT(CurrentException()->is_captured() ==
- CurrentStackTrace()->is_captured());
- const bool should_restore_closure_context =
- CurrentException()->is_captured() || CurrentCatchContext()->is_captured();
+ LocalVariable* exception_var = CurrentException();
+ LocalVariable* stacktrace_var = CurrentStackTrace();
+ LocalVariable* raw_exception_var = CurrentRawException();
+ LocalVariable* raw_stacktrace_var = CurrentRawStackTrace();
+
CatchBlockEntryInstr* entry = new (Z) CatchBlockEntryInstr(
TokenPosition::kNoSource, // Token position of catch block.
false, // Not an artifact of compilation.
AllocateBlockId(), CurrentTryIndex(), graph_entry_, handler_types,
- handler_index, *CurrentException(), *CurrentStackTrace(),
- needs_stacktrace, GetNextDeoptId(), should_restore_closure_context);
+ handler_index, *exception_var, *stacktrace_var, needs_stacktrace,
+ GetNextDeoptId(), raw_exception_var, raw_stacktrace_var);
graph_entry_->AddCatchEntry(entry);
+
Fragment instructions(entry);

+ // Auxiliary variables introduced by the try catch can be captured if we are
+ // inside a function with yield/resume points. In this case we first need
+ // to restore the context to match the context at entry into the closure.
+ const bool should_restore_closure_context =
+ CurrentException()->is_captured() || CurrentCatchContext()->is_captured();
+ LocalVariable* context_variable = parsed_function_->current_context_var();
+ if (should_restore_closure_context) {
+ ASSERT(parsed_function_->function().IsClosureFunction());
+ LocalScope* scope = parsed_function_->node_sequence()->scope();
+
+ LocalVariable* closure_parameter = scope->VariableAt(0);
+ ASSERT(!closure_parameter->is_captured());
+ instructions += LoadLocal(closure_parameter);
+ instructions += LoadField(Closure::context_offset());
+ instructions += StoreLocal(TokenPosition::kNoSource, context_variable);
+ instructions += Drop();
+ }
+
+ if (exception_var->is_captured()) {
+ instructions += LoadLocal(context_variable);
+ instructions += LoadLocal(raw_exception_var);
+ instructions +=
+ StoreInstanceField(TokenPosition::kNoSource,
+ Context::variable_offset(exception_var->index()));
+ }
+ if (stacktrace_var->is_captured()) {
+ instructions += LoadLocal(context_variable);
+ instructions += LoadLocal(raw_stacktrace_var);
+ instructions +=
+ StoreInstanceField(TokenPosition::kNoSource,
+ Context::variable_offset(stacktrace_var->index()));
+ }
+
// :saved_try_context_var can be captured in the context of
// of the closure, in this case CatchBlockEntryInstr restores
// :current_context_var to point to closure context in the
diff --git a/runtime/vm/compiler/frontend/kernel_to_il.h b/runtime/vm/compiler/frontend/kernel_to_il.h
index 6d9948d..b40b736 100644
--- a/runtime/vm/compiler/frontend/kernel_to_il.h
+++ b/runtime/vm/compiler/frontend/kernel_to_il.h
@@ -466,7 +466,8 @@
finally_return_variable(NULL),
setter_value(NULL),
yield_jump_variable(NULL),
- yield_context_variable(NULL) {}
+ yield_context_variable(NULL),
+ raw_variable_counter_(0) {}

IntMap<LocalVariable*> locals;
IntMap<LocalScope*> scopes;
@@ -503,6 +504,12 @@
GrowableArray<LocalVariable*> stack_trace_variables;
GrowableArray<LocalVariable*> catch_context_variables;

+ // These are used to access the raw exception/stacktrace variables (and are
+ // used to put them into the captured variables in the context).
+ GrowableArray<LocalVariable*> raw_exception_variables;
+ GrowableArray<LocalVariable*> raw_stack_trace_variables;
+ intptr_t raw_variable_counter_;
+
// For-in iterators, one per for-in nesting level.
GrowableArray<LocalVariable*> iterator_variables;
};
@@ -818,6 +825,12 @@
LocalVariable* CurrentStackTrace() {
return scopes_->stack_trace_variables[catch_depth_ - 1];
}
+ LocalVariable* CurrentRawException() {
+ return scopes_->raw_exception_variables[catch_depth_ - 1];
+ }
+ LocalVariable* CurrentRawStackTrace() {
+ return scopes_->raw_stack_trace_variables[catch_depth_ - 1];
+ }
LocalVariable* CurrentCatchContext() {
return scopes_->catch_context_variables[try_depth_];
}
diff --git a/tests/language_2/regress_31946_test.dart b/tests/language_2/regress_31946_test.dart
new file mode 100644
index 0000000..d9b41b8
--- /dev/null
+++ b/tests/language_2/regress_31946_test.dart
@@ -0,0 +1,32 @@
+// Copyright (c) 2018, the Dart project authors. Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+// VMOptions=--no_background_compilation --optimization_counter_threshold=10
+
+import 'dart:async';
+
+import 'package:expect/expect.dart';
+
+var root = [];
+var tests = [
+ () async {},
+ () async {
+ await new Future.value();
+ root.singleWhere((f) => f.name == 'optimizedFunction');
+ },
+];
+
+main(args) async {
+ for (int i = 0; i < 100; ++i) {
+ int exceptions = 0;
+ for (final test in tests) {
+ try {
+ await test();
+ } on StateError {
+ exceptions++;
+ }
+ }
+ Expect.isTrue(exceptions == 1);
+ }
+}

To view, visit change 35182. To unsubscribe, or for help writing mail filters, visit settings.

Gerrit-Project: sdk
Gerrit-Branch: master
Gerrit-MessageType: merged
Gerrit-Change-Id: Ia6b62f2a0986c78b90afe7fae25025ca4e5b09db
Gerrit-Change-Number: 35182
Gerrit-PatchSet: 6
Gerrit-Owner: Martin Kustermann <kuste...@google.com>
Gerrit-Reviewer: Martin Kustermann <kuste...@google.com>
Gerrit-Reviewer: Vyacheslav Egorov <veg...@google.com>
Gerrit-CC: Dart Reviews <rev...@dartlang.org>
