Attention is currently required from: Ryan Macnak, Vyacheslav Egorov, Tess Strickland.
Alexander Markov would like Ryan Macnak, Vyacheslav Egorov and Tess Strickland to review this change.
[vm/aot] Discard Code objects from the heap
While deserializing an AOT snapshot, Code objects which do not contain
any valuable information besides the entry point and stack maps are
discarded and not allocated on the heap (they are replaced with
StubCode::UnknownDartCode()).
PC -> Code/CompressedStackMaps lookup is implemented using a separate
table (InstructionsTable).
Flutter gallery in release-sizeopt mode:
Heap size of snapshot objects: arm -26.89%, arm64 -27.68%
Large Flutter application in release mode with --dwarf-stack-traces:
Heap size of snapshot objects: -24.3%.
Discarded Code objects: 72.5% of all Code objects.
Issue: https://github.com/dart-lang/sdk/issues/44852.
TEST=existing tests; "--dwarf_stack_traces --no-retain_function_objects
--no-retain_code_objects" mode is enabled for a few tests.
Change-Id: I5fe3e283630c8e8f4442319d5dcae38d174dd0d8
---
M runtime/vm/class_id.h
M runtime/vm/clustered_snapshot.cc
M runtime/vm/clustered_snapshot.h
M runtime/vm/compiler/aot/precompiler.cc
M runtime/vm/compiler/runtime_api.cc
M runtime/vm/compiler/runtime_api.h
M runtime/vm/compiler/runtime_offsets_extracted.h
M runtime/vm/compiler/runtime_offsets_list.h
M runtime/vm/compiler/stub_code_compiler.cc
M runtime/vm/compiler/stub_code_compiler_arm.cc
M runtime/vm/compiler/stub_code_compiler_arm64.cc
M runtime/vm/compiler/stub_code_compiler_x64.cc
M runtime/vm/dart_entry.cc
M runtime/vm/dart_entry.h
M runtime/vm/object.cc
M runtime/vm/object.h
M runtime/vm/object_graph.cc
M runtime/vm/object_service.cc
M runtime/vm/object_store.h
M runtime/vm/raw_object.cc
M runtime/vm/raw_object.h
M runtime/vm/raw_object_snapshot.cc
M runtime/vm/reverse_pc_lookup_cache.cc
M runtime/vm/reverse_pc_lookup_cache.h
M runtime/vm/stack_frame.cc
M runtime/vm/symbols.h
M runtime/vm/tagged_pointer.h
M runtime/vm/type_testing_stubs_test.cc
M tests/corelib/apply2_test.dart
M tests/corelib/bigint_from_test.dart
M tests/corelib_2/apply2_test.dart
M tests/corelib_2/bigint_from_test.dart
M tests/language/async/identifier_test.dart
M tests/language/deferred/function_type_test.dart
M tests/language/deferred/shared_and_unshared_classes_test.dart
M tests/language_2/async/identifier_test.dart
M tests/language_2/deferred/function_type_test.dart
M tests/language_2/deferred/shared_and_unshared_classes_test.dart
38 files changed, 785 insertions(+), 202 deletions(-)
diff --git a/runtime/vm/class_id.h b/runtime/vm/class_id.h
index ed8a237..f59c94b 100644
--- a/runtime/vm/class_id.h
+++ b/runtime/vm/class_id.h
@@ -32,6 +32,7 @@
V(Code) \
V(Instructions) \
V(InstructionsSection) \
+ V(InstructionsTable) \
V(ObjectPool) \
V(PcDescriptors) \
V(CodeSourceMap) \
@@ -420,7 +421,8 @@
IsOneByteStringClassId(index) || IsTwoByteStringClassId(index) ||
IsTypedDataClassId(index) || (index == kContextCid) ||
(index == kTypeArgumentsCid) || (index == kInstructionsCid) ||
- (index == kInstructionsSectionCid) || (index == kObjectPoolCid) ||
+ (index == kInstructionsSectionCid) ||
+ (index == kInstructionsTableCid) || (index == kObjectPoolCid) ||
(index == kPcDescriptorsCid) || (index == kCodeSourceMapCid) ||
(index == kCompressedStackMapsCid) ||
(index == kLocalVarDescriptorsCid) ||
diff --git a/runtime/vm/clustered_snapshot.cc b/runtime/vm/clustered_snapshot.cc
index 537bfe0..4ea44ae 100644
--- a/runtime/vm/clustered_snapshot.cc
+++ b/runtime/vm/clustered_snapshot.cc
@@ -976,8 +976,21 @@
Function::InstanceSize());
ReadFromTo(func);
+#if defined(DEBUG)
+ func->untag()->entry_point_ = 0;
+ func->untag()->unchecked_entry_point_ = 0;
+#endif
+
if (kind == Snapshot::kFullAOT) {
- func->untag()->code_ = static_cast<CodePtr>(d->ReadRef());
+ const intptr_t code_index = d->ReadUnsigned();
+ CodePtr code = static_cast<CodePtr>(d->Ref(code_index));
+ func->untag()->code_ = code;
+ if (Code::IsUnknownDartCode(code)) {
+ const uword entry_point = d->instructions_table().EntryPointAt(
+ code_index - d->code_start_index());
+ func->untag()->entry_point_ = entry_point;
+ func->untag()->unchecked_entry_point_ = entry_point;
+ }
} else if (kind == Snapshot::kFullJIT) {
NOT_IN_PRECOMPILED(func->untag()->unoptimized_code_ =
static_cast<CodePtr>(d->ReadRef()));
@@ -985,11 +998,6 @@
func->untag()->ic_data_array_ = static_cast<ArrayPtr>(d->ReadRef());
}
-#if defined(DEBUG)
- func->untag()->entry_point_ = 0;
- func->untag()->unchecked_entry_point_ = 0;
-#endif
-
#if !defined(DART_PRECOMPILED_RUNTIME)
if (kind != Snapshot::kFullAOT) {
func->untag()->token_pos_ = d->ReadTokenPosition();
@@ -1021,13 +1029,16 @@
for (intptr_t i = start_index_; i < stop_index_; i++) {
func ^= refs.At(i);
ASSERT(func.ptr()->untag()->code()->IsCode());
- uword entry_point = func.ptr()->untag()->code()->untag()->entry_point_;
- ASSERT(entry_point != 0);
- func.ptr()->untag()->entry_point_ = entry_point;
- uword unchecked_entry_point =
- func.ptr()->untag()->code()->untag()->unchecked_entry_point_;
- ASSERT(unchecked_entry_point != 0);
- func.ptr()->untag()->unchecked_entry_point_ = unchecked_entry_point;
+ if (!Code::IsUnknownDartCode(func.ptr()->untag()->code())) {
+ uword entry_point =
+ func.ptr()->untag()->code()->untag()->entry_point_;
+ ASSERT(entry_point != 0);
+ func.ptr()->untag()->entry_point_ = entry_point;
+ uword unchecked_entry_point =
+ func.ptr()->untag()->code()->untag()->unchecked_entry_point_;
+ ASSERT(unchecked_entry_point != 0);
+ func.ptr()->untag()->unchecked_entry_point_ = unchecked_entry_point;
+ }
}
} else if (d->kind() == Snapshot::kFullJIT) {
Function& func = Function::Handle(d->zone());
@@ -1743,8 +1754,7 @@
class CodeSerializationCluster : public SerializationCluster {
public:
explicit CodeSerializationCluster(Heap* heap)
- : SerializationCluster("Code", compiler::target::Code::InstanceSize()),
- array_(Array::Handle()) {}
+ : SerializationCluster("Code"), array_(Array::Handle()) {}
~CodeSerializationCluster() {}
void Trace(Serializer* s, ObjectPtr object) {
@@ -1898,14 +1908,22 @@
const intptr_t count = objects_.length();
s->WriteUnsigned(count);
for (intptr_t i = 0; i < count; i++) {
- CodePtr code = objects_[i];
- s->AssignRef(code);
+ WriteAlloc(s, objects_[i]);
}
const intptr_t deferred_count = deferred_objects_.length();
s->WriteUnsigned(deferred_count);
for (intptr_t i = 0; i < deferred_count; i++) {
- CodePtr code = deferred_objects_[i];
- s->AssignRef(code);
+ WriteAlloc(s, deferred_objects_[i]);
+ }
+ }
+
+ void WriteAlloc(Serializer* s, CodePtr code) {
+ s->AssignRef(code);
+ AutoTraceObjectName(code, MakeDisambiguatedCodeName(s, code));
+ const int32_t state_bits = code->untag()->state_bits_;
+ s->Write<int32_t>(state_bits);
+ if (!Code::DiscardedBit::decode(state_bits)) {
+ target_memory_size_ += compiler::target::Code::InstanceSize();
}
}
@@ -1951,14 +1969,6 @@
active_unchecked_offset, code, deferred);
}
- if (s->InCurrentLoadingUnit(code->untag()->compressed_stackmaps_)) {
- WriteField(code, compressed_stackmaps_);
- } else {
- WriteFieldValue(compressed_stackmaps_, CompressedStackMaps::null());
- }
-
- s->Write<int32_t>(code->untag()->state_bits_);
-
#if defined(DART_PRECOMPILER)
if (FLAG_write_v8_snapshot_profile_to != nullptr) {
// If we are writing V8 snapshot profile then attribute references going
@@ -2023,6 +2033,11 @@
WriteField(code, exception_handlers_);
WriteField(code, pc_descriptors_);
WriteField(code, catch_entry_);
+ if (s->InCurrentLoadingUnit(code->untag()->compressed_stackmaps_)) {
+ WriteField(code, compressed_stackmaps_);
+ } else {
+ WriteFieldValue(compressed_stackmaps_, CompressedStackMaps::null());
+ }
if (FLAG_precompiled_mode && FLAG_dwarf_stack_traces_mode) {
WriteFieldValue(inlined_id_to_function_, Array::null());
WriteFieldValue(code_source_map_, CodeSourceMap::null());
@@ -2097,21 +2112,33 @@
void ReadAlloc(Deserializer* d, bool stamp_canonical) {
PageSpace* old_space = d->heap()->old_space();
start_index_ = d->next_index();
+ d->set_code_start_index(start_index_);
const intptr_t count = d->ReadUnsigned();
for (intptr_t i = 0; i < count; i++) {
- auto code = AllocateUninitialized(old_space, Code::InstanceSize(0));
- d->AssignRef(code);
+ ReadAllocOneCode(d, old_space);
}
stop_index_ = d->next_index();
deferred_start_index_ = d->next_index();
const intptr_t deferred_count = d->ReadUnsigned();
for (intptr_t i = 0; i < deferred_count; i++) {
- auto code = AllocateUninitialized(old_space, Code::InstanceSize(0));
- d->AssignRef(code);
+ ReadAllocOneCode(d, old_space);
}
deferred_stop_index_ = d->next_index();
}
+ void ReadAllocOneCode(Deserializer* d, PageSpace* old_space) {
+ const int32_t state_bits = d->Read<int32_t>();
+ if (Code::DiscardedBit::decode(state_bits)) {
+ ASSERT(StubCode::HasBeenInitialized());
+ d->AssignRef(StubCode::UnknownDartCode().ptr());
+ } else {
+ auto code = static_cast<CodePtr>(
+ AllocateUninitialized(old_space, Code::InstanceSize(0)));
+ d->AssignRef(code);
+ code->untag()->state_bits_ = state_bits;
+ }
+ }
+
void ReadFill(Deserializer* d, bool stamp_canonical) {
ASSERT(!stamp_canonical); // Never canonical.
for (intptr_t id = start_index_; id < stop_index_; id++) {
@@ -2124,23 +2151,19 @@
void ReadFill(Deserializer* d, intptr_t id, bool deferred) {
auto const code = static_cast<CodePtr>(d->Ref(id));
- Deserializer::InitializeHeader(code, kCodeCid, Code::InstanceSize(0));
-
- d->ReadInstructions(code, deferred);
-
- code->untag()->compressed_stackmaps_ =
- static_cast<CompressedStackMapsPtr>(d->ReadRef());
- code->untag()->state_bits_ = d->Read<int32_t>();
#if defined(DART_PRECOMPILED_RUNTIME)
- if (Code::IsDiscarded(code)) {
- code->untag()->owner_ = Smi::New(kFunctionCid);
+ if (Code::IsUnknownDartCode(code)) {
+ d->ReadInstructions(code, deferred, /*discarded=*/true);
return;
}
-#else
- ASSERT(!Code::IsDiscarded(code));
#endif // defined(DART_PRECOMPILED_RUNTIME)
+ Deserializer::InitializeHeader(code, kCodeCid, Code::InstanceSize(0));
+ ASSERT(!Code::IsDiscarded(code));
+
+ d->ReadInstructions(code, deferred, /*discarded=*/false);
+
// There would be a single global pool if this is a full AOT snapshot
// with bare instructions.
if (!(d->kind() == Snapshot::kFullAOT && FLAG_use_bare_instructions)) {
@@ -2154,6 +2177,8 @@
code->untag()->pc_descriptors_ =
static_cast<PcDescriptorsPtr>(d->ReadRef());
code->untag()->catch_entry_ = d->ReadRef();
+ code->untag()->compressed_stackmaps_ =
+ static_cast<CompressedStackMapsPtr>(d->ReadRef());
code->untag()->inlined_id_to_function_ =
static_cast<ArrayPtr>(d->ReadRef());
code->untag()->code_source_map_ =
@@ -2178,7 +2203,7 @@
}
void PostLoad(Deserializer* d, const Array& refs, bool canonicalize) {
- d->EndInstructions(refs, start_index_, stop_index_);
+ d->EndInstructions();
#if !defined(PRODUCT)
if (!CodeObservers::AreActive() && !FLAG_support_disassembler) return;
@@ -5975,6 +6000,7 @@
ASSERT(deferred_object->IsCode());
CodePtr code = static_cast<CodePtr>(deferred_object->ptr());
ASSERT(s->RefId(code) == (start_index + i));
+ ASSERT(!Code::IsDiscarded(code));
s->WriteInstructions(code->untag()->instructions_,
code->untag()->unchecked_offset_, code, false);
if (!FLAG_use_bare_instructions) {
@@ -6030,7 +6056,8 @@
deferred_stop_index_ = deferred_start_index_ + d->ReadUnsigned();
for (intptr_t id = deferred_start_index_; id < deferred_stop_index_; id++) {
CodePtr code = static_cast<CodePtr>(d->Ref(id));
- d->ReadInstructions(code, false);
+ ASSERT(!Code::IsUnknownDartCode(code));
+ d->ReadInstructions(code, /*deferred=*/false, /*discarded=*/false);
if (code->untag()->owner_->IsHeapObject() &&
code->untag()->owner_->IsFunction()) {
FunctionPtr func = static_cast<FunctionPtr>(code->untag()->owner_);
@@ -6070,12 +6097,13 @@
if (isolate_group->dispatch_table_snapshot() != nullptr) {
ReadStream stream(isolate_group->dispatch_table_snapshot(),
isolate_group->dispatch_table_snapshot_size());
- d->ReadDispatchTable(&stream);
+ d->ReadDispatchTable(&stream, /*deferred=*/true, deferred_start_index_,
+ deferred_stop_index_);
}
}
void PostLoad(Deserializer* d, const Array& refs) {
- d->EndInstructions(refs, deferred_start_index_, deferred_stop_index_);
+ d->EndInstructions();
unit_.set_base_objects(refs);
}
@@ -6523,8 +6551,8 @@
}
#if !defined(DART_PRECOMPILED_RUNTIME)
-void Serializer::PrepareInstructions() {
- if (!Snapshot::IncludesCode(kind())) return;
+intptr_t Serializer::PrepareInstructions() {
+ if (!Snapshot::IncludesCode(kind())) return 0;
CodeSerializationCluster* cluster =
static_cast<CodeSerializationCluster*>(clusters_by_cid_[kCodeCid]);
@@ -6576,8 +6604,10 @@
GrowableArray<ImageWriterCommand> writer_commands;
RelocateCodeObjects(vm_, &code_objects, &writer_commands);
image_writer_->PrepareForSerialization(&writer_commands);
+ return code_objects.length();
}
#endif // defined(DART_PRECOMPILER) && !defined(TARGET_ARCH_IA32)
+ return 0;
}
void Serializer::WriteInstructions(InstructionsPtr instr,
@@ -6614,6 +6644,16 @@
(unchecked_offset << 1) | (Code::HasMonomorphicEntry(code) ? 0x1 : 0x0);
WriteUnsigned(payload_info);
previous_text_offset_ = offset;
+
+ if (Code::IsDiscarded(code)) {
+ // Discarded Code objects are not supported in the vm isolate snapshot.
+ ASSERT(!vm_);
+ // Stack maps of discarded Code objects are written along with
+ // instructions so they can be added to instructions table during
+ // deserialization.
+ WritePropertyRef(code->untag()->compressed_stackmaps_,
+ "compressed_stackmaps_");
+ }
return;
}
#endif
@@ -6828,8 +6868,14 @@
}
}
GrowableArray<SerializationCluster*> clusters;
+ // Code cluster should be deserialized before Function as
+ // FunctionDeserializationCluster::ReadFill uses instructions table
+ // which is filled in CodeDeserializationCluster::ReadFill.
+ if (clusters_by_cid_[kCodeCid] != nullptr) {
+ clusters.Add(clusters_by_cid_[kCodeCid]);
+ }
for (intptr_t cid = 0; cid < num_cids_; cid++) {
- if (clusters_by_cid_[cid] != nullptr) {
+ if (clusters_by_cid_[cid] != nullptr && cid != kCodeCid) {
clusters.Add(clusters_by_cid_[cid]);
}
}
@@ -6842,7 +6888,7 @@
}
#endif
- PrepareInstructions();
+ instructions_table_len_ = PrepareInstructions();
intptr_t num_objects = num_base_objects_ + num_written_objects_;
#if defined(ARCH_IS_64_BIT)
@@ -6861,6 +6907,9 @@
} else {
WriteUnsigned(0);
}
+ ASSERT((instructions_table_len_ == 0) ||
+ (FLAG_precompiled_mode && FLAG_use_bare_instructions));
+ WriteUnsigned(instructions_table_len_);
for (SerializationCluster* cluster : canonical_clusters) {
cluster->WriteAndMeasureAlloc(this);
@@ -7121,6 +7170,14 @@
clusters_by_size.Add(new (zone_) FakeSerializationCluster(
"DispatchTable", entry_count, dispatch_table_size_));
}
+ if (instructions_table_len_ > 0) {
+ const intptr_t memory_size =
+ compiler::target::InstructionsTable::InstanceSize(
+ instructions_table_len_) +
+ compiler::target::Array::InstanceSize(instructions_table_len_);
+ clusters_by_size.Add(new (zone_) FakeSerializationCluster(
+ "InstructionsTable", instructions_table_len_, 0, memory_size));
+ }
clusters_by_size.Sort(CompareClusters);
double total_size =
static_cast<double>(bytes_written() + GetDataSize() + text_size);
@@ -7158,7 +7215,8 @@
canonical_clusters_(nullptr),
clusters_(nullptr),
initial_field_table_(thread->isolate_group()->initial_field_table()),
- is_non_root_unit_(is_non_root_unit) {
+ is_non_root_unit_(is_non_root_unit),
+ instructions_table_(InstructionsTable::Handle(thread->zone())) {
if (Snapshot::IncludesCode(kind)) {
ASSERT(instructions_buffer != nullptr);
ASSERT(data_buffer != nullptr);
@@ -7308,7 +7366,10 @@
return NULL;
}
-void Deserializer::ReadDispatchTable(ReadStream* stream) {
+void Deserializer::ReadDispatchTable(ReadStream* stream,
+ bool deferred,
+ intptr_t deferred_code_start_index,
+ intptr_t deferred_code_end_index) {
#if defined(DART_PRECOMPILED_RUNTIME)
const uint8_t* table_snapshot_start = stream->AddressOfCurrentPosition();
const intptr_t length = stream->ReadUnsigned();
@@ -7323,8 +7384,16 @@
auto code = IG->object_store()->dispatch_table_null_error_stub();
ASSERT(code != Code::null());
uword null_entry = Code::EntryPointOf(code);
+ uword not_loaded_entry = StubCode::NotLoaded().EntryPoint();
- auto const table = new DispatchTable(length);
+ DispatchTable* table;
+ if (deferred) {
+ table = IG->dispatch_table();
+ ASSERT(table != nullptr && table->length() == length);
+ } else {
+ ASSERT(IG->dispatch_table() == nullptr);
+ table = new DispatchTable(length);
+ }
auto const array = table->array();
uword value = 0;
uword recent[kDispatchTableRecentCount] = {0};
@@ -7347,8 +7416,24 @@
repeat_count = encoded - 1;
} else {
intptr_t cluster_index = encoded - kDispatchTableIndexBase;
- code = Code::RawCast(Ref(first_code_id + cluster_index));
- value = Code::EntryPointOf(code);
+ if (deferred) {
+ intptr_t id = first_code_id + cluster_index;
+ if ((deferred_code_start_index <= id) &&
+ (id < deferred_code_end_index)) {
+ // Deferred instructions are at the end of the instructions table.
+ value = instructions_table().EntryPointAt(
+ instructions_table().length() - deferred_code_end_index + id);
+ } else {
+ // Reuse old value from the dispatch table.
+ value = array[i];
+ }
+ } else {
+ if (cluster_index < instructions_table().length()) {
+ value = instructions_table().EntryPointAt(cluster_index);
+ } else {
+ value = not_loaded_entry;
+ }
+ }
recent[recent_index] = value;
recent_index = (recent_index + 1) & kDispatchTableRecentMask;
}
@@ -7356,11 +7441,13 @@
}
ASSERT(repeat_count == 0);
- IG->set_dispatch_table(table);
- intptr_t table_snapshot_size =
- stream->AddressOfCurrentPosition() - table_snapshot_start;
- IG->set_dispatch_table_snapshot(table_snapshot_start);
- IG->set_dispatch_table_snapshot_size(table_snapshot_size);
+ if (!deferred) {
+ IG->set_dispatch_table(table);
+ intptr_t table_snapshot_size =
+ stream->AddressOfCurrentPosition() - table_snapshot_start;
+ IG->set_dispatch_table_snapshot(table_snapshot_start);
+ IG->set_dispatch_table_snapshot_size(table_snapshot_size);
+ }
#endif
}
@@ -7481,8 +7568,11 @@
return ApiError::New(msg, Heap::kOld);
}
-void Deserializer::ReadInstructions(CodePtr code, bool deferred) {
+void Deserializer::ReadInstructions(CodePtr code,
+ bool deferred,
+ bool discarded) {
if (deferred) {
+ ASSERT(!discarded);
#if defined(DART_PRECOMPILED_RUNTIME)
if (FLAG_use_bare_instructions) {
uword entry_point = StubCode::NotLoaded().EntryPoint();
@@ -7508,8 +7598,6 @@
#if defined(DART_PRECOMPILED_RUNTIME)
if (FLAG_use_bare_instructions) {
- // There are no serialized RawInstructions objects in this mode.
- code->untag()->instructions_ = Instructions::null();
previous_text_offset_ += ReadUnsigned();
const uword payload_start =
image_reader_->GetBareInstructionsAt(previous_text_offset_);
@@ -7528,11 +7616,23 @@
const uword monomorphic_entry_point =
payload_start + monomorphic_entry_offset;
- code->untag()->entry_point_ = entry_point;
- code->untag()->unchecked_entry_point_ = entry_point + unchecked_offset;
- code->untag()->monomorphic_entry_point_ = monomorphic_entry_point;
- code->untag()->monomorphic_unchecked_entry_point_ =
- monomorphic_entry_point + unchecked_offset;
+ ObjectPtr code_descriptor = code;
+ if (discarded) {
+ code_descriptor = static_cast<CompressedStackMapsPtr>(ReadRef());
+ }
+
+ instructions_table_.SetEntryAt(instructions_index_++, payload_start,
+ has_monomorphic_entrypoint, code_descriptor);
+
+ if (!discarded) {
+ // There are no serialized RawInstructions objects in this mode.
+ code->untag()->instructions_ = Instructions::null();
+ code->untag()->entry_point_ = entry_point;
+ code->untag()->unchecked_entry_point_ = entry_point + unchecked_offset;
+ code->untag()->monomorphic_entry_point_ = monomorphic_entry_point;
+ code->untag()->monomorphic_unchecked_entry_point_ =
+ monomorphic_entry_point + unchecked_offset;
+ }
return;
}
#endif
@@ -7554,38 +7654,32 @@
Code::InitializeCachedEntryPointsFrom(code, instr, unchecked_offset);
}
-void Deserializer::EndInstructions(const Array& refs,
- intptr_t start_index,
- intptr_t stop_index) {
+void Deserializer::EndInstructions() {
#if defined(DART_PRECOMPILED_RUNTIME)
if (FLAG_use_bare_instructions) {
uword previous_end = image_reader_->GetBareInstructionsEnd();
- for (intptr_t id = stop_index - 1; id >= start_index; id--) {
- CodePtr code = static_cast<CodePtr>(refs.At(id));
- uword start = Code::PayloadStartOf(code);
+ for (intptr_t i = instructions_index_ - 1; i >= 0; --i) {
+ ObjectPtr descriptor = instructions_table_.DescriptorAt(i);
+ uword start = instructions_table_.PayloadStartAt(i);
ASSERT(start <= previous_end);
- code->untag()->instructions_length_ = previous_end - start;
+ if (descriptor->IsCode()) {
+ CodePtr code = static_cast<CodePtr>(descriptor);
+ code->untag()->instructions_length_ = previous_end - start;
+ }
previous_end = start;
}
- // Build an array of code objects representing the order in which the
- // [Code]'s instructions will be located in memory.
- const intptr_t count = stop_index - start_index;
- const Array& order_table =
- Array::Handle(zone_, Array::New(count, Heap::kOld));
- Object& code = Object::Handle(zone_);
- for (intptr_t i = 0; i < count; i++) {
- code = refs.At(start_index + i);
- order_table.SetAt(i, code);
- }
ObjectStore* object_store = IsolateGroup::Current()->object_store();
- GrowableObjectArray& order_tables =
- GrowableObjectArray::Handle(zone_, object_store->code_order_tables());
- if (order_tables.IsNull()) {
- order_tables = GrowableObjectArray::New(Heap::kOld);
- object_store->set_code_order_tables(order_tables);
+ GrowableObjectArray& tables =
+ GrowableObjectArray::Handle(zone_, object_store->instructions_tables());
+ if (tables.IsNull()) {
+ tables = GrowableObjectArray::New(Heap::kOld);
+ object_store->set_instructions_tables(tables);
}
- order_tables.Add(order_table, Heap::kOld);
+ if ((tables.Length() == 0) ||
+ (tables.At(tables.Length() - 1) != instructions_table_.ptr())) {
+ tables.Add(instructions_table_, Heap::kOld);
+ }
}
#endif
}
@@ -7616,6 +7710,7 @@
num_canonical_clusters_ = ReadUnsigned();
num_clusters_ = ReadUnsigned();
const intptr_t initial_field_table_len = ReadUnsigned();
+ const intptr_t instructions_table_len = ReadUnsigned();
canonical_clusters_ = new DeserializationCluster*[num_canonical_clusters_];
clusters_ = new DeserializationCluster*[num_clusters_];
@@ -7625,6 +7720,19 @@
ASSERT_EQUAL(initial_field_table_->NumFieldIds(), initial_field_table_len);
}
+#if defined(DART_PRECOMPILED_RUNTIME)
+ if (instructions_table_len > 0) {
+ ASSERT(FLAG_precompiled_mode && FLAG_use_bare_instructions);
+ const uword start_pc = image_reader_->GetBareInstructionsAt(0);
+ const uword end_pc = image_reader_->GetBareInstructionsEnd();
+ instructions_table_ =
+ InstructionsTable::New(instructions_table_len, start_pc, end_pc);
+ }
+#else
+ ASSERT(instructions_table_len == 0);
+ USE(instructions_table_len);
+#endif // defined(DART_PRECOMPILED_RUNTIME)
+
bool primary;
{
// The deserializer initializes objects without using the write barrier,
diff --git a/runtime/vm/clustered_snapshot.h b/runtime/vm/clustered_snapshot.h
index 79ef250..cbb34de 100644
--- a/runtime/vm/clustered_snapshot.h
+++ b/runtime/vm/clustered_snapshot.h
@@ -373,7 +373,10 @@
Write<int32_t>(cid);
}
- void PrepareInstructions();
+ // Sorts Code objects and reorders instructions before writing snapshot.
+ // Returns length of instructions table (in bare instructions mode).
+ intptr_t PrepareInstructions();
+
void WriteInstructions(InstructionsPtr instr,
uint32_t unchecked_offset,
CodePtr code,
@@ -460,6 +463,7 @@
intptr_t dispatch_table_size_ = 0;
intptr_t bytes_heap_allocated_ = 0;
+ intptr_t instructions_table_len_ = 0;
// True if writing VM snapshot, false for Isolate snapshot.
bool vm_;
@@ -652,18 +656,21 @@
return Read<int32_t>();
}
- void ReadInstructions(CodePtr code, bool deferred);
- void EndInstructions(const Array& refs,
- intptr_t start_index,
- intptr_t stop_index);
+ void ReadInstructions(CodePtr code, bool deferred, bool discarded);
+ void EndInstructions();
ObjectPtr GetObjectAt(uint32_t offset) const;
void Deserialize(DeserializationRoots* roots);
DeserializationCluster* ReadCluster();
- void ReadDispatchTable() { ReadDispatchTable(&stream_); }
- void ReadDispatchTable(ReadStream* stream);
+ void ReadDispatchTable() {
+ ReadDispatchTable(&stream_, /*deferred=*/false, -1, -1);
+ }
+ void ReadDispatchTable(ReadStream* stream,
+ bool deferred,
+ intptr_t deferred_code_start_index,
+ intptr_t deferred_code_end_index);
intptr_t next_index() const { return next_ref_index_; }
Heap* heap() const { return heap_; }
@@ -671,6 +678,11 @@
Snapshot::Kind kind() const { return kind_; }
FieldTable* initial_field_table() const { return initial_field_table_; }
bool is_non_root_unit() const { return is_non_root_unit_; }
+ void set_code_start_index(intptr_t value) { code_start_index_ = value; }
+ intptr_t code_start_index() { return code_start_index_; }
+ const InstructionsTable& instructions_table() const {
+ return instructions_table_;
+ }
private:
Heap* heap_;
@@ -685,10 +697,13 @@
ArrayPtr refs_;
intptr_t next_ref_index_;
intptr_t previous_text_offset_;
+ intptr_t code_start_index_ = 0;
+ intptr_t instructions_index_ = 0;
DeserializationCluster** canonical_clusters_;
DeserializationCluster** clusters_;
FieldTable* initial_field_table_;
const bool is_non_root_unit_;
+ InstructionsTable& instructions_table_;
};
#define ReadFromTo(obj, ...) d->ReadFromTo(obj, ##__VA_ARGS__);
diff --git a/runtime/vm/compiler/aot/precompiler.cc b/runtime/vm/compiler/aot/precompiler.cc
index 8946b68..8dacf84 100644
--- a/runtime/vm/compiler/aot/precompiler.cc
+++ b/runtime/vm/compiler/aot/precompiler.cc
@@ -1581,6 +1581,7 @@
Function::CreateDynamicInvocationForwarderName(selector);
function2 = function.GetDynamicInvocationForwarder(selector2);
AddFunction(function2, RetainReasons::kDynamicInvocationForwarder);
+ functions_called_dynamically_.Insert(function2);
}
} else if (function.kind() == UntaggedFunction::kRegularFunction) {
selector2 = Field::LookupGetterSymbol(selector);
@@ -1630,12 +1631,14 @@
function2 = function.GetDynamicInvocationForwarder(selector2);
AddFunction(function2,
RetainReasons::kDynamicInvocationForwarder);
+ functions_called_dynamically_.Insert(function2);
}
} else {
if (metadata.method_or_setter_called_dynamically) {
function2 = function.GetDynamicInvocationForwarder(selector2);
AddFunction(function2,
RetainReasons::kDynamicInvocationForwarder);
+ functions_called_dynamically_.Insert(function2);
}
}
}
@@ -2671,6 +2674,9 @@
// Traverse program structure and mark Code objects
// which do not have useful information as discarded.
+// Should be called after Precompiler::ReplaceFunctionStaticCallEntries().
+// Should be called before ProgramVisitor::Dedup() as Dedup may clear
+// static calls target table.
void Precompiler::DiscardCodeObjects() {
class DiscardCodeVisitor : public CodeVisitor {
public:
@@ -2680,15 +2686,45 @@
const FunctionSet& functions_called_dynamically)
: zone_(zone),
function_(Function::Handle(zone)),
+ class_(Class::Handle(zone)),
+ library_(Library::Handle(zone)),
+ loading_unit_(LoadingUnit::Handle(zone)),
+ static_calls_target_table_(Array::Handle(zone)),
+ kind_and_offset_(Smi::Handle(zone)),
+ call_target_(Code::Handle(zone)),
+ targets_of_calls_via_code_(
+ GrowableObjectArray::Handle(zone, GrowableObjectArray::New())),
functions_to_retain_(functions_to_retain),
entry_point_functions_(entry_point_functions),
functions_called_dynamically_(functions_called_dynamically) {}
+ // Certain static calls (e.g. between different loading units) are
+ // performed through Code objects indirectly. Such Code objects
+ // cannot be fully discarded.
+ void RecordCodeObjectsUsedForCalls(const Code& code) {
+ static_calls_target_table_ = code.static_calls_target_table();
+ if (static_calls_target_table_.IsNull()) return;
+
+ StaticCallsTable static_calls(static_calls_target_table_);
+ for (const auto& view : static_calls) {
+ kind_and_offset_ = view.Get<Code::kSCallTableKindAndOffset>();
+ auto const kind = Code::KindField::decode(kind_and_offset_.Value());
+ if (kind == Code::kCallViaCode) {
+ call_target_ =
+ Code::RawCast(view.Get<Code::kSCallTableCodeOrTypeTarget>());
+ ASSERT(!call_target_.IsNull());
+ targets_of_calls_via_code_.Add(call_target_);
+ }
+ }
+ }
+
void VisitCode(const Code& code) override {
++total_code_objects_;
+ RecordCodeObjectsUsedForCalls(code);
+
// Only discard Code objects corresponding to Dart functions.
- if (!code.IsFunctionCode()) {
+ if (!code.IsFunctionCode() || code.IsUnknownDartCode()) {
++non_function_codes_;
return;
}
@@ -2740,10 +2776,39 @@
ASSERT(!functions_called_dynamically_.ContainsKey(function_));
}
+ // Retain Code objects in the non-root loading unit as
+ // they are allocated while loading root unit but filled
+ // while loading another unit.
+ class_ = function_.Owner();
+ library_ = class_.library();
+ loading_unit_ = library_.loading_unit();
+ if (loading_unit_.id() != LoadingUnit::kRootId) {
+ ++codes_with_deferred_function_;
+ return;
+ }
+
+ // Retain Code objects corresponding to FFI trampolines.
+ if (function_.IsFfiTrampoline()) {
+ ++codes_with_ffi_trampoline_function_;
+ return;
+ }
+
code.set_is_discarded(true);
++discarded_codes_;
}
+ void RetainCodeObjectsUsedAsCallTargets() {
+ for (intptr_t i = 0, n = targets_of_calls_via_code_.Length(); i < n;
+ ++i) {
+ call_target_ = Code::RawCast(targets_of_calls_via_code_.At(i));
+ if (call_target_.is_discarded()) {
+ call_target_.set_is_discarded(false);
+ ++codes_used_as_call_targets_;
+ --discarded_codes_;
+ }
+ }
+ }
+
void PrintStatistics() const {
THR_Print("Discarding Code objects:\n");
THR_Print(" %8" Pd " non-function Codes\n", non_function_codes_);
@@ -2757,10 +2822,16 @@
codes_with_native_function_);
THR_Print(" %8" Pd " Codes with async closure functions\n",
codes_with_async_closure_function_);
- THR_Print(" %8" Pd " Codes with dynamically called functions\n",
- codes_with_dynamically_called_function_);
THR_Print(" %8" Pd " Codes with entry point functions\n",
codes_with_entry_point_function_);
+ THR_Print(" %8" Pd " Codes with dynamically called functions\n",
+ codes_with_dynamically_called_function_);
+ THR_Print(" %8" Pd " Codes with deferred functions\n",
+ codes_with_deferred_function_);
+ THR_Print(" %8" Pd " Codes with ffi trampoline functions\n",
+ codes_with_ffi_trampoline_function_);
+ THR_Print(" %8" Pd " Codes used as call targets\n",
+ codes_used_as_call_targets_);
THR_Print(" %8" Pd " Codes discarded\n", discarded_codes_);
THR_Print(" %8" Pd " Codes total\n", total_code_objects_);
}
@@ -2768,6 +2839,13 @@
private:
Zone* zone_;
Function& function_;
+ Class& class_;
+ Library& library_;
+ LoadingUnit& loading_unit_;
+ Array& static_calls_target_table_;
+ Smi& kind_and_offset_;
+ Code& call_target_;
+ GrowableObjectArray& targets_of_calls_via_code_;
const FunctionSet& functions_to_retain_;
const FunctionSet& entry_point_functions_;
const FunctionSet& functions_called_dynamically_;
@@ -2780,8 +2858,11 @@
intptr_t codes_with_invisible_function_ = 0;
intptr_t codes_with_native_function_ = 0;
intptr_t codes_with_async_closure_function_ = 0;
- intptr_t codes_with_dynamically_called_function_ = 0;
intptr_t codes_with_entry_point_function_ = 0;
+ intptr_t codes_with_dynamically_called_function_ = 0;
+ intptr_t codes_with_deferred_function_ = 0;
+ intptr_t codes_with_ffi_trampoline_function_ = 0;
+ intptr_t codes_used_as_call_targets_ = 0;
intptr_t discarded_codes_ = 0;
};
@@ -2796,6 +2877,7 @@
DiscardCodeVisitor visitor(Z, functions_to_retain_, entry_point_functions_,
functions_called_dynamically_);
ProgramVisitor::WalkProgram(Z, IG, &visitor);
+ visitor.RetainCodeObjectsUsedAsCallTargets();
if (FLAG_trace_precompiler) {
visitor.PrintStatistics();
diff --git a/runtime/vm/compiler/runtime_api.cc b/runtime/vm/compiler/runtime_api.cc
index 4db6350..0db035c 100644
--- a/runtime/vm/compiler/runtime_api.cc
+++ b/runtime/vm/compiler/runtime_api.cc
@@ -1151,6 +1151,16 @@
return -kWordSize;
}
+word InstructionsTable::InstanceSize(intptr_t length) {
+ return RoundedAllocationSize(InstructionsTable::InstanceSize() +
+ length *
+ dart::InstructionsTable::kBytesPerElement);
+}
+
+word InstructionsTable::NextFieldOffset() {
+ return -kWordSize;
+}
+
word Instructions::NextFieldOffset() {
return -kWordSize;
}
diff --git a/runtime/vm/compiler/runtime_api.h b/runtime/vm/compiler/runtime_api.h
index 43ec84a..de71523 100644
--- a/runtime/vm/compiler/runtime_api.h
+++ b/runtime/vm/compiler/runtime_api.h
@@ -1193,6 +1193,13 @@
static word NextFieldOffset();
};
+class InstructionsTable : public AllStatic {
+ public:
+ static word InstanceSize(intptr_t length);
+ static word InstanceSize();
+ static word NextFieldOffset();
+};
+
class Instructions : public AllStatic {
public:
static const word kMonomorphicEntryOffsetJIT;
diff --git a/runtime/vm/compiler/runtime_offsets_extracted.h b/runtime/vm/compiler/runtime_offsets_extracted.h
index 0f37245..5af8a8b 100644
--- a/runtime/vm/compiler/runtime_offsets_extracted.h
+++ b/runtime/vm/compiler/runtime_offsets_extracted.h
@@ -493,6 +493,8 @@
8;
static constexpr dart::compiler::target::word
InstructionsSection_UnalignedHeaderSize = 20;
+static constexpr dart::compiler::target::word InstructionsTable_InstanceSize =
+ 20;
static constexpr dart::compiler::target::word Int32x4_InstanceSize = 24;
static constexpr dart::compiler::target::word Integer_InstanceSize = 4;
static constexpr dart::compiler::target::word KernelProgramInfo_InstanceSize =
@@ -1033,6 +1035,8 @@
16;
static constexpr dart::compiler::target::word
InstructionsSection_UnalignedHeaderSize = 40;
+static constexpr dart::compiler::target::word InstructionsTable_InstanceSize =
+ 40;
static constexpr dart::compiler::target::word Int32x4_InstanceSize = 24;
static constexpr dart::compiler::target::word Integer_InstanceSize = 8;
static constexpr dart::compiler::target::word KernelProgramInfo_InstanceSize =
@@ -1563,6 +1567,8 @@
8;
static constexpr dart::compiler::target::word
InstructionsSection_UnalignedHeaderSize = 20;
+static constexpr dart::compiler::target::word InstructionsTable_InstanceSize =
+ 20;
static constexpr dart::compiler::target::word Int32x4_InstanceSize = 24;
static constexpr dart::compiler::target::word Integer_InstanceSize = 4;
static constexpr dart::compiler::target::word KernelProgramInfo_InstanceSize =
@@ -2104,6 +2110,8 @@
16;
static constexpr dart::compiler::target::word
InstructionsSection_UnalignedHeaderSize = 40;
+static constexpr dart::compiler::target::word InstructionsTable_InstanceSize =
+ 40;
static constexpr dart::compiler::target::word Int32x4_InstanceSize = 24;
static constexpr dart::compiler::target::word Integer_InstanceSize = 8;
static constexpr dart::compiler::target::word KernelProgramInfo_InstanceSize =
@@ -2643,6 +2651,8 @@
16;
static constexpr dart::compiler::target::word
InstructionsSection_UnalignedHeaderSize = 40;
+static constexpr dart::compiler::target::word InstructionsTable_InstanceSize =
+ 40;
static constexpr dart::compiler::target::word Int32x4_InstanceSize = 24;
static constexpr dart::compiler::target::word Integer_InstanceSize = 8;
static constexpr dart::compiler::target::word KernelProgramInfo_InstanceSize =
@@ -3183,6 +3193,8 @@
16;
static constexpr dart::compiler::target::word
InstructionsSection_UnalignedHeaderSize = 40;
+static constexpr dart::compiler::target::word InstructionsTable_InstanceSize =
+ 40;
static constexpr dart::compiler::target::word Int32x4_InstanceSize = 24;
static constexpr dart::compiler::target::word Integer_InstanceSize = 8;
static constexpr dart::compiler::target::word KernelProgramInfo_InstanceSize =
@@ -3712,6 +3724,8 @@
8;
static constexpr dart::compiler::target::word
InstructionsSection_UnalignedHeaderSize = 20;
+static constexpr dart::compiler::target::word InstructionsTable_InstanceSize =
+ 20;
static constexpr dart::compiler::target::word Int32x4_InstanceSize = 24;
static constexpr dart::compiler::target::word Integer_InstanceSize = 4;
static constexpr dart::compiler::target::word KernelProgramInfo_InstanceSize =
@@ -4246,6 +4260,8 @@
16;
static constexpr dart::compiler::target::word
InstructionsSection_UnalignedHeaderSize = 40;
+static constexpr dart::compiler::target::word InstructionsTable_InstanceSize =
+ 40;
static constexpr dart::compiler::target::word Int32x4_InstanceSize = 24;
static constexpr dart::compiler::target::word Integer_InstanceSize = 8;
static constexpr dart::compiler::target::word KernelProgramInfo_InstanceSize =
@@ -4770,6 +4786,8 @@
8;
static constexpr dart::compiler::target::word
InstructionsSection_UnalignedHeaderSize = 20;
+static constexpr dart::compiler::target::word InstructionsTable_InstanceSize =
+ 20;
static constexpr dart::compiler::target::word Int32x4_InstanceSize = 24;
static constexpr dart::compiler::target::word Integer_InstanceSize = 4;
static constexpr dart::compiler::target::word KernelProgramInfo_InstanceSize =
@@ -5305,6 +5323,8 @@
16;
static constexpr dart::compiler::target::word
InstructionsSection_UnalignedHeaderSize = 40;
+static constexpr dart::compiler::target::word InstructionsTable_InstanceSize =
+ 40;
static constexpr dart::compiler::target::word Int32x4_InstanceSize = 24;
static constexpr dart::compiler::target::word Integer_InstanceSize = 8;
static constexpr dart::compiler::target::word KernelProgramInfo_InstanceSize =
@@ -5838,6 +5858,8 @@
16;
static constexpr dart::compiler::target::word
InstructionsSection_UnalignedHeaderSize = 40;
+static constexpr dart::compiler::target::word InstructionsTable_InstanceSize =
+ 40;
static constexpr dart::compiler::target::word Int32x4_InstanceSize = 24;
static constexpr dart::compiler::target::word Integer_InstanceSize = 8;
static constexpr dart::compiler::target::word KernelProgramInfo_InstanceSize =
@@ -6372,6 +6394,8 @@
16;
static constexpr dart::compiler::target::word
InstructionsSection_UnalignedHeaderSize = 40;
+static constexpr dart::compiler::target::word InstructionsTable_InstanceSize =
+ 40;
static constexpr dart::compiler::target::word Int32x4_InstanceSize = 24;
static constexpr dart::compiler::target::word Integer_InstanceSize = 8;
static constexpr dart::compiler::target::word KernelProgramInfo_InstanceSize =
@@ -6957,6 +6981,8 @@
AOT_Instructions_UnalignedHeaderSize = 8;
static constexpr dart::compiler::target::word
AOT_InstructionsSection_UnalignedHeaderSize = 20;
+static constexpr dart::compiler::target::word
+ AOT_InstructionsTable_InstanceSize = 20;
static constexpr dart::compiler::target::word AOT_Int32x4_InstanceSize = 24;
static constexpr dart::compiler::target::word AOT_Integer_InstanceSize = 4;
static constexpr dart::compiler::target::word
@@ -7555,6 +7581,8 @@
AOT_Instructions_UnalignedHeaderSize = 16;
static constexpr dart::compiler::target::word
AOT_InstructionsSection_UnalignedHeaderSize = 40;
+static constexpr dart::compiler::target::word
+ AOT_InstructionsTable_InstanceSize = 40;
static constexpr dart::compiler::target::word AOT_Int32x4_InstanceSize = 24;
static constexpr dart::compiler::target::word AOT_Integer_InstanceSize = 8;
static constexpr dart::compiler::target::word
@@ -8157,6 +8185,8 @@
AOT_Instructions_UnalignedHeaderSize = 16;
static constexpr dart::compiler::target::word
AOT_InstructionsSection_UnalignedHeaderSize = 40;
+static constexpr dart::compiler::target::word
+ AOT_InstructionsTable_InstanceSize = 40;
static constexpr dart::compiler::target::word AOT_Int32x4_InstanceSize = 24;
static constexpr dart::compiler::target::word AOT_Integer_InstanceSize = 8;
static constexpr dart::compiler::target::word
@@ -8755,6 +8785,8 @@
AOT_Instructions_UnalignedHeaderSize = 16;
static constexpr dart::compiler::target::word
AOT_InstructionsSection_UnalignedHeaderSize = 40;
+static constexpr dart::compiler::target::word
+ AOT_InstructionsTable_InstanceSize = 40;
static constexpr dart::compiler::target::word AOT_Int32x4_InstanceSize = 24;
static constexpr dart::compiler::target::word AOT_Integer_InstanceSize = 8;
static constexpr dart::compiler::target::word
@@ -9354,6 +9386,8 @@
AOT_Instructions_UnalignedHeaderSize = 16;
static constexpr dart::compiler::target::word
AOT_InstructionsSection_UnalignedHeaderSize = 40;
+static constexpr dart::compiler::target::word
+ AOT_InstructionsTable_InstanceSize = 40;
static constexpr dart::compiler::target::word AOT_Int32x4_InstanceSize = 24;
static constexpr dart::compiler::target::word AOT_Integer_InstanceSize = 8;
static constexpr dart::compiler::target::word
@@ -9943,6 +9977,8 @@
AOT_Instructions_UnalignedHeaderSize = 8;
static constexpr dart::compiler::target::word
AOT_InstructionsSection_UnalignedHeaderSize = 20;
+static constexpr dart::compiler::target::word
+ AOT_InstructionsTable_InstanceSize = 20;
static constexpr dart::compiler::target::word AOT_Int32x4_InstanceSize = 24;
static constexpr dart::compiler::target::word AOT_Integer_InstanceSize = 4;
static constexpr dart::compiler::target::word
@@ -10534,6 +10570,8 @@
AOT_Instructions_UnalignedHeaderSize = 16;
static constexpr dart::compiler::target::word
AOT_InstructionsSection_UnalignedHeaderSize = 40;
+static constexpr dart::compiler::target::word
+ AOT_InstructionsTable_InstanceSize = 40;
static constexpr dart::compiler::target::word AOT_Int32x4_InstanceSize = 24;
static constexpr dart::compiler::target::word AOT_Integer_InstanceSize = 8;
static constexpr dart::compiler::target::word
@@ -11129,6 +11167,8 @@
AOT_Instructions_UnalignedHeaderSize = 16;
static constexpr dart::compiler::target::word
AOT_InstructionsSection_UnalignedHeaderSize = 40;
+static constexpr dart::compiler::target::word
+ AOT_InstructionsTable_InstanceSize = 40;
static constexpr dart::compiler::target::word AOT_Int32x4_InstanceSize = 24;
static constexpr dart::compiler::target::word AOT_Integer_InstanceSize = 8;
static constexpr dart::compiler::target::word
@@ -11720,6 +11760,8 @@
AOT_Instructions_UnalignedHeaderSize = 16;
static constexpr dart::compiler::target::word
AOT_InstructionsSection_UnalignedHeaderSize = 40;
+static constexpr dart::compiler::target::word
+ AOT_InstructionsTable_InstanceSize = 40;
static constexpr dart::compiler::target::word AOT_Int32x4_InstanceSize = 24;
static constexpr dart::compiler::target::word AOT_Integer_InstanceSize = 8;
static constexpr dart::compiler::target::word
@@ -12312,6 +12354,8 @@
AOT_Instructions_UnalignedHeaderSize = 16;
static constexpr dart::compiler::target::word
AOT_InstructionsSection_UnalignedHeaderSize = 40;
+static constexpr dart::compiler::target::word
+ AOT_InstructionsTable_InstanceSize = 40;
static constexpr dart::compiler::target::word AOT_Int32x4_InstanceSize = 24;
static constexpr dart::compiler::target::word AOT_Integer_InstanceSize = 8;
static constexpr dart::compiler::target::word
diff --git a/runtime/vm/compiler/runtime_offsets_list.h b/runtime/vm/compiler/runtime_offsets_list.h
index d2dac49..aa2e4a2 100644
--- a/runtime/vm/compiler/runtime_offsets_list.h
+++ b/runtime/vm/compiler/runtime_offsets_list.h
@@ -340,6 +340,7 @@
SIZEOF(Instructions, UnalignedHeaderSize, UntaggedInstructions) \
SIZEOF(InstructionsSection, UnalignedHeaderSize, \
UntaggedInstructionsSection) \
+ SIZEOF(InstructionsTable, InstanceSize, UntaggedInstructionsTable) \
SIZEOF(Int32x4, InstanceSize, UntaggedInt32x4) \
SIZEOF(Integer, InstanceSize, UntaggedInteger) \
SIZEOF(KernelProgramInfo, InstanceSize, UntaggedKernelProgramInfo) \
diff --git a/runtime/vm/compiler/stub_code_compiler.cc b/runtime/vm/compiler/stub_code_compiler.cc
index bbcdc8c..7b7b39a 100644
--- a/runtime/vm/compiler/stub_code_compiler.cc
+++ b/runtime/vm/compiler/stub_code_compiler.cc
@@ -888,6 +888,8 @@
}
void StubCodeCompiler::GenerateUnknownDartCodeStub(Assembler* assembler) {
+ // Enter a stub frame so that the caller is included in the backtrace.
+ __ EnterStubFrame();
__ Breakpoint(); // Marker stub.
}
diff --git a/runtime/vm/compiler/stub_code_compiler_arm.cc b/runtime/vm/compiler/stub_code_compiler_arm.cc
index 515e680..d4219d1 100644
--- a/runtime/vm/compiler/stub_code_compiler_arm.cc
+++ b/runtime/vm/compiler/stub_code_compiler_arm.cc
@@ -1168,7 +1168,7 @@
// Called when invoking Dart code from C++ (VM code).
// Input parameters:
// LR : points to return address.
-// R0 : code object of the Dart function to call.
+// R0 : target code or entry point (in bare instructions mode).
// R1 : arguments descriptor array.
// R2 : arguments array.
// R3 : current thread.
@@ -1260,11 +1260,12 @@
// Call the Dart code entrypoint.
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
__ SetupGlobalPoolAndDispatchTable();
+ __ LoadImmediate(CODE_REG, 0); // GC safe value into CODE_REG.
} else {
__ LoadImmediate(PP, 0); // GC safe value into PP.
+ __ ldr(CODE_REG, Address(R0, target::VMHandles::kOffsetOfRawPtrInHandle));
+ __ ldr(R0, FieldAddress(CODE_REG, target::Code::entry_point_offset()));
}
- __ ldr(CODE_REG, Address(R0, target::VMHandles::kOffsetOfRawPtrInHandle));
- __ ldr(R0, FieldAddress(CODE_REG, target::Code::entry_point_offset()));
__ blx(R0); // R4 is the arguments descriptor array.
// Get rid of arguments pushed on the stack.
diff --git a/runtime/vm/compiler/stub_code_compiler_arm64.cc b/runtime/vm/compiler/stub_code_compiler_arm64.cc
index bc5eb6a..11062c9 100644
--- a/runtime/vm/compiler/stub_code_compiler_arm64.cc
+++ b/runtime/vm/compiler/stub_code_compiler_arm64.cc
@@ -1304,7 +1304,7 @@
// Called when invoking Dart code from C++ (VM code).
// Input parameters:
// LR : points to return address.
-// R0 : code object of the Dart function to call.
+// R0 : target code or entry point (in bare instructions mode).
// R1 : arguments descriptor array.
// R2 : arguments array.
// R3 : current thread.
@@ -1404,16 +1404,17 @@
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
__ SetupGlobalPoolAndDispatchTable();
+ __ mov(CODE_REG, ZR); // GC-safe value into CODE_REG.
} else {
// We now load the pool pointer(PP) with a GC safe value as we are about to
// invoke dart code. We don't need a real object pool here.
// Smi zero does not work because ARM64 assumes PP to be untagged.
__ LoadObject(PP, NullObject());
+ __ ldr(CODE_REG, Address(R0, VMHandles::kOffsetOfRawPtrInHandle));
+ __ ldr(R0, FieldAddress(CODE_REG, target::Code::entry_point_offset()));
}
// Call the Dart code entrypoint.
- __ ldr(CODE_REG, Address(R0, VMHandles::kOffsetOfRawPtrInHandle));
- __ ldr(R0, FieldAddress(CODE_REG, target::Code::entry_point_offset()));
__ blr(R0); // R4 is the arguments descriptor array.
__ Comment("InvokeDartCodeStub return");
diff --git a/runtime/vm/compiler/stub_code_compiler_x64.cc b/runtime/vm/compiler/stub_code_compiler_x64.cc
index 8d2987a..daf1223 100644
--- a/runtime/vm/compiler/stub_code_compiler_x64.cc
+++ b/runtime/vm/compiler/stub_code_compiler_x64.cc
@@ -1231,7 +1231,7 @@
// Called when invoking Dart code from C++ (VM code).
// Input parameters:
// RSP : points to return address.
-// RDI : target code
+// RDI : target code or entry point (in bare instructions mode).
// RSI : arguments descriptor array.
// RDX : arguments array.
// RCX : current thread.
@@ -1239,7 +1239,7 @@
__ pushq(Address(RSP, 0)); // Marker for the profiler.
__ EnterFrame(0);
- const Register kTargetCodeReg = CallingConventions::kArg1Reg;
+ const Register kTargetReg = CallingConventions::kArg1Reg;
const Register kArgDescReg = CallingConventions::kArg2Reg;
const Register kArgsReg = CallingConventions::kArg3Reg;
const Register kThreadReg = CallingConventions::kArg4Reg;
@@ -1304,8 +1304,8 @@
// Load arguments descriptor array into R10, which is passed to Dart code.
__ movq(R10, Address(kArgDescReg, VMHandles::kOffsetOfRawPtrInHandle));
- // Push arguments. At this point we only need to preserve kTargetCodeReg.
- ASSERT(kTargetCodeReg != RDX);
+ // Push arguments. At this point we only need to preserve kTargetReg.
+ ASSERT(kTargetReg != RDX);
// Load number of arguments into RBX and adjust count for type arguments.
__ movq(RBX, FieldAddress(R10, target::ArgumentsDescriptor::count_offset()));
@@ -1339,14 +1339,14 @@
// Call the Dart code entrypoint.
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
__ movq(PP, Address(THR, target::Thread::global_object_pool_offset()));
+ __ xorq(CODE_REG, CODE_REG); // GC-safe value into CODE_REG.
} else {
__ xorq(PP, PP); // GC-safe value into PP.
+ __ movq(CODE_REG, Address(kTargetReg, VMHandles::kOffsetOfRawPtrInHandle));
+ __ movq(kTargetReg,
+ FieldAddress(CODE_REG, target::Code::entry_point_offset()));
}
- __ movq(CODE_REG,
- Address(kTargetCodeReg, VMHandles::kOffsetOfRawPtrInHandle));
- __ movq(kTargetCodeReg,
- FieldAddress(CODE_REG, target::Code::entry_point_offset()));
- __ call(kTargetCodeReg); // R10 is the arguments descriptor array.
+ __ call(kTargetReg); // R10 is the arguments descriptor array.
// Read the saved number of passed arguments as Smi.
__ movq(RDX, Address(RBP, kArgumentsDescOffset));
diff --git a/runtime/vm/dart_entry.cc b/runtime/vm/dart_entry.cc
index 058d039..0b5b142 100644
--- a/runtime/vm/dart_entry.cc
+++ b/runtime/vm/dart_entry.cc
@@ -144,7 +144,8 @@
// Now Call the invoke stub which will invoke the dart function.
const Code& code = Code::Handle(zone, function.CurrentCode());
- return InvokeCode(code, arguments_descriptor, arguments, thread);
+ return InvokeCode(code, function.entry_point(), arguments_descriptor,
+ arguments, thread);
}
extern "C" {
@@ -155,10 +156,16 @@
const Array& arguments_descriptor,
const Array& arguments,
Thread* thread);
+typedef uword /*ObjectPtr*/ (*invokestub_bare_instructions)(
+ uword entry_point,
+ const Array& arguments_descriptor,
+ const Array& arguments,
+ Thread* thread);
}
NO_SANITIZE_SAFE_STACK
ObjectPtr DartEntry::InvokeCode(const Code& code,
+ uword entry_point,
const Array& arguments_descriptor,
const Array& arguments,
Thread* thread) {
@@ -166,19 +173,27 @@
ASSERT(thread->no_callback_scope_depth() == 0);
ASSERT(!IsolateGroup::Current()->null_safety_not_set());
- invokestub entrypoint =
- reinterpret_cast<invokestub>(StubCode::InvokeDartCode().EntryPoint());
+ const uword stub = StubCode::InvokeDartCode().EntryPoint();
SuspendLongJumpScope suspend_long_jump_scope(thread);
TransitionToGenerated transition(thread);
#if defined(USING_SIMULATOR)
return bit_copy<ObjectPtr, int64_t>(Simulator::Current()->Call(
- reinterpret_cast<intptr_t>(entrypoint), reinterpret_cast<intptr_t>(&code),
+ static_cast<intptr_t>(stub),
+ ((FLAG_precompiled_mode && FLAG_use_bare_instructions)
+ ? static_cast<intptr_t>(entry_point)
+ : reinterpret_cast<intptr_t>(&code)),
reinterpret_cast<intptr_t>(&arguments_descriptor),
reinterpret_cast<intptr_t>(&arguments),
reinterpret_cast<intptr_t>(thread)));
#else
- return static_cast<ObjectPtr>(
- entrypoint(code, arguments_descriptor, arguments, thread));
+ if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
+ return static_cast<ObjectPtr>(
+ (reinterpret_cast<invokestub_bare_instructions>(stub))(
+ entry_point, arguments_descriptor, arguments, thread));
+ } else {
+ return static_cast<ObjectPtr>((reinterpret_cast<invokestub>(stub))(
+ code, arguments_descriptor, arguments, thread));
+ }
#endif
}
diff --git a/runtime/vm/dart_entry.h b/runtime/vm/dart_entry.h
index 2a33464..d91d838 100644
--- a/runtime/vm/dart_entry.h
+++ b/runtime/vm/dart_entry.h
@@ -194,6 +194,7 @@
// Invokes the specified code as if it was a Dart function.
// On success, returns an InstancePtr. On failure, an ErrorPtr.
static ObjectPtr InvokeCode(const Code& code,
+ uword entry_point,
const Array& arguments_descriptor,
const Array& arguments,
Thread* thread);
diff --git a/runtime/vm/object.cc b/runtime/vm/object.cc
index 6eff1b4..ec54a16 100644
--- a/runtime/vm/object.cc
+++ b/runtime/vm/object.cc
@@ -156,6 +156,7 @@
ClassPtr Object::code_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::instructions_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::instructions_section_class_ = static_cast<ClassPtr>(RAW_NULL);
+ClassPtr Object::instructions_table_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::object_pool_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::pc_descriptors_class_ = static_cast<ClassPtr>(RAW_NULL);
ClassPtr Object::code_source_map_class_ = static_cast<ClassPtr>(RAW_NULL);
@@ -824,6 +825,9 @@
Class::New<InstructionsSection, RTN::InstructionsSection>(isolate_group);
instructions_section_class_ = cls.ptr();
+ cls = Class::New<InstructionsTable, RTN::InstructionsTable>(isolate_group);
+ instructions_table_class_ = cls.ptr();
+
cls = Class::New<ObjectPool, RTN::ObjectPool>(isolate_group);
object_pool_class_ = cls.ptr();
@@ -1245,6 +1249,7 @@
code_class_ = static_cast<ClassPtr>(RAW_NULL);
instructions_class_ = static_cast<ClassPtr>(RAW_NULL);
instructions_section_class_ = static_cast<ClassPtr>(RAW_NULL);
+ instructions_table_class_ = static_cast<ClassPtr>(RAW_NULL);
object_pool_class_ = static_cast<ClassPtr>(RAW_NULL);
pc_descriptors_class_ = static_cast<ClassPtr>(RAW_NULL);
code_source_map_class_ = static_cast<ClassPtr>(RAW_NULL);
@@ -1345,6 +1350,7 @@
SET_CLASS_NAME(code, Code);
SET_CLASS_NAME(instructions, Instructions);
SET_CLASS_NAME(instructions_section, InstructionsSection);
+ SET_CLASS_NAME(instructions_table, InstructionsTable);
SET_CLASS_NAME(object_pool, ObjectPool);
SET_CLASS_NAME(code_source_map, CodeSourceMap);
SET_CLASS_NAME(pc_descriptors, PcDescriptors);
@@ -4869,6 +4875,8 @@
return Symbols::Instructions().ToCString();
case kInstructionsSectionCid:
return Symbols::InstructionsSection().ToCString();
+ case kInstructionsTableCid:
+ return Symbols::InstructionsTable().ToCString();
case kObjectPoolCid:
return Symbols::ObjectPool().ToCString();
case kCodeSourceMapCid:
@@ -9757,7 +9765,10 @@
kernel::ProcedureAttributesMetadata metadata;
metadata = kernel::ProcedureAttributesOf(*this, zone);
if (IsGetterFunction() || IsImplicitGetterFunction() || IsMethodExtractor()) {
- return metadata.getter_called_dynamically;
+ // A dynamic method call performed through a field or getter also
+ // involves a dynamic invocation of that field/getter itself.
+ return metadata.getter_called_dynamically ||
+ metadata.method_or_setter_called_dynamically;
} else {
return metadata.method_or_setter_called_dynamically;
}
@@ -14149,6 +14160,125 @@
return "InstructionsSection";
}
+void InstructionsTable::set_length(intptr_t value) const {
+ StoreNonPointer(&untag()->length_, value);
+}
+
+void InstructionsTable::set_start_pc(uword value) const {
+ StoreNonPointer(&untag()->start_pc_, value);
+}
+
+void InstructionsTable::set_end_pc(uword value) const {
+ StoreNonPointer(&untag()->end_pc_, value);
+}
+
+void InstructionsTable::set_descriptors(const Array& value) const {
+ untag()->set_descriptors(value.ptr());
+}
+
+InstructionsTablePtr InstructionsTable::New(intptr_t length,
+ uword start_pc,
+ uword end_pc) {
+ ASSERT(Object::instructions_table_class() != Class::null());
+ ASSERT(length >= 0);
+ ASSERT(start_pc <= end_pc);
+ ASSERT(Utils::IsAligned(start_pc, kPayloadAlignment));
+ Thread* thread = Thread::Current();
+ InstructionsTable& result = InstructionsTable::Handle(thread->zone());
+ {
+ uword size = InstructionsTable::InstanceSize(length);
+ ObjectPtr raw = Object::Allocate(InstructionsTable::kClassId, size,
+ Heap::kOld, /*compressed*/ false);
+ NoSafepointScope no_safepoint;
+ result ^= raw;
+ result.set_length(length);
+ }
+ const Array& descriptors =
+ (length == 0) ? Object::empty_array()
+ : Array::Handle(Array::New(length, Heap::kOld));
+ result.set_descriptors(descriptors);
+ result.set_start_pc(start_pc);
+ result.set_end_pc(end_pc);
+ return result.ptr();
+}
+
+void InstructionsTable::SetEntryAt(intptr_t index,
+ uword payload_start,
+ bool has_monomorphic_entrypoint,
+ ObjectPtr descriptor) const {
+ ASSERT((0 <= index) && (index < length()));
+ ASSERT(ContainsPc(payload_start));
+ ASSERT(Utils::IsAligned(payload_start, kPayloadAlignment));
+
+ const uint32_t pc_offset = ConvertPcToOffset(payload_start);
+ ASSERT((index == 0) || (PcOffsetAt(index - 1) <= pc_offset));
+ ASSERT((pc_offset & kHasMonomorphicEntrypointFlag) == 0);
+
+ untag()->data()[index] =
+ pc_offset |
+ (has_monomorphic_entrypoint ? kHasMonomorphicEntrypointFlag : 0);
+ descriptors()->untag()->set_element(index, descriptor);
+}
+
+bool InstructionsTable::ContainsPc(InstructionsTablePtr table, uword pc) {
+ return (InstructionsTable::start_pc(table) <= pc) &&
+ (pc < InstructionsTable::end_pc(table));
+}
+
+uint32_t InstructionsTable::ConvertPcToOffset(InstructionsTablePtr table,
+ uword pc) {
+ ASSERT(InstructionsTable::ContainsPc(table, pc));
+ const uint32_t pc_offset =
+ static_cast<uint32_t>(pc - InstructionsTable::start_pc(table));
+ ASSERT(InstructionsTable::start_pc(table) + pc_offset == pc); // No overflow.
+ return pc_offset;
+}
+
+intptr_t InstructionsTable::FindEntry(InstructionsTablePtr table, uword pc) {
+ // This can run in the middle of GC and must not allocate handles.
+ NoSafepointScope no_safepoint;
+ if (!InstructionsTable::ContainsPc(table, pc)) return -1;
+ const uint32_t pc_offset = InstructionsTable::ConvertPcToOffset(table, pc);
+ intptr_t lo = 0;
+ intptr_t hi = InstructionsTable::length(table) - 1;
+ while (lo <= hi) {
+ intptr_t mid = (hi - lo + 1) / 2 + lo;
+ ASSERT(mid >= lo);
+ ASSERT(mid <= hi);
+ if (pc_offset < InstructionsTable::PcOffsetAt(table, mid)) {
+ hi = mid - 1;
+ } else if ((mid != hi) &&
+ (pc_offset >= InstructionsTable::PcOffsetAt(table, mid + 1))) {
+ lo = mid + 1;
+ } else {
+ return mid;
+ }
+ }
+ return -1;
+}
+
+ObjectPtr InstructionsTable::DescriptorAt(InstructionsTablePtr table,
+ intptr_t index) {
+ ASSERT((0 <= index) && (index < InstructionsTable::length(table)));
+ return table->untag()->descriptors()->untag()->element(index);
+}
+
+uword InstructionsTable::PayloadStartAt(InstructionsTablePtr table,
+ intptr_t index) {
+ return InstructionsTable::start_pc(table) +
+ InstructionsTable::PcOffsetAt(table, index);
+}
+
+uword InstructionsTable::EntryPointAt(intptr_t index) const {
+ return PayloadStartAt(index) + (HasMonomorphicEntryPointAt(index)
+ ? Instructions::kPolymorphicEntryOffsetAOT
+ : 0);
+}
+
+const char* InstructionsTable::ToCString() const {
+ return "InstructionsTable";
+}
+
ObjectPoolPtr ObjectPool::New(intptr_t len) {
ASSERT(Object::object_pool_class() != Class::null());
if (len < 0 || len > kMaxElements) {
@@ -16979,7 +17109,8 @@
}
bool Code::IsUnknownDartCode(CodePtr code) {
- return code == StubCode::UnknownDartCode().ptr();
+ return StubCode::HasBeenInitialized() &&
+ (code == StubCode::UnknownDartCode().ptr());
}
void Code::DisableDartCode() const {
diff --git a/runtime/vm/object.h b/runtime/vm/object.h
index 4f4fb6c..8c000c9 100644
--- a/runtime/vm/object.h
+++ b/runtime/vm/object.h
@@ -479,6 +479,9 @@
static ClassPtr instructions_section_class() {
return instructions_section_class_;
}
+ static ClassPtr instructions_table_class() {
+ return instructions_table_class_;
+ }
static ClassPtr object_pool_class() { return object_pool_class_; }
static ClassPtr pc_descriptors_class() { return pc_descriptors_class_; }
static ClassPtr code_source_map_class() { return code_source_map_class_; }
@@ -790,6 +793,7 @@
static ClassPtr instructions_class_; // Class of the Instructions vm object.
static ClassPtr instructions_section_class_; // Class of InstructionsSection.
+ static ClassPtr instructions_table_class_; // Class of InstructionsTable.
static ClassPtr object_pool_class_; // Class of the ObjectPool vm object.
static ClassPtr pc_descriptors_class_; // Class of PcDescriptors vm object.
static ClassPtr code_source_map_class_; // Class of CodeSourceMap vm object.
@@ -2662,6 +2666,8 @@
static intptr_t code_offset() { return OFFSET_OF(UntaggedFunction, code_); }
+ uword entry_point() const { return untag()->entry_point_; }
+
static intptr_t entry_point_offset(
CodeEntryKind entry_kind = CodeEntryKind::kNormal) {
switch (entry_kind) {
@@ -5402,6 +5408,107 @@
friend class Class;
};
+// Table which maps ranges of machine code to [Code] or
+// [CompressedStackMaps] objects.
+// Used in AOT in bare instructions mode.
+class InstructionsTable : public Object {
+ public:
+ static const intptr_t kBytesPerElement = sizeof(uint32_t);
+ static const intptr_t kMaxElements = kIntptrMax / kBytesPerElement;
+
+ static const uint32_t kHasMonomorphicEntrypointFlag = 0x1;
+ static const uint32_t kPayloadAlignment = Instructions::kBarePayloadAlignment;
+ static const uint32_t kPayloadMask = ~(kPayloadAlignment - 1);
+ COMPILE_ASSERT((kPayloadMask & kHasMonomorphicEntrypointFlag) == 0);
+
+ static intptr_t InstanceSize() {
+ ASSERT_EQUAL(sizeof(UntaggedInstructionsTable),
+ OFFSET_OF_RETURNED_VALUE(UntaggedInstructionsTable, data));
+ return 0;
+ }
+ static intptr_t InstanceSize(intptr_t len) {
+ ASSERT(0 <= len && len <= kMaxElements);
+ return RoundedAllocationSize(sizeof(UntaggedInstructionsTable) +
+ len * kBytesPerElement);
+ }
+
+ static InstructionsTablePtr New(intptr_t length,
+ uword start_pc,
+ uword end_pc);
+
+ void SetEntryAt(intptr_t index,
+ uword payload_start,
+ bool has_monomorphic_entrypoint,
+ ObjectPtr descriptor) const;
+
+ bool ContainsPc(uword pc) const { return ContainsPc(ptr(), pc); }
+ static bool ContainsPc(InstructionsTablePtr table, uword pc);
+
+ // Searches [table] for the entry covering the given [pc].
+ // Returns the index of the entry which contains [pc], or -1 if not found.
+ static intptr_t FindEntry(InstructionsTablePtr table, uword pc);
+
+ intptr_t length() const { return InstructionsTable::length(this->ptr()); }
+ static intptr_t length(InstructionsTablePtr table) {
+ return table->untag()->length_;
+ }
+
+ // Returns the descriptor object for the entry with the given index.
+ ObjectPtr DescriptorAt(intptr_t index) const {
+ return InstructionsTable::DescriptorAt(this->ptr(), index);
+ }
+ static ObjectPtr DescriptorAt(InstructionsTablePtr table, intptr_t index);
+
+ // Returns start address of the instructions entry with given index.
+ uword PayloadStartAt(intptr_t index) const {
+ return InstructionsTable::PayloadStartAt(this->ptr(), index);
+ }
+ static uword PayloadStartAt(InstructionsTablePtr table, intptr_t index);
+
+ // Returns entry point of the instructions with given index.
+ uword EntryPointAt(intptr_t index) const;
+
+ private:
+ uword start_pc() const { return InstructionsTable::start_pc(this->ptr()); }
+ static uword start_pc(InstructionsTablePtr table) {
+ return table->untag()->start_pc_;
+ }
+
+ uword end_pc() const { return InstructionsTable::end_pc(this->ptr()); }
+ static uword end_pc(InstructionsTablePtr table) {
+ return table->untag()->end_pc_;
+ }
+
+ ArrayPtr descriptors() const { return untag()->descriptors_; }
+
+ static uint32_t DataAt(InstructionsTablePtr table, intptr_t index) {
+ ASSERT((0 <= index) && (index < InstructionsTable::length(table)));
+ return table->untag()->data()[index];
+ }
+ uint32_t PcOffsetAt(intptr_t index) const {
+ return InstructionsTable::PcOffsetAt(this->ptr(), index);
+ }
+ static uint32_t PcOffsetAt(InstructionsTablePtr table, intptr_t index) {
+ return DataAt(table, index) & kPayloadMask;
+ }
+ bool HasMonomorphicEntryPointAt(intptr_t index) const {
+ return (DataAt(this->ptr(), index) & kHasMonomorphicEntrypointFlag) != 0;
+ }
+
+ void set_length(intptr_t value) const;
+ void set_start_pc(uword value) const;
+ void set_end_pc(uword value) const;
+ void set_descriptors(const Array& value) const;
+
+ uint32_t ConvertPcToOffset(uword pc) const {
+ return InstructionsTable::ConvertPcToOffset(this->ptr(), pc);
+ }
+ static uint32_t ConvertPcToOffset(InstructionsTablePtr table, uword pc);
+
+ FINAL_HEAP_OBJECT_IMPLEMENTATION(InstructionsTable, Object);
+ friend class Class;
+};
+
class LocalVarDescriptors : public Object {
public:
intptr_t Length() const;
@@ -6412,8 +6519,8 @@
// Set by precompiler if this Code object doesn't contain
// useful information besides instructions and compressed stack map.
- // Such object is serialized in a shorter form. (In future such
- // Code objects will not be re-created during snapshot deserialization.)
+ // Such objects are serialized in a shorter form and replaced with
+ // StubCode::UnknownDartCode() during snapshot deserialization.
class DiscardedBit : public BitField<int32_t, bool, kDiscardedBit, 1> {};
class PtrOffBits
diff --git a/runtime/vm/object_graph.cc b/runtime/vm/object_graph.cc
index c9b0bf3..9df16ee 100644
--- a/runtime/vm/object_graph.cc
+++ b/runtime/vm/object_graph.cc
@@ -1308,6 +1308,7 @@
case kImmutableArrayCid:
case kInstructionsCid:
case kInstructionsSectionCid:
+ case kInstructionsTableCid:
case kLinkedHashMapCid:
case kMintCid:
case kNeverCid:
diff --git a/runtime/vm/object_service.cc b/runtime/vm/object_service.cc
index 9edd135..7f3da19 100644
--- a/runtime/vm/object_service.cc
+++ b/runtime/vm/object_service.cc
@@ -623,6 +623,10 @@
Object::PrintJSONImpl(stream, ref);
}
+void InstructionsTable::PrintJSONImpl(JSONStream* stream, bool ref) const {
+ Object::PrintJSONImpl(stream, ref);
+}
+
void WeakSerializationReference::PrintJSONImpl(JSONStream* stream,
bool ref) const {
JSONObject jsobj(stream);
diff --git a/runtime/vm/object_store.h b/runtime/vm/object_store.h
index a452fd3..e741716 100644
--- a/runtime/vm/object_store.h
+++ b/runtime/vm/object_store.h
@@ -231,7 +231,7 @@
RW(Code, unreachable_tts_stub) \
RW(Code, slow_tts_stub) \
RW(Array, dispatch_table_code_entries) \
- RW(GrowableObjectArray, code_order_tables) \
+ RW(GrowableObjectArray, instructions_tables) \
RW(Array, obfuscation_map) \
RW(GrowableObjectArray, ffi_callback_functions) \
RW(Class, ffi_pointer_class) \
diff --git a/runtime/vm/raw_object.cc b/runtime/vm/raw_object.cc
index 7fb0ce7..7e32ade 100644
--- a/runtime/vm/raw_object.cc
+++ b/runtime/vm/raw_object.cc
@@ -115,6 +115,13 @@
instance_size = InstructionsSection::InstanceSize(section_size);
break;
}
+ case kInstructionsTableCid: {
+ const InstructionsTablePtr raw_instructions_table =
+ static_cast<const InstructionsTablePtr>(this);
+ intptr_t length = raw_instructions_table->untag()->length_;
+ instance_size = InstructionsTable::InstanceSize(length);
+ break;
+ }
case kContextCid: {
const ContextPtr raw_context = static_cast<const ContextPtr>(this);
intptr_t num_variables = raw_context->untag()->num_variables_;
@@ -575,6 +582,7 @@
TypedData::ElementSizeInBytes(raw_obj->GetClassId()) *
Smi::Value(raw_obj->untag()->length()))
VARIABLE_COMPRESSED_VISITOR(ContextScope, raw_obj->untag()->num_variables_)
+VARIABLE_VISITOR(InstructionsTable, raw_obj->untag()->length_)
NULL_VISITOR(Mint)
NULL_VISITOR(Double)
NULL_VISITOR(Float32x4)
diff --git a/runtime/vm/raw_object.h b/runtime/vm/raw_object.h
index bbec4f1..4163c3e 100644
--- a/runtime/vm/raw_object.h
+++ b/runtime/vm/raw_object.h
@@ -1835,6 +1835,22 @@
friend class Image;
};
+class UntaggedInstructionsTable : public UntaggedObject {
+ RAW_HEAP_OBJECT_IMPLEMENTATION(InstructionsTable);
+
+ VISIT_FROM(ObjectPtr, descriptors)
+ POINTER_FIELD(ArrayPtr, descriptors)
+ VISIT_TO_LENGTH(ObjectPtr, &descriptors_)
+
+ intptr_t length_;
+ uword start_pc_;
+ uword end_pc_;
+
+ // Variable length data follows here.
+ uint32_t* data() { OPEN_ARRAY_START(uint32_t, uint32_t); }
+ const uint32_t* data() const { OPEN_ARRAY_START(uint32_t, uint32_t); }
+};
+
class UntaggedPcDescriptors : public UntaggedObject {
public:
// The macro argument V is passed two arguments, the raw name of the enum value
diff --git a/runtime/vm/raw_object_snapshot.cc b/runtime/vm/raw_object_snapshot.cc
index 414430b..9f8b670 100644
--- a/runtime/vm/raw_object_snapshot.cc
+++ b/runtime/vm/raw_object_snapshot.cc
@@ -549,6 +549,7 @@
MESSAGE_SNAPSHOT_UNREACHABLE(ICData);
MESSAGE_SNAPSHOT_UNREACHABLE(Instructions);
MESSAGE_SNAPSHOT_UNREACHABLE(InstructionsSection);
+MESSAGE_SNAPSHOT_UNREACHABLE(InstructionsTable);
MESSAGE_SNAPSHOT_UNREACHABLE(KernelProgramInfo);
MESSAGE_SNAPSHOT_UNREACHABLE(Library);
MESSAGE_SNAPSHOT_UNREACHABLE(LibraryPrefix);
diff --git a/runtime/vm/reverse_pc_lookup_cache.cc b/runtime/vm/reverse_pc_lookup_cache.cc
index 70f06df..cb2a30c 100644
--- a/runtime/vm/reverse_pc_lookup_cache.cc
+++ b/runtime/vm/reverse_pc_lookup_cache.cc
@@ -7,12 +7,14 @@
#include "vm/isolate.h"
#include "vm/object.h"
#include "vm/object_store.h"
+#include "vm/stub_code.h"
namespace dart {
-CodePtr ReversePc::LookupInGroup(IsolateGroup* group,
- uword pc,
- bool is_return_address) {
+ObjectPtr ReversePc::FindCodeDescriptorInGroup(IsolateGroup* group,
+ uword pc,
+ bool is_return_address,
+ uword* code_start) {
#if defined(DART_PRECOMPILED_RUNTIME)
// This can run in the middle of GC and must not allocate handles.
NoSafepointScope no_safepoint;
@@ -24,47 +26,37 @@
// This expected number of tables is low, so we go through them linearly. If
  // this changes, we could sort the table list during deserialization and
// binary search for the table.
- GrowableObjectArrayPtr tables = group->object_store()->code_order_tables();
+ GrowableObjectArrayPtr tables = group->object_store()->instructions_tables();
intptr_t tables_length = Smi::Value(tables->untag()->length_);
for (intptr_t i = 0; i < tables_length; i++) {
- ArrayPtr table =
- static_cast<ArrayPtr>(tables->untag()->data_->untag()->data()[i]);
- intptr_t lo = 0;
- intptr_t hi = Smi::Value(table->untag()->length_) - 1;
-
- // Fast check if pc belongs to this table.
- if (lo > hi) {
- continue;
- }
- CodePtr first = static_cast<CodePtr>(table->untag()->data()[lo]);
- if (pc < Code::PayloadStartOf(first)) {
- continue;
- }
- CodePtr last = static_cast<CodePtr>(table->untag()->data()[hi]);
- if (pc >= (Code::PayloadStartOf(last) + Code::PayloadSizeOf(last))) {
- continue;
- }
-
- // Binary search within the table for the matching Code.
- while (lo <= hi) {
- intptr_t mid = (hi - lo + 1) / 2 + lo;
- ASSERT(mid >= lo);
- ASSERT(mid <= hi);
- CodePtr code = static_cast<CodePtr>(table->untag()->data()[mid]);
- uword code_start = Code::PayloadStartOf(code);
- uword code_end = code_start + Code::PayloadSizeOf(code);
- if (pc < code_start) {
- hi = mid - 1;
- } else if (pc >= code_end) {
- lo = mid + 1;
- } else {
- return code;
- }
+ InstructionsTablePtr table = static_cast<InstructionsTablePtr>(
+ tables->untag()->data_->untag()->data()[i]);
+ intptr_t index = InstructionsTable::FindEntry(table, pc);
+ if (index >= 0) {
+ *code_start = InstructionsTable::PayloadStartAt(table, index);
+ return InstructionsTable::DescriptorAt(table, index);
}
}
#endif // defined(DART_PRECOMPILED_RUNTIME)
- return Code::null();
+ *code_start = 0;
+ return Object::null();
+}
+
+ObjectPtr ReversePc::FindCodeDescriptor(IsolateGroup* group,
+ uword pc,
+ bool is_return_address,
+ uword* code_start) {
+ ASSERT(FLAG_precompiled_mode && FLAG_use_bare_instructions);
+ NoSafepointScope no_safepoint;
+
+ ObjectPtr code_descriptor =
+ FindCodeDescriptorInGroup(group, pc, is_return_address, code_start);
+ if (code_descriptor == Object::null()) {
+ code_descriptor = FindCodeDescriptorInGroup(Dart::vm_isolate_group(), pc,
+ is_return_address, code_start);
+ }
+ return code_descriptor;
}
CodePtr ReversePc::Lookup(IsolateGroup* group,
@@ -73,11 +65,19 @@
ASSERT(FLAG_precompiled_mode && FLAG_use_bare_instructions);
NoSafepointScope no_safepoint;
- CodePtr code = LookupInGroup(group, pc, is_return_address);
- if (code == Code::null()) {
- code = LookupInGroup(Dart::vm_isolate_group(), pc, is_return_address);
+ uword code_start;
+ ObjectPtr code_descriptor =
+ FindCodeDescriptor(group, pc, is_return_address, &code_start);
+ if (code_descriptor != Object::null()) {
+ if (!code_descriptor->IsCode()) {
+ ASSERT(StubCode::UnknownDartCode().PayloadStart() == 0);
+ ASSERT(StubCode::UnknownDartCode().Size() == kUwordMax);
+ ASSERT(StubCode::UnknownDartCode().IsFunctionCode());
+ ASSERT(StubCode::UnknownDartCode().IsUnknownDartCode());
+ code_descriptor = StubCode::UnknownDartCode().ptr();
+ }
}
- return code;
+ return static_cast<CodePtr>(code_descriptor);
}
CompressedStackMapsPtr ReversePc::FindCompressedStackMaps(
@@ -88,10 +88,17 @@
ASSERT(FLAG_precompiled_mode && FLAG_use_bare_instructions);
NoSafepointScope no_safepoint;
- CodePtr code = Lookup(group, pc, is_return_address);
- if (code != Code::null()) {
- *code_start = Code::PayloadStartOf(code);
- return code->untag()->compressed_stackmaps();
+ ObjectPtr code_descriptor =
+ FindCodeDescriptor(group, pc, is_return_address, code_start);
+ if (code_descriptor != Object::null()) {
+ if (code_descriptor->IsCode()) {
+ CodePtr code = static_cast<CodePtr>(code_descriptor);
+ ASSERT(*code_start == Code::PayloadStartOf(code));
+ return code->untag()->compressed_stackmaps();
+ } else {
+ ASSERT(code_descriptor->IsCompressedStackMaps());
+ return static_cast<CompressedStackMapsPtr>(code_descriptor);
+ }
}
*code_start = 0;
diff --git a/runtime/vm/reverse_pc_lookup_cache.h b/runtime/vm/reverse_pc_lookup_cache.h
index 9eb0558..e214747 100644
--- a/runtime/vm/reverse_pc_lookup_cache.h
+++ b/runtime/vm/reverse_pc_lookup_cache.h
@@ -32,9 +32,14 @@
uword* code_start);
private:
- static CodePtr LookupInGroup(IsolateGroup* group,
- uword pc,
- bool is_return_address);
+ static ObjectPtr FindCodeDescriptorInGroup(IsolateGroup* group,
+ uword pc,
+ bool is_return_address,
+ uword* code_start);
+ static ObjectPtr FindCodeDescriptor(IsolateGroup* group,
+ uword pc,
+ bool is_return_address,
+ uword* code_start);
};
} // namespace dart
diff --git a/runtime/vm/stack_frame.cc b/runtime/vm/stack_frame.cc
index f5df240..b083155 100644
--- a/runtime/vm/stack_frame.cc
+++ b/runtime/vm/stack_frame.cc
@@ -351,24 +351,10 @@
#if defined(DART_PRECOMPILED_RUNTIME)
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
NoSafepointScope no_safepoint;
- Code code;
- code = ReversePc::Lookup(isolate_group(), pc(),
- /*is_return_address=*/true);
- if (!code.IsNull()) {
- // This is needed in order to test stack traces with the future
- // behavior of ReversePc::Lookup which will return
- // StubCode::UnknownDartCode() if code object is omitted from
- // the snapshot.
- if (code.is_discarded()) {
- ASSERT(StubCode::UnknownDartCode().PayloadStart() == 0);
- ASSERT(StubCode::UnknownDartCode().Size() == kUwordMax);
- ASSERT(StubCode::UnknownDartCode().IsFunctionCode());
- ASSERT(StubCode::UnknownDartCode().IsUnknownDartCode());
- return StubCode::UnknownDartCode().ptr();
- }
- return code.ptr();
- }
- UNREACHABLE();
+ CodePtr code = ReversePc::Lookup(isolate_group(), pc(),
+ /*is_return_address=*/true);
+ ASSERT(code != Code::null());
+ return code;
}
#endif // defined(DART_PRECOMPILED_RUNTIME)
diff --git a/runtime/vm/symbols.h b/runtime/vm/symbols.h
index dc6d663..4921334 100644
--- a/runtime/vm/symbols.h
+++ b/runtime/vm/symbols.h
@@ -186,6 +186,7 @@
V(InitPrefix, "init:") \
V(Instructions, "Instructions") \
V(InstructionsSection, "InstructionsSection") \
+ V(InstructionsTable, "InstructionsTable") \
V(Int, "int") \
V(Int16List, "Int16List") \
V(Int32List, "Int32List") \
diff --git a/runtime/vm/tagged_pointer.h b/runtime/vm/tagged_pointer.h
index b0cb20d..4df36b4 100644
--- a/runtime/vm/tagged_pointer.h
+++ b/runtime/vm/tagged_pointer.h
@@ -300,6 +300,7 @@
DEFINE_TAGGED_POINTER(ObjectPool, Object)
DEFINE_TAGGED_POINTER(Instructions, Object)
DEFINE_TAGGED_POINTER(InstructionsSection, Object)
+DEFINE_TAGGED_POINTER(InstructionsTable, Object)
DEFINE_TAGGED_POINTER(PcDescriptors, Object)
DEFINE_TAGGED_POINTER(CodeSourceMap, Object)
DEFINE_TAGGED_POINTER(CompressedStackMaps, Object)
diff --git a/runtime/vm/type_testing_stubs_test.cc b/runtime/vm/type_testing_stubs_test.cc
index e00f107..18c90d1 100644
--- a/runtime/vm/type_testing_stubs_test.cc
+++ b/runtime/vm/type_testing_stubs_test.cc
@@ -224,8 +224,8 @@
auto& stc2 = SubtypeTestCache::Handle();
// First invocation will a) specialize the TTS b) may create SubtypeTestCache
- result = DartEntry::InvokeCode(invoke_tts, arguments_descriptor, arguments,
- thread);
+ result = DartEntry::InvokeCode(invoke_tts, invoke_tts.EntryPoint(),
+ arguments_descriptor, arguments, thread);
stc ^= pool.ObjectAt(kSubtypeTestCacheIndex);
tts = instantiated_dst_type.type_test_stub();
if (!result.IsError()) {
@@ -234,8 +234,8 @@
lazy(result, stc);
// Second invocation will a) keep TTS b) keep optional SubtypeTestCache
- result2 = DartEntry::InvokeCode(invoke_tts, arguments_descriptor, arguments,
- thread);
+ result2 = DartEntry::InvokeCode(invoke_tts, invoke_tts.EntryPoint(),
+ arguments_descriptor, arguments, thread);
stc2 ^= pool.ObjectAt(kSubtypeTestCacheIndex);
tts2 = instantiated_dst_type.type_test_stub();
abi_regs_modified ^= abi_regs_modified_box.At(0);
@@ -252,8 +252,8 @@
TypeTestingStubGenerator::SpecializeStubFor(thread, instantiated_dst_type);
tts = instantiated_dst_type.type_test_stub();
- result2 = DartEntry::InvokeCode(invoke_tts, arguments_descriptor, arguments,
- thread);
+ result2 = DartEntry::InvokeCode(invoke_tts, invoke_tts.EntryPoint(),
+ arguments_descriptor, arguments, thread);
stc2 ^= pool.ObjectAt(kSubtypeTestCacheIndex);
tts2 = instantiated_dst_type.type_test_stub();
abi_regs_modified ^= abi_regs_modified_box.At(0);
diff --git a/tests/corelib/apply2_test.dart b/tests/corelib/apply2_test.dart
index 4973e87..23805a1 100644
--- a/tests/corelib/apply2_test.dart
+++ b/tests/corelib/apply2_test.dart
@@ -2,6 +2,9 @@
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
+// VMOptions=
+// VMOptions=--dwarf_stack_traces --no-retain_function_objects --no-retain_code_objects
+
import "package:expect/expect.dart";
apply(Function function, List? positional, Map<Symbol, dynamic>? named) {
diff --git a/tests/corelib/bigint_from_test.dart b/tests/corelib/bigint_from_test.dart
index babceff..a12abd9 100644
--- a/tests/corelib/bigint_from_test.dart
+++ b/tests/corelib/bigint_from_test.dart
@@ -7,6 +7,7 @@
// VMOptions=--intrinsify --enable-asserts
// VMOptions=--no-intrinsify --enable-asserts
// VMOptions=--optimization-counter-threshold=5 --no-background-compilation
+// VMOptions=--dwarf_stack_traces --no-retain_function_objects --no-retain_code_objects
import "package:expect/expect.dart";
diff --git a/tests/corelib_2/apply2_test.dart b/tests/corelib_2/apply2_test.dart
index 557aa16..507b2a2 100644
--- a/tests/corelib_2/apply2_test.dart
+++ b/tests/corelib_2/apply2_test.dart
@@ -2,6 +2,9 @@
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
+// VMOptions=
+// VMOptions=--dwarf_stack_traces --no-retain_function_objects --no-retain_code_objects
+
import "package:expect/expect.dart";
apply(Function function, List positional, Map<Symbol, dynamic> named) {
diff --git a/tests/corelib_2/bigint_from_test.dart b/tests/corelib_2/bigint_from_test.dart
index 1ef06b3..52bd855 100644
--- a/tests/corelib_2/bigint_from_test.dart
+++ b/tests/corelib_2/bigint_from_test.dart
@@ -7,6 +7,7 @@
// VMOptions=--intrinsify --enable-asserts
// VMOptions=--no-intrinsify --enable-asserts
// VMOptions=--optimization-counter-threshold=5 --no-background-compilation
+// VMOptions=--dwarf_stack_traces --no-retain_function_objects --no-retain_code_objects
import "package:expect/expect.dart";
diff --git a/tests/language/async/identifier_test.dart b/tests/language/async/identifier_test.dart
index dec9c55..0707259 100644
--- a/tests/language/async/identifier_test.dart
+++ b/tests/language/async/identifier_test.dart
@@ -2,6 +2,9 @@
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
+// VMOptions=
+// VMOptions=--dwarf_stack_traces --no-retain_function_objects --no-retain_code_objects
+
import 'dart:async' as async;
import 'lib.dart' as l; // Minimal library containing "int async;".
diff --git a/tests/language/deferred/function_type_test.dart b/tests/language/deferred/function_type_test.dart
index 49e151b..b69eae6 100644
--- a/tests/language/deferred/function_type_test.dart
+++ b/tests/language/deferred/function_type_test.dart
@@ -2,6 +2,9 @@
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
+// VMOptions=
+// VMOptions=--dwarf_stack_traces --no-retain_function_objects --no-retain_code_objects
+
import 'function_type_lib.dart' deferred as lib;
main() {
diff --git a/tests/language/deferred/shared_and_unshared_classes_test.dart b/tests/language/deferred/shared_and_unshared_classes_test.dart
index a815c74..43c8ecd 100644
--- a/tests/language/deferred/shared_and_unshared_classes_test.dart
+++ b/tests/language/deferred/shared_and_unshared_classes_test.dart
@@ -2,6 +2,9 @@
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
+// VMOptions=
+// VMOptions=--dwarf_stack_traces --no-retain_function_objects --no-retain_code_objects
+
import "package:expect/expect.dart";
import "package:async_helper/async_helper.dart";
import "shared_and_unshared_classes_lib1.dart" deferred as lib1;
diff --git a/tests/language_2/async/identifier_test.dart b/tests/language_2/async/identifier_test.dart
index cc299a1..ad935d2 100644
--- a/tests/language_2/async/identifier_test.dart
+++ b/tests/language_2/async/identifier_test.dart
@@ -2,6 +2,9 @@
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
+// VMOptions=
+// VMOptions=--dwarf_stack_traces --no-retain_function_objects --no-retain_code_objects
+
import 'dart:async' as async;
import 'lib.dart' as l; // Minimal library containing "int async;".
diff --git a/tests/language_2/deferred/function_type_test.dart b/tests/language_2/deferred/function_type_test.dart
index 49e151b..b69eae6 100644
--- a/tests/language_2/deferred/function_type_test.dart
+++ b/tests/language_2/deferred/function_type_test.dart
@@ -2,6 +2,9 @@
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
+// VMOptions=
+// VMOptions=--dwarf_stack_traces --no-retain_function_objects --no-retain_code_objects
+
import 'function_type_lib.dart' deferred as lib;
main() {
diff --git a/tests/language_2/deferred/shared_and_unshared_classes_test.dart b/tests/language_2/deferred/shared_and_unshared_classes_test.dart
index a815c74..43c8ecd 100644
--- a/tests/language_2/deferred/shared_and_unshared_classes_test.dart
+++ b/tests/language_2/deferred/shared_and_unshared_classes_test.dart
@@ -2,6 +2,9 @@
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.
+// VMOptions=
+// VMOptions=--dwarf_stack_traces --no-retain_function_objects --no-retain_code_objects
+
import "package:expect/expect.dart";
import "package:async_helper/async_helper.dart";
import "shared_and_unshared_classes_lib1.dart" deferred as lib1;
To view, visit change 189560. To unsubscribe, or for help writing mail filters, visit settings.
Attention is currently required from: Ryan Macnak, Vyacheslav Egorov, Tess Strickland.
Attention is currently required from: Alexander Markov, Vyacheslav Egorov, Tess Strickland.
Patch set 18:Code-Review +1
4 comments:
File runtime/vm/clustered_snapshot.cc:
Patch Set #18, Line 7733: USE(instructions_table_len);
ASSERT should count as a (dead) use. DEBUG_ASSERT is the variant that avoids the use unless we're in DEBUG mode.
File runtime/vm/compiler/stub_code_compiler.cc:
Could you move the NotLoaded stub to be here, to keep the marker stubs together?
File runtime/vm/raw_object.h:
Patch Set #18, Line 1842: POINTER_FIELD(ArrayPtr, descriptors)
Add entry for (InstructionsTable, descriptors) to raw_object_fields.cc
File runtime/vm/reverse_pc_lookup_cache.cc:
^ ...is low (one per loading unit),...
To view, visit change 189560. To unsubscribe, or for help writing mail filters, visit settings.
Attention is currently required from: Vyacheslav Egorov, Tess Strickland.
Patch set 19:Commit-Queue +1
5 comments:
Patchset:
Thank you, Ryan!
File runtime/vm/clustered_snapshot.cc:
Patch Set #18, Line 7733: USE(instructions_table_len);
ASSERT should count as a (dead) use. […]
Done
File runtime/vm/compiler/stub_code_compiler.cc:
Could you move the NotLoaded stub to be here, to keep the marker stubs together?
Done
File runtime/vm/raw_object.h:
Patch Set #18, Line 1842: POINTER_FIELD(ArrayPtr, descriptors)
Add entry for (InstructionsTable, descriptors) to raw_object_fields. […]
Done
File runtime/vm/reverse_pc_lookup_cache.cc:
^ ...is low (one per loading unit),...
Done
To view, visit change 189560. To unsubscribe, or for help writing mail filters, visit settings.
Attention is currently required from: Vyacheslav Egorov, Tess Strickland.
go/dart-cbuild result: SUCCESS
Details: https://goto.google.com/dart-cbuild/find/9bd6f9eb77b1ef40483039266f14e8bc816f00a6
Attention is currently required from: Vyacheslav Egorov, Tess Strickland.
Patch set 19:Commit-Queue +2
Attention is currently required from: Vyacheslav Egorov, Tess Strickland.
CQ is trying the patch.
Note: The patchset #19 "Address review" sent to CQ was uploaded after this CL was CR+1-ed.
Reviewer, please verify there is nothing unexpected https://dart-review.googlesource.com/c/189560/19
Bot data: {"action": "start", "triggered_at": "2021-04-13T02:35:54.0Z", "revision": "9bd6f9eb77b1ef40483039266f14e8bc816f00a6"}
commi...@chromium.org submitted this change.
[vm/aot] Discard Code objects from the heap
While deserializing AOT snapshot, Code objects which do not contain
valuable information besides entry point and stack maps are discarded
and not allocated on the heap (they are replaced with
StubCode::UnknownDartCode()).
PC -> Code/CompressedStackMaps lookup is implemented using a separate
table (InstructionsTable).
Flutter gallery in release-sizeopt mode:
Heap size of snapshot objects: arm -26.89%, arm64 -27.68%
Large Flutter application in release mode with --dwarf-stack-traces:
Heap size of snapshot objects: -24.3%.
Discarded Code objects: 72.5% of all Code objects.
Issue: https://github.com/dart-lang/sdk/issues/44852.
TEST=existing tests; "--dwarf_stack_traces --no-retain_function_objects
--no-retain_code_objects" mode is enabled for a few tests.
Change-Id: I5fe3e283630c8e8f4442319d5dcae38d174dd0d8
Reviewed-on: https://dart-review.googlesource.com/c/sdk/+/189560
Commit-Queue: Alexander Markov <alexm...@google.com>
Reviewed-by: Ryan Macnak <rma...@google.com>
---
M runtime/vm/class_id.h
M runtime/vm/clustered_snapshot.cc
M runtime/vm/clustered_snapshot.h
M runtime/vm/compiler/aot/precompiler.cc
M runtime/vm/compiler/runtime_api.cc
M runtime/vm/compiler/runtime_api.h
M runtime/vm/compiler/runtime_offsets_extracted.h
M runtime/vm/compiler/runtime_offsets_list.h
M runtime/vm/compiler/stub_code_compiler.cc
M runtime/vm/compiler/stub_code_compiler_arm.cc
M runtime/vm/compiler/stub_code_compiler_arm64.cc
M runtime/vm/compiler/stub_code_compiler_ia32.cc
M runtime/vm/compiler/stub_code_compiler_x64.cc
M runtime/vm/dart_entry.cc
M runtime/vm/dart_entry.h
M runtime/vm/object.cc
M runtime/vm/object.h
M runtime/vm/object_graph.cc
M runtime/vm/object_service.cc
M runtime/vm/object_store.h
M runtime/vm/raw_object.cc
M runtime/vm/raw_object.h
M runtime/vm/raw_object_fields.cc
M runtime/vm/raw_object_snapshot.cc
M runtime/vm/reverse_pc_lookup_cache.cc
M runtime/vm/reverse_pc_lookup_cache.h
M runtime/vm/stack_frame.cc
M runtime/vm/symbols.h
M runtime/vm/tagged_pointer.h
M runtime/vm/type_testing_stubs_test.cc
M tests/corelib/apply2_test.dart
M tests/corelib/bigint_from_test.dart
M tests/corelib_2/apply2_test.dart
M tests/corelib_2/bigint_from_test.dart
M tests/language/async/identifier_test.dart
M tests/language/deferred/function_type_test.dart
M tests/language/deferred/shared_and_unshared_classes_test.dart
M tests/language_2/async/identifier_test.dart
M tests/language_2/deferred/function_type_test.dart
M tests/language_2/deferred/shared_and_unshared_classes_test.dart
40 files changed, 794 insertions(+), 229 deletions(-)
diff --git a/runtime/vm/class_id.h b/runtime/vm/class_id.h
index ed8a237..f59c94b 100644
--- a/runtime/vm/class_id.h
+++ b/runtime/vm/class_id.h
@@ -32,6 +32,7 @@
V(Code) \
V(Instructions) \
V(InstructionsSection) \
+ V(InstructionsTable) \
V(ObjectPool) \
V(PcDescriptors) \
V(CodeSourceMap) \
@@ -420,7 +421,8 @@
IsOneByteStringClassId(index) || IsTwoByteStringClassId(index) ||
IsTypedDataClassId(index) || (index == kContextCid) ||
(index == kTypeArgumentsCid) || (index == kInstructionsCid) ||
- (index == kInstructionsSectionCid) || (index == kObjectPoolCid) ||
+ (index == kInstructionsSectionCid) ||
+ (index == kInstructionsTableCid) || (index == kObjectPoolCid) ||
(index == kPcDescriptorsCid) || (index == kCodeSourceMapCid) ||
(index == kCompressedStackMapsCid) ||
(index == kLocalVarDescriptorsCid) ||
diff --git a/runtime/vm/clustered_snapshot.cc b/runtime/vm/clustered_snapshot.cc
index 537bfe0..585c296 100644
--- a/runtime/vm/clustered_snapshot.cc
+++ b/runtime/vm/clustered_snapshot.cc
@@ -7625,6 +7720,18 @@
ASSERT_EQUAL(initial_field_table_->NumFieldIds(), initial_field_table_len);
}
+#if defined(DART_PRECOMPILED_RUNTIME)
+ if (instructions_table_len > 0) {
+ ASSERT(FLAG_precompiled_mode && FLAG_use_bare_instructions);
+ const uword start_pc = image_reader_->GetBareInstructionsAt(0);
+ const uword end_pc = image_reader_->GetBareInstructionsEnd();
+ instructions_table_ =
+ InstructionsTable::New(instructions_table_len, start_pc, end_pc);
+ }
+#else
+ ASSERT(instructions_table_len == 0);
index bbcdc8c..0058e1a 100644
--- a/runtime/vm/compiler/stub_code_compiler.cc
+++ b/runtime/vm/compiler/stub_code_compiler.cc
@@ -888,9 +888,17 @@
}
void StubCodeCompiler::GenerateUnknownDartCodeStub(Assembler* assembler) {
+ // Enter frame to include caller into the backtrace.
+ __ EnterStubFrame();
__ Breakpoint(); // Marker stub.
}
+void StubCodeCompiler::GenerateNotLoadedStub(Assembler* assembler) {
+ __ EnterStubFrame();
+ __ CallRuntime(kNotLoadedRuntimeEntry, 0);
+ __ Breakpoint();
+}
+
} // namespace compiler
} // namespace dart
diff --git a/runtime/vm/compiler/stub_code_compiler_arm.cc b/runtime/vm/compiler/stub_code_compiler_arm.cc
index 515e680..b4901a6 100644
--- a/runtime/vm/compiler/stub_code_compiler_arm.cc
+++ b/runtime/vm/compiler/stub_code_compiler_arm.cc
@@ -1168,7 +1168,7 @@
// Called when invoking Dart code from C++ (VM code).
// Input parameters:
// LR : points to return address.
-// R0 : code object of the Dart function to call.
+// R0 : target code or entry point (in bare instructions mode).
// R1 : arguments descriptor array.
// R2 : arguments array.
// R3 : current thread.
@@ -1260,11 +1260,12 @@
// Call the Dart code entrypoint.
if (FLAG_precompiled_mode && FLAG_use_bare_instructions) {
__ SetupGlobalPoolAndDispatchTable();
+ __ LoadImmediate(CODE_REG, 0); // GC safe value into CODE_REG.
} else {
__ LoadImmediate(PP, 0); // GC safe value into PP.
+ __ ldr(CODE_REG, Address(R0, target::VMHandles::kOffsetOfRawPtrInHandle));
+ __ ldr(R0, FieldAddress(CODE_REG, target::Code::entry_point_offset()));
}
- __ ldr(CODE_REG, Address(R0, target::VMHandles::kOffsetOfRawPtrInHandle));
- __ ldr(R0, FieldAddress(CODE_REG, target::Code::entry_point_offset()));
__ blx(R0); // R4 is the arguments descriptor array.
// Get rid of arguments pushed on the stack.
@@ -3290,12 +3291,6 @@
CODE_REG, target::Code::entry_point_offset(CodeEntryKind::kMonomorphic)));
}
-void StubCodeCompiler::GenerateNotLoadedStub(Assembler* assembler) {
- __ EnterStubFrame();
- __ CallRuntime(kNotLoadedRuntimeEntry, 0);
- __ bkpt(0);
-}
-
// Instantiate type arguments from instantiator and function type args.
// R3 uninstantiated type arguments.
// R2 instantiator type arguments.
diff --git a/runtime/vm/compiler/stub_code_compiler_arm64.cc b/runtime/vm/compiler/stub_code_compiler_arm64.cc
index bc5eb6a..0d7cacc 100644
@@ -3450,12 +3451,6 @@
__ br(R1);
}
-void StubCodeCompiler::GenerateNotLoadedStub(Assembler* assembler) {
- __ EnterStubFrame();
- __ CallRuntime(kNotLoadedRuntimeEntry, 0);
- __ brk(0);
-}
-
// Instantiate type arguments from instantiator and function type args.
// R3 uninstantiated type arguments.
// R2 instantiator type arguments.
diff --git a/runtime/vm/compiler/stub_code_compiler_ia32.cc b/runtime/vm/compiler/stub_code_compiler_ia32.cc
index 2286706..ef29020 100644
--- a/runtime/vm/compiler/stub_code_compiler_ia32.cc
+++ b/runtime/vm/compiler/stub_code_compiler_ia32.cc
@@ -2784,12 +2784,6 @@
__ int3(); // AOT only.
}
-void StubCodeCompiler::GenerateNotLoadedStub(Assembler* assembler) {
- __ EnterStubFrame();
- __ CallRuntime(kNotLoadedRuntimeEntry, 0);
- __ int3();
-}
-
// Instantiate type arguments from instantiator and function type args.
// EBX: uninstantiated type arguments.
// EDX: instantiator type arguments.
diff --git a/runtime/vm/compiler/stub_code_compiler_x64.cc b/runtime/vm/compiler/stub_code_compiler_x64.cc
index 8d2987a..611e083 100644
@@ -3380,12 +3380,6 @@
__ jmp(RCX);
}
-void StubCodeCompiler::GenerateNotLoadedStub(Assembler* assembler) {
- __ EnterStubFrame();
- __ CallRuntime(kNotLoadedRuntimeEntry, 0);
- __ int3();
-}
-
// Instantiate type arguments from instantiator and function type args.
// RBX: uninstantiated type arguments.
// RDX: instantiator type arguments.
diff --git a/runtime/vm/raw_object_fields.cc b/runtime/vm/raw_object_fields.cc
index 07f24db..7329337 100644
--- a/runtime/vm/raw_object_fields.cc
+++ b/runtime/vm/raw_object_fields.cc
@@ -106,6 +106,7 @@
F(CallSiteData, args_descriptor_) \
F(ICData, entries_) \
F(ICData, owner_) \
+ F(InstructionsTable, descriptors_) \
F(MegamorphicCache, buckets_) \
F(MegamorphicCache, mask_) \
F(SubtypeTestCache, cache_) \
diff --git a/runtime/vm/raw_object_snapshot.cc b/runtime/vm/raw_object_snapshot.cc
index 414430b..9f8b670 100644
--- a/runtime/vm/raw_object_snapshot.cc
+++ b/runtime/vm/raw_object_snapshot.cc
@@ -549,6 +549,7 @@
MESSAGE_SNAPSHOT_UNREACHABLE(ICData);
MESSAGE_SNAPSHOT_UNREACHABLE(Instructions);
MESSAGE_SNAPSHOT_UNREACHABLE(InstructionsSection);
+MESSAGE_SNAPSHOT_UNREACHABLE(InstructionsTable);
MESSAGE_SNAPSHOT_UNREACHABLE(KernelProgramInfo);
MESSAGE_SNAPSHOT_UNREACHABLE(Library);
MESSAGE_SNAPSHOT_UNREACHABLE(LibraryPrefix);
diff --git a/runtime/vm/reverse_pc_lookup_cache.cc b/runtime/vm/reverse_pc_lookup_cache.cc
index 70f06df..517d8a1 100644
--- a/runtime/vm/reverse_pc_lookup_cache.cc
+++ b/runtime/vm/reverse_pc_lookup_cache.cc
@@ -7,12 +7,14 @@
#include "vm/isolate.h"
#include "vm/object.h"
#include "vm/object_store.h"
+#include "vm/stub_code.h"
namespace dart {
-CodePtr ReversePc::LookupInGroup(IsolateGroup* group,
- uword pc,
- bool is_return_address) {
+ObjectPtr ReversePc::FindCodeDescriptorInGroup(IsolateGroup* group,
+ uword pc,
+ bool is_return_address,
+ uword* code_start) {
#if defined(DART_PRECOMPILED_RUNTIME)
// This can run in the middle of GC and must not allocate handles.
NoSafepointScope no_safepoint;
@@ -21,50 +23,40 @@
pc--;
}
- // This expected number of tables is low, so we go through them linearly. If
- // this changes, would could sort the table list during deserialization and
- // binary search for the table.
- GrowableObjectArrayPtr tables = group->object_store()->code_order_tables();
+ // This expected number of tables is low (one per loading unit), so we go
+ // through them linearly. If this changes, we could sort the table list
+ // during deserialization and binary search for the table.
To view, visit change 189560. To unsubscribe, or for help writing mail filters, visit settings.
go/dart-cbuild result: FAILURE (REGRESSIONS DETECTED)
Details: https://goto.google.com/dart-cbuild/find/5129cff930d9039852c4a037e096fd97551eec1e
Bugs: go/dart-cbuild-bug/5129cff930d9039852c4a037e096fd97551eec1e