From 165c583d57af613836cf7d08242ce969521db00b Mon Sep 17 00:00:00 2001
From: Martin Kustermann
Date: Fri, 6 Apr 2018 07:00:50 +0000
Subject: [PATCH] [VM] Introduction of type testing stubs - Part 1
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

This CL:

  * Adds a field to [RawAbstractType] which will always hold a pointer to
    the entrypoint of a type testing stub

  * Makes this new field be initialized to a default stub whenever instances
    are created (e.g. via Type::New(), the snapshot reader, ...)

  * Makes the clustered snapshotter write a reference to the corresponding
    [RawInstructions] object when writing the field and do the reverse when
    reading it.

  * Makes us call the type testing stub to perform assert-assignable checks.

To reduce unnecessary loads on callsites, we store the entrypoint of the
type testing stubs directly in the type objects. This means that the caller
of a type testing stub can simply branch there without populating a code
object first. This also means that the type testing stubs themselves have no
access to a pool, and we therefore also don't hold on to the [Code] object;
only the [Instructions] object is necessary.

The type testing stubs do not set up a frame themselves and have no
safepoint. If a type testing stub cannot determine a positive answer, it
will tail-call a general-purpose stub. The general-purpose stub sets up a
stub frame, tries to consult a [SubtypeTestCache], and bails out to the
runtime if this was unsuccessful.

This CL is just the first, for ease of reviewing. The actual
type-specialized type testing stubs will be generated in later CLs.

Issue https://github.com/dart-lang/sdk/issues/31798

Change-Id: I174a11b3b812799f399a60af799144c2ba3c26ec
Reviewed-on: https://dart-review.googlesource.com/44787
Reviewed-by: Vyacheslav Egorov
Reviewed-by: Régis Crelier
---
 runtime/vm/class_finalizer.cc                 |   5 +
 runtime/vm/clustered_snapshot.cc              | 155 +++++++++++++++--
 runtime/vm/clustered_snapshot.h               |  13 +-
 runtime/vm/compiler/assembler/assembler_arm.h |   7 +
 .../vm/compiler/assembler/assembler_arm64.h   |   3 +
 runtime/vm/compiler/assembler/assembler_x64.h |   7 +
 .../compiler/assembler/assembler_x64_test.cc  |   2 +-
 .../compiler/backend/flow_graph_compiler.cc   |  59 +++++--
 .../vm/compiler/backend/flow_graph_compiler.h |  28 ++-
 .../backend/flow_graph_compiler_arm.cc        | 109 ++++++++----
 .../backend/flow_graph_compiler_arm64.cc      | 102 +++++++----
 .../backend/flow_graph_compiler_ia32.cc       |   2 +-
 .../backend/flow_graph_compiler_x64.cc        |  95 +++++++---
 runtime/vm/dart.cc                            |   3 +
 runtime/vm/image_snapshot.cc                  |  49 ++++--
 runtime/vm/object.cc                          |  67 ++++++-
 runtime/vm/object.h                           |  23 ++-
 runtime/vm/raw_object.h                       |   2 +
 runtime/vm/raw_object_snapshot.cc             |  12 ++
 runtime/vm/snapshot.cc                        |  22 +++
 runtime/vm/snapshot.h                         |   8 +
 runtime/vm/stub_code.cc                       |  10 +-
 runtime/vm/stub_code.h                        |   6 +
 runtime/vm/stub_code_arm.cc                   | 163 +++++++++++++++++
 runtime/vm/stub_code_arm64.cc                 | 164 ++++++++++++++++++
 runtime/vm/stub_code_dbc.cc                   |  15 ++
 runtime/vm/stub_code_ia32.cc                  |  15 ++
 runtime/vm/stub_code_x64.cc                   | 158 ++++++++++++++++-
 runtime/vm/thread.h                           |   4 +-
 runtime/vm/type_testing_stubs.cc              |  69 ++++++++
 runtime/vm/type_testing_stubs.h               |  45 +++++
 runtime/vm/vm_sources.gni                     |   2 +
 32 files changed, 1259 insertions(+), 165 deletions(-)
 create mode 100644 runtime/vm/type_testing_stubs.cc
 create mode 100644 runtime/vm/type_testing_stubs.h

diff --git a/runtime/vm/class_finalizer.cc b/runtime/vm/class_finalizer.cc
index e31ec60f2592..a3b0e33946a4 100644
---
a/runtime/vm/class_finalizer.cc +++ b/runtime/vm/class_finalizer.cc @@ -18,6 +18,7 @@ #include "vm/symbols.h" #include "vm/timeline.h" #include "vm/type_table.h" +#include "vm/type_testing_stubs.h" namespace dart { @@ -589,6 +590,10 @@ void ClassFinalizer::ResolveType(const Class& cls, const AbstractType& type) { } } } + + // After resolving, we re-initialize the type testing stub. + type.SetTypeTestingStub( + Instructions::Handle(TypeTestingStubGenerator::DefaultCodeForType(type))); } void ClassFinalizer::FinalizeTypeParameters(const Class& cls, diff --git a/runtime/vm/clustered_snapshot.cc b/runtime/vm/clustered_snapshot.cc index dcf5f7b7577c..c098003511a9 100644 --- a/runtime/vm/clustered_snapshot.cc +++ b/runtime/vm/clustered_snapshot.cc @@ -3014,7 +3014,8 @@ class LibraryPrefixDeserializationCluster : public DeserializationCluster { #if !defined(DART_PRECOMPILED_RUNTIME) class TypeSerializationCluster : public SerializationCluster { public: - TypeSerializationCluster() : SerializationCluster("Type") {} + explicit TypeSerializationCluster(const TypeTestingStubFinder& ttsf) + : SerializationCluster("Type"), type_testing_stubs_(ttsf) {} virtual ~TypeSerializationCluster() {} void Trace(Serializer* s, RawObject* object) { @@ -3069,6 +3070,12 @@ class TypeSerializationCluster : public SerializationCluster { } s->WriteTokenPosition(type->ptr()->token_pos_); s->Write(type->ptr()->type_state_); + if (s->kind() == Snapshot::kFullAOT) { + RawInstructions* instr = type_testing_stubs_.LookupByAddresss( + type->ptr()->type_test_stub_entry_point_); + const int32_t text_offset = s->GetTextOffset(instr, Code::null()); + s->Write(text_offset); + } } count = objects_.length(); for (intptr_t i = 0; i < count; i++) { @@ -3080,18 +3087,35 @@ class TypeSerializationCluster : public SerializationCluster { } s->WriteTokenPosition(type->ptr()->token_pos_); s->Write(type->ptr()->type_state_); + if (s->kind() == Snapshot::kFullAOT) { + RawInstructions* instr = type_testing_stubs_.LookupByAddresss( + type->ptr()->type_test_stub_entry_point_); + const int32_t text_offset = s->GetTextOffset(instr, Code::null()); + s->Write(text_offset); + } + } + + // The [Type::dynamic_type()] object is not serialized, so we manually send + // the type testing stub for it. 
+ if (s->kind() == Snapshot::kFullAOT && s->for_vm_isolate()) { + RawInstructions* instr = type_testing_stubs_.LookupByAddresss( + Type::dynamic_type().type_test_stub_entry_point()); + const int32_t text_offset = s->GetTextOffset(instr, Code::null()); + s->Write(text_offset); } } private: GrowableArray canonical_objects_; GrowableArray objects_; + const TypeTestingStubFinder& type_testing_stubs_; }; #endif // !DART_PRECOMPILED_RUNTIME class TypeDeserializationCluster : public DeserializationCluster { public: - TypeDeserializationCluster() {} + TypeDeserializationCluster() + : type_(AbstractType::Handle()), instr_(Instructions::Handle()) {} virtual ~TypeDeserializationCluster() {} void ReadAlloc(Deserializer* d) { @@ -3126,6 +3150,12 @@ class TypeDeserializationCluster : public DeserializationCluster { } type->ptr()->token_pos_ = d->ReadTokenPosition(); type->ptr()->type_state_ = d->Read(); + if (d->kind() == Snapshot::kFullAOT) { + const int32_t text_offset = d->Read(); + instr_ = d->GetInstructionsAt(text_offset); + type_ = type; + type_.SetTypeTestingStub(instr_); + } } for (intptr_t id = start_index_; id < stop_index_; id++) { @@ -3139,18 +3169,53 @@ class TypeDeserializationCluster : public DeserializationCluster { } type->ptr()->token_pos_ = d->ReadTokenPosition(); type->ptr()->type_state_ = d->Read(); + if (d->kind() == Snapshot::kFullAOT) { + const int32_t text_offset = d->Read(); + instr_ = d->GetInstructionsAt(text_offset); + type_ = type; + type_.SetTypeTestingStub(instr_); + } + } + + // The [Type::dynamic_type()] object is not serialized, so we manually send + // the type testing stub for it. + if (d->kind() == Snapshot::kFullAOT && d->for_vm_isolate()) { + const int32_t text_offset = d->Read(); + Dart::vm_isolate()->heap()->WriteProtect(false); + instr_ = d->GetInstructionsAt(text_offset); + Type::dynamic_type().SetTypeTestingStub(instr_); + Dart::vm_isolate()->heap()->WriteProtect(true); + } + } + + void PostLoad(const Array& refs, Snapshot::Kind kind, Zone* zone) { + if (kind != Snapshot::kFullAOT) { + for (intptr_t id = canonical_start_index_; id < canonical_stop_index_; + id++) { + type_ ^= refs.At(id); + instr_ = TypeTestingStubGenerator::DefaultCodeForType(type_); + type_.SetTypeTestingStub(instr_); + } + for (intptr_t id = start_index_; id < stop_index_; id++) { + type_ ^= refs.At(id); + instr_ = TypeTestingStubGenerator::DefaultCodeForType(type_); + type_.SetTypeTestingStub(instr_); + } } } private: intptr_t canonical_start_index_; intptr_t canonical_stop_index_; + AbstractType& type_; + Instructions& instr_; }; #if !defined(DART_PRECOMPILED_RUNTIME) class TypeRefSerializationCluster : public SerializationCluster { public: - TypeRefSerializationCluster() : SerializationCluster("TypeRef") {} + explicit TypeRefSerializationCluster(const TypeTestingStubFinder& ttsf) + : SerializationCluster("TypeRef"), type_testing_stubs_(ttsf) {} virtual ~TypeRefSerializationCluster() {} void Trace(Serializer* s, RawObject* object) { @@ -3183,17 +3248,25 @@ class TypeRefSerializationCluster : public SerializationCluster { for (RawObject** p = from; p <= to; p++) { s->WriteRef(*p); } + if (s->kind() == Snapshot::kFullAOT) { + RawInstructions* instr = type_testing_stubs_.LookupByAddresss( + type->ptr()->type_test_stub_entry_point_); + const int32_t text_offset = s->GetTextOffset(instr, Code::null()); + s->Write(text_offset); + } } } private: GrowableArray objects_; + const TypeTestingStubFinder& type_testing_stubs_; }; #endif // !DART_PRECOMPILED_RUNTIME class TypeRefDeserializationCluster 
: public DeserializationCluster { public: - TypeRefDeserializationCluster() {} + TypeRefDeserializationCluster() + : type_(AbstractType::Handle()), instr_(Instructions::Handle()) {} virtual ~TypeRefDeserializationCluster() {} void ReadAlloc(Deserializer* d) { @@ -3218,14 +3291,26 @@ class TypeRefDeserializationCluster : public DeserializationCluster { for (RawObject** p = from; p <= to; p++) { *p = d->ReadRef(); } + if (d->kind() == Snapshot::kFullAOT) { + const int32_t text_offset = d->Read(); + instr_ = d->GetInstructionsAt(text_offset); + type_ = type; + type_.SetTypeTestingStub(instr_); + } } } + + private: + AbstractType& type_; + Instructions& instr_; }; #if !defined(DART_PRECOMPILED_RUNTIME) class TypeParameterSerializationCluster : public SerializationCluster { public: - TypeParameterSerializationCluster() : SerializationCluster("TypeParameter") {} + explicit TypeParameterSerializationCluster(const TypeTestingStubFinder& ttsf) + : SerializationCluster("TypeParameter"), type_testing_stubs_(ttsf) {} + virtual ~TypeParameterSerializationCluster() {} void Trace(Serializer* s, RawObject* object) { @@ -3263,17 +3348,25 @@ class TypeParameterSerializationCluster : public SerializationCluster { s->WriteTokenPosition(type->ptr()->token_pos_); s->Write(type->ptr()->index_); s->Write(type->ptr()->type_state_); + if (s->kind() == Snapshot::kFullAOT) { + RawInstructions* instr = type_testing_stubs_.LookupByAddresss( + type->ptr()->type_test_stub_entry_point_); + const int32_t text_offset = s->GetTextOffset(instr, Code::null()); + s->Write(text_offset); + } } } private: GrowableArray objects_; + const TypeTestingStubFinder& type_testing_stubs_; }; #endif // !DART_PRECOMPILED_RUNTIME class TypeParameterDeserializationCluster : public DeserializationCluster { public: - TypeParameterDeserializationCluster() {} + TypeParameterDeserializationCluster() + : type_(AbstractType::Handle()), instr_(Instructions::Handle()) {} virtual ~TypeParameterDeserializationCluster() {} void ReadAlloc(Deserializer* d) { @@ -3303,8 +3396,28 @@ class TypeParameterDeserializationCluster : public DeserializationCluster { type->ptr()->token_pos_ = d->ReadTokenPosition(); type->ptr()->index_ = d->Read(); type->ptr()->type_state_ = d->Read(); + if (d->kind() == Snapshot::kFullAOT) { + const int32_t text_offset = d->Read(); + instr_ = d->GetInstructionsAt(text_offset); + type_ = type; + type_.SetTypeTestingStub(instr_); + } + } + } + + void PostLoad(const Array& refs, Snapshot::Kind kind, Zone* zone) { + if (kind != Snapshot::kFullAOT) { + for (intptr_t id = start_index_; id < stop_index_; id++) { + type_ ^= refs.At(id); + instr_ = TypeTestingStubGenerator::DefaultCodeForType(type_); + type_.SetTypeTestingStub(instr_); + } } } + + private: + AbstractType& type_; + Instructions& instr_; }; #if !defined(DART_PRECOMPILED_RUNTIME) @@ -4556,7 +4669,8 @@ Serializer::Serializer(Thread* thread, uint8_t** buffer, ReAlloc alloc, intptr_t initial_size, - ImageWriter* image_writer) + ImageWriter* image_writer, + bool vm_isolate) : StackResource(thread), heap_(thread->isolate()->heap()), zone_(thread->zone()), @@ -4568,7 +4682,8 @@ Serializer::Serializer(Thread* thread, num_cids_(0), num_base_objects_(0), num_written_objects_(0), - next_ref_index_(1) + next_ref_index_(1), + vm_isolate_(vm_isolate) #if defined(SNAPSHOT_BACKTRACE) , current_parent_(Object::null()), @@ -4668,11 +4783,11 @@ SerializationCluster* Serializer::NewClusterForClass(intptr_t cid) { case kLibraryPrefixCid: return new (Z) LibraryPrefixSerializationCluster(); case 
kTypeCid: - return new (Z) TypeSerializationCluster(); + return new (Z) TypeSerializationCluster(type_testing_stubs_); case kTypeRefCid: - return new (Z) TypeRefSerializationCluster(); + return new (Z) TypeRefSerializationCluster(type_testing_stubs_); case kTypeParameterCid: - return new (Z) TypeParameterSerializationCluster(); + return new (Z) TypeParameterSerializationCluster(type_testing_stubs_); case kBoundedTypeCid: return new (Z) BoundedTypeSerializationCluster(); case kClosureCid: @@ -5120,7 +5235,8 @@ Deserializer::Deserializer(Thread* thread, const uint8_t* buffer, intptr_t size, const uint8_t* instructions_buffer, - const uint8_t* data_buffer) + const uint8_t* data_buffer, + bool vm_isolate) : StackResource(thread), heap_(thread->isolate()->heap()), zone_(thread->zone()), @@ -5129,7 +5245,8 @@ Deserializer::Deserializer(Thread* thread, image_reader_(NULL), refs_(NULL), next_ref_index_(1), - clusters_(NULL) { + clusters_(NULL), + vm_isolate_(vm_isolate) { if (Snapshot::IncludesCode(kind)) { ASSERT(instructions_buffer != NULL); ASSERT(data_buffer != NULL); @@ -5716,7 +5833,8 @@ intptr_t FullSnapshotWriter::WriteVMSnapshot() { ASSERT(vm_snapshot_data_buffer_ != NULL); Serializer serializer(thread(), kind_, vm_snapshot_data_buffer_, alloc_, - kInitialSize, vm_image_writer_); + kInitialSize, vm_image_writer_, + /*vm_isolate=*/true); serializer.ReserveHeader(); serializer.WriteVersionAndFeatures(true); @@ -5746,7 +5864,8 @@ void FullSnapshotWriter::WriteIsolateSnapshot(intptr_t num_base_objects) { thread(), Timeline::GetIsolateStream(), "WriteIsolateSnapshot")); Serializer serializer(thread(), kind_, isolate_snapshot_data_buffer_, alloc_, - kInitialSize, isolate_image_writer_); + kInitialSize, isolate_image_writer_, + /*vm_isolate=*/false); ObjectStore* object_store = isolate()->object_store(); ASSERT(object_store != NULL); @@ -5816,7 +5935,8 @@ FullSnapshotReader::FullSnapshotReader(const Snapshot* snapshot, RawApiError* FullSnapshotReader::ReadVMSnapshot() { Deserializer deserializer(thread_, kind_, buffer_, size_, - instructions_buffer_, data_buffer_); + instructions_buffer_, data_buffer_, + /*vm_isolate=*/true); RawApiError* error = deserializer.VerifyVersionAndFeatures(/*isolate=*/NULL); if (error != ApiError::null()) { @@ -5840,7 +5960,8 @@ RawApiError* FullSnapshotReader::ReadVMSnapshot() { RawApiError* FullSnapshotReader::ReadIsolateSnapshot() { Deserializer deserializer(thread_, kind_, buffer_, size_, - instructions_buffer_, data_buffer_); + instructions_buffer_, data_buffer_, + /*vm_isolate=*/false); RawApiError* error = deserializer.VerifyVersionAndFeatures(thread_->isolate()); diff --git a/runtime/vm/clustered_snapshot.h b/runtime/vm/clustered_snapshot.h index 4900da6305c5..ff618f8e26c5 100644 --- a/runtime/vm/clustered_snapshot.h +++ b/runtime/vm/clustered_snapshot.h @@ -15,6 +15,7 @@ #include "vm/heap.h" #include "vm/object.h" #include "vm/snapshot.h" +#include "vm/type_testing_stubs.h" #include "vm/version.h" #if defined(DEBUG) @@ -131,7 +132,8 @@ class Serializer : public StackResource { uint8_t** buffer, ReAlloc alloc, intptr_t initial_size, - ImageWriter* image_writer_); + ImageWriter* image_writer_, + bool vm_isolate); ~Serializer(); intptr_t WriteVMSnapshot(const Array& symbols, @@ -261,7 +263,10 @@ class Serializer : public StackResource { Snapshot::Kind kind() const { return kind_; } intptr_t next_ref_index() const { return next_ref_index_; } + bool for_vm_isolate() const { return vm_isolate_; } + private: + TypeTestingStubFinder type_testing_stubs_; Heap* heap_; 
Zone* zone_; Snapshot::Kind kind_; @@ -274,6 +279,7 @@ class Serializer : public StackResource { intptr_t num_written_objects_; intptr_t next_ref_index_; SmiObjectIdMap smi_ids_; + bool vm_isolate_; #if defined(SNAPSHOT_BACKTRACE) RawObject* current_parent_; @@ -290,7 +296,8 @@ class Deserializer : public StackResource { const uint8_t* buffer, intptr_t size, const uint8_t* instructions_buffer, - const uint8_t* data_buffer); + const uint8_t* data_buffer, + bool vm_isolate); ~Deserializer(); void ReadIsolateSnapshot(ObjectStore* object_store); @@ -362,6 +369,7 @@ class Deserializer : public StackResource { intptr_t next_index() const { return next_ref_index_; } Heap* heap() const { return heap_; } Snapshot::Kind kind() const { return kind_; } + bool for_vm_isolate() const { return vm_isolate_; } private: Heap* heap_; @@ -375,6 +383,7 @@ class Deserializer : public StackResource { RawArray* refs_; intptr_t next_ref_index_; DeserializationCluster** clusters_; + bool vm_isolate_; }; class FullSnapshotWriter { diff --git a/runtime/vm/compiler/assembler/assembler_arm.h b/runtime/vm/compiler/assembler/assembler_arm.h index b4f890a4486d..ab6f811cb12e 100644 --- a/runtime/vm/compiler/assembler/assembler_arm.h +++ b/runtime/vm/compiler/assembler/assembler_arm.h @@ -353,6 +353,13 @@ class Assembler : public ValueObject { void Bind(Label* label); void Jump(Label* label) { b(label); } + void LoadField(Register dst, FieldAddress address) { ldr(dst, address); } + + void CompareWithFieldValue(Register value, FieldAddress address) { + ldr(TMP, address); + cmp(value, Operand(TMP)); + } + // Misc. functionality intptr_t CodeSize() const { return buffer_.Size(); } intptr_t prologue_offset() const { return prologue_offset_; } diff --git a/runtime/vm/compiler/assembler/assembler_arm64.h b/runtime/vm/compiler/assembler/assembler_arm64.h index cfe69b962689..8961a811d8f6 100644 --- a/runtime/vm/compiler/assembler/assembler_arm64.h +++ b/runtime/vm/compiler/assembler/assembler_arm64.h @@ -438,6 +438,8 @@ class Assembler : public ValueObject { void Bind(Label* label); void Jump(Label* label) { b(label); } + void LoadField(Register dst, FieldAddress address) { ldr(dst, address); } + // Misc. functionality intptr_t CodeSize() const { return buffer_.Size(); } intptr_t prologue_offset() const { return prologue_offset_; } @@ -1506,6 +1508,7 @@ class Assembler : public ValueObject { void EnterFrame(intptr_t frame_size); void LeaveFrame(); + void Ret() { ret(LR); } void CheckCodePointer(); void RestoreCodePointer(); diff --git a/runtime/vm/compiler/assembler/assembler_x64.h b/runtime/vm/compiler/assembler/assembler_x64.h index 83a80fff9ffe..5b85011d70cd 100644 --- a/runtime/vm/compiler/assembler/assembler_x64.h +++ b/runtime/vm/compiler/assembler/assembler_x64.h @@ -651,6 +651,7 @@ class Assembler : public ValueObject { } // Methods for High-level operations and implemented on all architectures. + void Ret() { ret(); } void CompareRegisters(Register a, Register b); void BranchIf(Condition condition, Label* label) { j(condition, label); } @@ -799,6 +800,12 @@ class Assembler : public ValueObject { void Bind(Label* label); void Jump(Label* label) { jmp(label); } + void LoadField(Register dst, FieldAddress address) { movq(dst, address); } + + void CompareWithFieldValue(Register value, FieldAddress address) { + cmpq(value, address); + } + void Comment(const char* format, ...) 
PRINTF_ATTRIBUTE(2, 3); static bool EmittingComments(); diff --git a/runtime/vm/compiler/assembler/assembler_x64_test.cc b/runtime/vm/compiler/assembler/assembler_x64_test.cc index a18453c1f44c..202a3e114ea0 100644 --- a/runtime/vm/compiler/assembler/assembler_x64_test.cc +++ b/runtime/vm/compiler/assembler/assembler_x64_test.cc @@ -3226,7 +3226,7 @@ ASSEMBLER_TEST_RUN(PackedDoubleNegate, test) { EXPECT_FLOAT_EQ(-1.0, res, 0.000001f); EXPECT_DISASSEMBLY_NOT_WINDOWS_ENDS_WITH( "movups xmm10,[rax]\n" - "movq r11,[thr+0xf8]\n" + "movq r11,[thr+0x...]\n" "xorpd xmm10,[r11]\n" "movaps xmm0,xmm10\n" "pop thr\n" diff --git a/runtime/vm/compiler/backend/flow_graph_compiler.cc b/runtime/vm/compiler/backend/flow_graph_compiler.cc index 074df7523dda..dd93b428a353 100644 --- a/runtime/vm/compiler/backend/flow_graph_compiler.cc +++ b/runtime/vm/compiler/backend/flow_graph_compiler.cc @@ -1855,29 +1855,14 @@ void FlowGraphCompiler::EmitTestAndCall(const CallTargets& targets, } } -bool FlowGraphCompiler::GenerateSubclassTypeCheck(Register class_id_reg, +bool FlowGraphCompiler::GenerateSubtypeRangeCheck(Register class_id_reg, const Class& type_class, Label* is_subtype) { HierarchyInfo* hi = Thread::Current()->hierarchy_info(); if (hi != NULL) { - // We test up to 4 different cid ranges, if we would need to test more in - // order to get a definite answer we fall back to the old mechanism (namely - // of going into the subtyping cache) - static const intptr_t kMaxNumberOfCidRangesToTest = 4; - const CidRangeVector& ranges = hi->SubtypeRangesForClass(type_class); if (ranges.length() <= kMaxNumberOfCidRangesToTest) { - Label fail; - int bias = 0; - for (intptr_t i = 0; i < ranges.length(); ++i) { - const CidRange& range = ranges[i]; - if (!range.IsIllegalRange()) { - bias = EmitTestAndCallCheckCid(assembler(), is_subtype, class_id_reg, - range, bias, - /*jump_on_miss=*/false); - } - } - __ Bind(&fail); + GenerateCidRangesCheck(assembler(), class_id_reg, ranges, is_subtype); return true; } } @@ -1891,6 +1876,46 @@ bool FlowGraphCompiler::GenerateSubclassTypeCheck(Register class_id_reg, return false; } +void FlowGraphCompiler::GenerateCidRangesCheck(Assembler* assembler, + Register class_id_reg, + const CidRangeVector& cid_ranges, + Label* is_subtype) { + Label fail; + int bias = 0; + for (intptr_t i = 0; i < cid_ranges.length(); ++i) { + const CidRange& range = cid_ranges[i]; + if (!range.IsIllegalRange()) { + bias = EmitTestAndCallCheckCid(assembler, is_subtype, class_id_reg, range, + bias, + /*jump_on_miss=*/false); + } + } + assembler->Bind(&fail); +} + +void FlowGraphCompiler::GenerateAssertAssignableAOT( + const AbstractType& dst_type, + const String& dst_name, + const Register instance_reg, + const Register instantiator_type_args_reg, + const Register function_type_args_reg, + const Register subtype_cache_reg, + const Register dst_type_reg, + const Register dst_name_reg, + Label* done) { + // If the int type is assignable to [dst_type] we special case it on the + // caller side! 
+ const Type& int_type = Type::Handle(zone(), Type::IntType()); + if (int_type.IsSubtypeOf(dst_type, NULL, NULL, Heap::kOld)) { + __ BranchIfSmi(instance_reg, done); + } + + __ LoadObject(dst_type_reg, dst_type); + __ LoadObject(dst_name_reg, dst_name); + __ LoadObject(subtype_cache_reg, + SubtypeTestCache::ZoneHandle(zone(), SubtypeTestCache::New())); +} + #undef __ #endif diff --git a/runtime/vm/compiler/backend/flow_graph_compiler.h b/runtime/vm/compiler/backend/flow_graph_compiler.h index b6daea040ae7..b55066240425 100644 --- a/runtime/vm/compiler/backend/flow_graph_compiler.h +++ b/runtime/vm/compiler/backend/flow_graph_compiler.h @@ -332,6 +332,21 @@ class FlowGraphCompiler : public ValueObject { const AbstractType& dst_type, const String& dst_name, LocationSummary* locs); + void GenerateAssertAssignableAOT(TokenPosition token_pos, + intptr_t deopt_id, + const AbstractType& dst_type, + const String& dst_name, + LocationSummary* locs); + + void GenerateAssertAssignableAOT(const AbstractType& dst_type, + const String& dst_name, + const Register instance_reg, + const Register instantiator_type_args_reg, + const Register function_type_args_reg, + const Register subtype_cache_reg, + const Register dst_type_reg, + const Register dst_name_reg, + Label* done); // DBC emits calls very differently from all other architectures due to its // interpreted nature. @@ -400,10 +415,21 @@ class FlowGraphCompiler : public ValueObject { // Returns true if no further checks are necessary but the code coming after // the emitted code here is still required do a runtime call (for the negative // case of throwing an exception). - bool GenerateSubclassTypeCheck(Register class_id_reg, + bool GenerateSubtypeRangeCheck(Register class_id_reg, const Class& type_class, Label* is_subtype_lbl); + // We test up to 4 different cid ranges, if we would need to test more in + // order to get a definite answer we fall back to the old mechanism (namely + // of going into the subtyping cache) + static const intptr_t kMaxNumberOfCidRangesToTest = 4; + + // Falls through to false. + static void GenerateCidRangesCheck(Assembler* assembler, + Register class_id_reg, + const CidRangeVector& cid_ranges, + Label* is_subtype_lbl); + void EmitOptimizedInstanceCall(const StubEntry& stub_entry, const ICData& ic_data, intptr_t deopt_id, diff --git a/runtime/vm/compiler/backend/flow_graph_compiler_arm.cc b/runtime/vm/compiler/backend/flow_graph_compiler_arm.cc index bdbf67e4da38..8611b07349e5 100644 --- a/runtime/vm/compiler/backend/flow_graph_compiler_arm.cc +++ b/runtime/vm/compiler/backend/flow_graph_compiler_arm.cc @@ -381,7 +381,7 @@ bool FlowGraphCompiler::GenerateInstantiatedTypeNoArgumentsTest( // Fast case for cid-range based checks. // Warning: This code destroys the contents of [kClassIdReg]. - if (GenerateSubclassTypeCheck(kClassIdReg, type_class, is_instance_lbl)) { + if (GenerateSubtypeRangeCheck(kClassIdReg, type_class, is_instance_lbl)) { return false; } @@ -641,15 +641,14 @@ void FlowGraphCompiler::GenerateAssertAssignable(TokenPosition token_pos, !dst_type.IsVoidType())); const Register kInstantiatorTypeArgumentsReg = R2; const Register kFunctionTypeArgumentsReg = R1; - __ PushList((1 << kInstantiatorTypeArgumentsReg) | - (1 << kFunctionTypeArgumentsReg)); - // A null object is always assignable and is returned as result. - Label is_assignable, runtime_call; - __ CompareObject(R0, Object::null_object()); - __ b(&is_assignable, EQ); // Generate throw new TypeError() if the type is malformed or malbounded. 
if (dst_type.IsMalformedOrMalbounded()) { + // A null object is always assignable and is returned as result. + Label is_assignable; + __ CompareObject(R0, Object::null_object()); + __ b(&is_assignable, EQ); + __ PushObject(Object::null_object()); // Make room for the result. __ Push(R0); // Push the source object. __ PushObject(dst_name); // Push the name of the destination. @@ -660,37 +659,79 @@ void FlowGraphCompiler::GenerateAssertAssignable(TokenPosition token_pos, __ bkpt(0); __ Bind(&is_assignable); // For a null object. + return; + } + + if (FLAG_precompiled_mode && !dst_type.IsDartFunctionType() && + !dst_type.IsFunctionType() && + (dst_type.IsType() || dst_type.IsTypeParameter())) { + GenerateAssertAssignableAOT(token_pos, deopt_id, dst_type, dst_name, locs); + } else { + Label is_assignable_fast, is_assignable, runtime_call; + + // A null object is always assignable and is returned as result. + __ CompareObject(R0, Object::null_object()); + __ b(&is_assignable_fast, EQ); + + __ PushList((1 << kInstantiatorTypeArgumentsReg) | + (1 << kFunctionTypeArgumentsReg)); + + // Generate inline type check, linking to runtime call if not assignable. + SubtypeTestCache& test_cache = SubtypeTestCache::ZoneHandle(zone()); + test_cache = GenerateInlineInstanceof(token_pos, dst_type, &is_assignable, + &runtime_call); + + __ Bind(&runtime_call); + __ ldm(IA, SP, + (1 << kFunctionTypeArgumentsReg) | + (1 << kInstantiatorTypeArgumentsReg)); + __ PushObject(Object::null_object()); // Make room for the result. + __ Push(R0); // Push the source object. + __ PushObject(dst_type); // Push the type of the destination. + __ PushList((1 << kInstantiatorTypeArgumentsReg) | + (1 << kFunctionTypeArgumentsReg)); + __ PushObject(dst_name); // Push the name of the destination. + __ LoadUniqueObject(R0, test_cache); + __ Push(R0); + GenerateRuntimeCall(token_pos, deopt_id, kTypeCheckRuntimeEntry, 6, locs); + // Pop the parameters supplied to the runtime entry. The result of the + // type check runtime call is the checked value. + __ Drop(6); + __ Pop(R0); + __ Bind(&is_assignable); __ PopList((1 << kFunctionTypeArgumentsReg) | (1 << kInstantiatorTypeArgumentsReg)); - return; + __ Bind(&is_assignable_fast); } +} - // Generate inline type check, linking to runtime call if not assignable. - SubtypeTestCache& test_cache = SubtypeTestCache::ZoneHandle(zone()); - test_cache = GenerateInlineInstanceof(token_pos, dst_type, &is_assignable, - &runtime_call); - - __ Bind(&runtime_call); - __ ldm( - IA, SP, - (1 << kFunctionTypeArgumentsReg) | (1 << kInstantiatorTypeArgumentsReg)); - __ PushObject(Object::null_object()); // Make room for the result. - __ Push(R0); // Push the source object. - __ PushObject(dst_type); // Push the type of the destination. - __ PushList((1 << kInstantiatorTypeArgumentsReg) | - (1 << kFunctionTypeArgumentsReg)); - __ PushObject(dst_name); // Push the name of the destination. - __ LoadUniqueObject(R0, test_cache); - __ Push(R0); - GenerateRuntimeCall(token_pos, deopt_id, kTypeCheckRuntimeEntry, 6, locs); - // Pop the parameters supplied to the runtime entry. The result of the - // type check runtime call is the checked value. 
- __ Drop(6); - __ Pop(R0); - - __ Bind(&is_assignable); - __ PopList((1 << kFunctionTypeArgumentsReg) | - (1 << kInstantiatorTypeArgumentsReg)); +void FlowGraphCompiler::GenerateAssertAssignableAOT( + TokenPosition token_pos, + intptr_t deopt_id, + const AbstractType& dst_type, + const String& dst_name, + LocationSummary* locs) { + const Register kInstanceReg = R0; + const Register kInstantiatorTypeArgumentsReg = R2; + const Register kFunctionTypeArgumentsReg = R1; + + const Register kSubtypeTestCacheReg = R3; + const Register kDstTypeReg = R8; + const Register kDstNameReg = R4; + + Label done; + + GenerateAssertAssignableAOT(dst_type, dst_name, kInstanceReg, + kInstantiatorTypeArgumentsReg, + kFunctionTypeArgumentsReg, kSubtypeTestCacheReg, + kDstTypeReg, kDstNameReg, &done); + + __ LoadField(R9, + FieldAddress(kDstTypeReg, + AbstractType::type_test_stub_entry_point_offset())); + __ blx(R9); + EmitCallsiteMetadata(token_pos, deopt_id, RawPcDescriptors::kOther, locs); + __ Bind(&done); } void FlowGraphCompiler::EmitInstructionEpilogue(Instruction* instr) { diff --git a/runtime/vm/compiler/backend/flow_graph_compiler_arm64.cc b/runtime/vm/compiler/backend/flow_graph_compiler_arm64.cc index dd22a371cfb8..6b86eb54b64a 100644 --- a/runtime/vm/compiler/backend/flow_graph_compiler_arm64.cc +++ b/runtime/vm/compiler/backend/flow_graph_compiler_arm64.cc @@ -367,7 +367,7 @@ bool FlowGraphCompiler::GenerateInstantiatedTypeNoArgumentsTest( // Fast case for cid-range based checks. // Warning: This code destroys the contents of [kClassIdReg]. - if (GenerateSubclassTypeCheck(kClassIdReg, type_class, is_instance_lbl)) { + if (GenerateSubtypeRangeCheck(kClassIdReg, type_class, is_instance_lbl)) { return false; } @@ -622,14 +622,14 @@ void FlowGraphCompiler::GenerateAssertAssignable(TokenPosition token_pos, !dst_type.IsVoidType())); const Register kInstantiatorTypeArgumentsReg = R1; const Register kFunctionTypeArgumentsReg = R2; - __ PushPair(kFunctionTypeArgumentsReg, kInstantiatorTypeArgumentsReg); - // A null object is always assignable and is returned as result. - Label is_assignable, runtime_call; - __ CompareObject(R0, Object::null_object()); - __ b(&is_assignable, EQ); // Generate throw new TypeError() if the type is malformed or malbounded. if (dst_type.IsMalformedOrMalbounded()) { + // A null object is always assignable and is returned as result. + Label is_assignable, runtime_call; + __ CompareObject(R0, Object::null_object()); + __ b(&is_assignable, EQ); + __ PushObject(Object::null_object()); // Make room for the result. __ Push(R0); // Push the source object. __ PushObject(dst_name); // Push the name of the destination. @@ -640,33 +640,75 @@ void FlowGraphCompiler::GenerateAssertAssignable(TokenPosition token_pos, __ brk(0); __ Bind(&is_assignable); // For a null object. - __ PopPair(kFunctionTypeArgumentsReg, kInstantiatorTypeArgumentsReg); return; } - // Generate inline type check, linking to runtime call if not assignable. - SubtypeTestCache& test_cache = SubtypeTestCache::ZoneHandle(zone()); - test_cache = GenerateInlineInstanceof(token_pos, dst_type, &is_assignable, - &runtime_call); - - __ Bind(&runtime_call); - __ ldp(kFunctionTypeArgumentsReg, kInstantiatorTypeArgumentsReg, - Address(SP, 0 * kWordSize, Address::PairOffset)); - __ PushObject(Object::null_object()); // Make room for the result. - __ Push(R0); // Push the source object. - __ PushObject(dst_type); // Push the type of the destination. 
- __ PushPair(kFunctionTypeArgumentsReg, kInstantiatorTypeArgumentsReg); - __ PushObject(dst_name); // Push the name of the destination. - __ LoadUniqueObject(R0, test_cache); - __ Push(R0); - GenerateRuntimeCall(token_pos, deopt_id, kTypeCheckRuntimeEntry, 6, locs); - // Pop the parameters supplied to the runtime entry. The result of the - // type check runtime call is the checked value. - __ Drop(6); - __ Pop(R0); - - __ Bind(&is_assignable); - __ PopPair(kFunctionTypeArgumentsReg, kInstantiatorTypeArgumentsReg); + if (FLAG_precompiled_mode && !dst_type.IsDartFunctionType() && + !dst_type.IsFunctionType() && + (dst_type.IsType() || dst_type.IsTypeParameter())) { + GenerateAssertAssignableAOT(token_pos, deopt_id, dst_type, dst_name, locs); + } else { + Label is_assignable_fast, is_assignable, runtime_call; + + // A null object is always assignable and is returned as result. + __ CompareObject(R0, Object::null_object()); + __ b(&is_assignable_fast, EQ); + + __ PushPair(kFunctionTypeArgumentsReg, kInstantiatorTypeArgumentsReg); + + // Generate inline type check, linking to runtime call if not assignable. + SubtypeTestCache& test_cache = SubtypeTestCache::ZoneHandle(zone()); + test_cache = GenerateInlineInstanceof(token_pos, dst_type, &is_assignable, + &runtime_call); + + __ Bind(&runtime_call); + __ ldp(kFunctionTypeArgumentsReg, kInstantiatorTypeArgumentsReg, + Address(SP, 0 * kWordSize, Address::PairOffset)); + __ PushObject(Object::null_object()); // Make room for the result. + __ Push(R0); // Push the source object. + __ PushObject(dst_type); // Push the type of the destination. + __ PushPair(kFunctionTypeArgumentsReg, kInstantiatorTypeArgumentsReg); + __ PushObject(dst_name); // Push the name of the destination. + __ LoadUniqueObject(R0, test_cache); + __ Push(R0); + GenerateRuntimeCall(token_pos, deopt_id, kTypeCheckRuntimeEntry, 6, locs); + // Pop the parameters supplied to the runtime entry. The result of the + // type check runtime call is the checked value. + __ Drop(6); + __ Pop(R0); + __ Bind(&is_assignable); + __ PopPair(kFunctionTypeArgumentsReg, kInstantiatorTypeArgumentsReg); + __ Bind(&is_assignable_fast); + } +} + +void FlowGraphCompiler::GenerateAssertAssignableAOT( + TokenPosition token_pos, + intptr_t deopt_id, + const AbstractType& dst_type, + const String& dst_name, + LocationSummary* locs) { + const Register kInstanceReg = R0; + const Register kInstantiatorTypeArgumentsReg = R1; + const Register kFunctionTypeArgumentsReg = R2; + + const Register kSubtypeTestCacheReg = R3; + const Register kDstTypeReg = R8; + const Register kDstNameReg = R4; + + Label done; + + GenerateAssertAssignableAOT(dst_type, dst_name, kInstanceReg, + kInstantiatorTypeArgumentsReg, + kFunctionTypeArgumentsReg, kSubtypeTestCacheReg, + kDstTypeReg, kDstNameReg, &done); + + __ LoadField(R9, + FieldAddress(kDstTypeReg, + AbstractType::type_test_stub_entry_point_offset())); + __ blr(R9); + EmitCallsiteMetadata(token_pos, deopt_id, RawPcDescriptors::kOther, locs); + __ Bind(&done); } void FlowGraphCompiler::EmitInstructionEpilogue(Instruction* instr) { diff --git a/runtime/vm/compiler/backend/flow_graph_compiler_ia32.cc b/runtime/vm/compiler/backend/flow_graph_compiler_ia32.cc index 712ab2eef270..0580e5cd9e3e 100644 --- a/runtime/vm/compiler/backend/flow_graph_compiler_ia32.cc +++ b/runtime/vm/compiler/backend/flow_graph_compiler_ia32.cc @@ -384,7 +384,7 @@ bool FlowGraphCompiler::GenerateInstantiatedTypeNoArgumentsTest( // Fast case for cid-range based checks. 
// Warning: This code destroys the contents of [kClassIdReg]. - if (GenerateSubclassTypeCheck(kClassIdReg, type_class, is_instance_lbl)) { + if (GenerateSubtypeRangeCheck(kClassIdReg, type_class, is_instance_lbl)) { return false; } diff --git a/runtime/vm/compiler/backend/flow_graph_compiler_x64.cc b/runtime/vm/compiler/backend/flow_graph_compiler_x64.cc index 25fb8a76bcb0..384ea50368f2 100644 --- a/runtime/vm/compiler/backend/flow_graph_compiler_x64.cc +++ b/runtime/vm/compiler/backend/flow_graph_compiler_x64.cc @@ -382,7 +382,7 @@ bool FlowGraphCompiler::GenerateInstantiatedTypeNoArgumentsTest( // Fast case for cid-range based checks. // Warning: This code destroys the contents of [kClassIdReg]. - if (GenerateSubclassTypeCheck(kClassIdReg, type_class, is_instance_lbl)) { + if (GenerateSubtypeRangeCheck(kClassIdReg, type_class, is_instance_lbl)) { return false; } @@ -630,13 +630,17 @@ void FlowGraphCompiler::GenerateAssertAssignable(TokenPosition token_pos, ASSERT(dst_type.IsMalformedOrMalbounded() || (!dst_type.IsDynamicType() && !dst_type.IsObjectType() && !dst_type.IsVoidType())); - // A null object is always assignable and is returned as result. - Label is_assignable, runtime_call; - __ CompareObject(RAX, Object::null_object()); - __ j(EQUAL, &is_assignable); + const Register kInstantiatorTypeArgumentsReg = RDX; + const Register kFunctionTypeArgumentsReg = RCX; + + // A null object is always assignable and is returned as result. // Generate throw new TypeError() if the type is malformed or malbounded. if (dst_type.IsMalformedOrMalbounded()) { + Label is_assignable; + __ CompareObject(RAX, Object::null_object()); + __ j(EQUAL, &is_assignable); + __ PushObject(Object::null_object()); // Make room for the result. __ pushq(RAX); // Push the source object. __ PushObject(dst_name); // Push the name of the destination. @@ -650,28 +654,65 @@ void FlowGraphCompiler::GenerateAssertAssignable(TokenPosition token_pos, return; } - // Generate inline type check, linking to runtime call if not assignable. - SubtypeTestCache& test_cache = SubtypeTestCache::ZoneHandle(zone()); - // The registers RAX, RCX, RDX are preserved across the call. - test_cache = GenerateInlineInstanceof(token_pos, dst_type, &is_assignable, - &runtime_call); - - __ Bind(&runtime_call); - __ PushObject(Object::null_object()); // Make room for the result. - __ pushq(RAX); // Push the source object. - __ PushObject(dst_type); // Push the type of the destination. - __ pushq(RDX); // Instantiator type arguments. - __ pushq(RCX); // Function type arguments. - __ PushObject(dst_name); // Push the name of the destination. - __ LoadUniqueObject(RAX, test_cache); - __ pushq(RAX); - GenerateRuntimeCall(token_pos, deopt_id, kTypeCheckRuntimeEntry, 6, locs); - // Pop the parameters supplied to the runtime entry. The result of the - // type check runtime call is the checked value. - __ Drop(6); - __ popq(RAX); - - __ Bind(&is_assignable); + if (FLAG_precompiled_mode && !dst_type.IsDartFunctionType() && + !dst_type.IsFunctionType() && + (dst_type.IsType() || dst_type.IsTypeParameter())) { + GenerateAssertAssignableAOT(token_pos, deopt_id, dst_type, dst_name, locs); + } else { + Label is_assignable, runtime_call; + + // A null object is always assignable and is returned as result. + __ CompareObject(RAX, Object::null_object()); + __ j(EQUAL, &is_assignable); + + // Generate inline type check, linking to runtime call if not assignable. 
+ SubtypeTestCache& test_cache = SubtypeTestCache::ZoneHandle(zone()); + // The registers RAX, RCX, RDX are preserved across the call. + test_cache = GenerateInlineInstanceof(token_pos, dst_type, &is_assignable, + &runtime_call); + + __ Bind(&runtime_call); + __ PushObject(Object::null_object()); // Make room for the result. + __ pushq(RAX); // Push the source object. + __ PushObject(dst_type); // Push the type of the destination. + __ pushq(kInstantiatorTypeArgumentsReg); + __ pushq(kFunctionTypeArgumentsReg); + __ PushObject(dst_name); // Push the name of the destination. + __ LoadUniqueObject(RAX, test_cache); + __ pushq(RAX); + GenerateRuntimeCall(token_pos, deopt_id, kTypeCheckRuntimeEntry, 6, locs); + // Pop the parameters supplied to the runtime entry. The result of the + // type check runtime call is the checked value. + __ Drop(6); + __ popq(RAX); + __ Bind(&is_assignable); + } +} + +void FlowGraphCompiler::GenerateAssertAssignableAOT( + TokenPosition token_pos, + intptr_t deopt_id, + const AbstractType& dst_type, + const String& dst_name, + LocationSummary* locs) { + const Register kInstanceReg = RAX; + const Register kInstantiatorTypeArgumentsReg = RDX; + const Register kFunctionTypeArgumentsReg = RCX; + + Label done; + + const Register subtype_cache_reg = R9; + const Register dst_type_reg = RBX; + const Register dst_name_reg = R10; + + GenerateAssertAssignableAOT(dst_type, dst_name, kInstanceReg, + kInstantiatorTypeArgumentsReg, + kFunctionTypeArgumentsReg, subtype_cache_reg, + dst_type_reg, dst_name_reg, &done); + + __ call(FieldAddress(RBX, AbstractType::type_test_stub_entry_point_offset())); + EmitCallsiteMetadata(token_pos, deopt_id, RawPcDescriptors::kOther, locs); + __ Bind(&done); } void FlowGraphCompiler::EmitInstructionEpilogue(Instruction* instr) { diff --git a/runtime/vm/dart.cc b/runtime/vm/dart.cc index c0039667349b..fb393bd1983b 100644 --- a/runtime/vm/dart.cc +++ b/runtime/vm/dart.cc @@ -217,6 +217,7 @@ char* Dart::InitOnce(const uint8_t* vm_isolate_snapshot, return strdup("Precompiled runtime requires a precompiled snapshot"); #else StubCode::InitOnce(); + Object::FinishInitOnce(vm_isolate_); // MallocHooks can't be initialized until StubCode has been since stack // trace generation relies on stub methods that are generated in // StubCode::InitOnce(). @@ -234,6 +235,7 @@ char* Dart::InitOnce(const uint8_t* vm_isolate_snapshot, // Must copy before leaving the zone. return strdup(error.ToErrorCString()); } + Object::FinishInitOnce(vm_isolate_); #if !defined(PRODUCT) if (tds.enabled()) { tds.SetNumArguments(2); @@ -262,6 +264,7 @@ char* Dart::InitOnce(const uint8_t* vm_isolate_snapshot, #else vm_snapshot_kind_ = Snapshot::kNone; StubCode::InitOnce(); + Object::FinishInitOnce(vm_isolate_); // MallocHooks can't be initialized until StubCode has been since stack // trace generation relies on stub methods that are generated in // StubCode::InitOnce(). 
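The AOT fast path above is the crux of the CL: the call site loads a raw entry
point out of the type object (via
AbstractType::type_test_stub_entry_point_offset()) and calls it directly, with
no Code object in CODE_REG and no object pool. A minimal standalone C++ sketch
of that mechanism follows; the struct and function names are simplifications
for illustration, not the VM's actual declarations:

    #include <cstdint>

    struct Instance;  // Opaque stand-in for a heap object.

    // A type testing stub takes the instance and answers the subtype
    // question; the real stubs instead tail-call a slow-path stub when
    // they cannot decide.
    typedef bool (*TypeTestStub)(Instance* instance);

    struct SimpleAbstractType {
      // Mirrors RawAbstractType::type_test_stub_entry_point_: a raw
      // address rather than a Code object, so callers need no pool access.
      uintptr_t type_test_stub_entry_point;
    };

    // Analogous to AbstractType::SetTypeTestingStub storing
    // instr.UncheckedEntryPoint().
    void SetTypeTestingStub(SimpleAbstractType* type, TypeTestStub stub) {
      type->type_test_stub_entry_point = reinterpret_cast<uintptr_t>(stub);
    }

    // Analogous to the emitted `call [kDstTypeReg + entry_point_offset]`:
    // one load from the type object, then an indirect call.
    bool AssertAssignable(SimpleAbstractType* type, Instance* instance) {
      TypeTestStub stub =
          reinterpret_cast<TypeTestStub>(type->type_test_stub_entry_point);
      return stub(instance);
    }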
diff --git a/runtime/vm/image_snapshot.cc b/runtime/vm/image_snapshot.cc index 9900a7c7f099..5947ee64ef9a 100644 --- a/runtime/vm/image_snapshot.cc +++ b/runtime/vm/image_snapshot.cc @@ -10,6 +10,7 @@ #include "vm/object.h" #include "vm/stub_code.h" #include "vm/timeline.h" +#include "vm/type_testing_stubs.h" namespace dart { @@ -149,6 +150,7 @@ void AssemblyImageWriter::WriteText(WriteStream* clustered_stream, bool vm) { Object& owner = Object::Handle(zone); String& str = String::Handle(zone); + TypeTestingStubFinder tts; for (intptr_t i = 0; i < instructions_.length(); i++) { const Instructions& insns = *instructions_[i].insns_; const Code& code = *instructions_[i].code_; @@ -180,28 +182,41 @@ void AssemblyImageWriter::WriteText(WriteStream* clustered_stream, bool vm) { // 2. Write a label at the entry point. // Linux's perf uses these labels. - owner = code.owner(); - if (owner.IsNull()) { - const char* name = StubCode::NameOfStub(insns.UncheckedEntryPoint()); - assembly_stream_.Print("Precompiled_Stub_%s:\n", name); - } else if (owner.IsClass()) { - str = Class::Cast(owner).Name(); - const char* name = str.ToCString(); - EnsureIdentifier(const_cast(name)); - assembly_stream_.Print("Precompiled_AllocationStub_%s_%" Pd ":\n", name, - i); - } else if (owner.IsFunction()) { - const char* name = Function::Cast(owner).ToQualifiedCString(); - EnsureIdentifier(const_cast(name)); - assembly_stream_.Print("Precompiled_%s_%" Pd ":\n", name, i); + if (code.IsNull()) { + const char* name = tts.StubNameFromAddresss(insns.UncheckedEntryPoint()); + assembly_stream_.Print("Precompiled_%s:\n", name); } else { - UNREACHABLE(); + owner = code.owner(); + if (owner.IsNull()) { + const char* name = StubCode::NameOfStub(insns.UncheckedEntryPoint()); + if (name != NULL) { + assembly_stream_.Print("Precompiled_Stub_%s:\n", name); + } else { + const char* name = + tts.StubNameFromAddresss(insns.UncheckedEntryPoint()); + assembly_stream_.Print("Precompiled__%s:\n", name); + } + } else if (owner.IsClass()) { + str = Class::Cast(owner).Name(); + const char* name = str.ToCString(); + EnsureIdentifier(const_cast(name)); + assembly_stream_.Print("Precompiled_AllocationStub_%s_%" Pd ":\n", name, + i); + } else if (owner.IsFunction()) { + const char* name = Function::Cast(owner).ToQualifiedCString(); + EnsureIdentifier(const_cast(name)); + assembly_stream_.Print("Precompiled_%s_%" Pd ":\n", name, i); + } else { + UNREACHABLE(); + } } #ifdef DART_PRECOMPILER // Create a label for use by DWARF. 
- intptr_t dwarf_index = dwarf_->AddCode(code); - assembly_stream_.Print(".Lcode%" Pd ":\n", dwarf_index); + if (!code.IsNull()) { + const intptr_t dwarf_index = dwarf_->AddCode(code); + assembly_stream_.Print(".Lcode%" Pd ":\n", dwarf_index); + } #endif { diff --git a/runtime/vm/object.cc b/runtime/vm/object.cc index 7c1f494be62b..2fda3f4b7799 100644 --- a/runtime/vm/object.cc +++ b/runtime/vm/object.cc @@ -42,12 +42,14 @@ #include "vm/runtime_entry.h" #include "vm/scopes.h" #include "vm/stack_frame.h" +#include "vm/stub_code.h" #include "vm/symbols.h" #include "vm/tags.h" #include "vm/thread_registry.h" #include "vm/timeline.h" #include "vm/timer.h" #include "vm/type_table.h" +#include "vm/type_testing_stubs.h" #include "vm/unicode.h" #include "vm/weak_code.h" #include "vm/zone_text_buffer.h" @@ -999,6 +1001,22 @@ void Object::InitOnce(Isolate* isolate) { ASSERT(extractor_parameter_names_->IsArray()); } +void Object::FinishInitOnce(Isolate* isolate) { + // The type testing stubs we initialize in AbstractType objects for the + // canonical type of kDynamicCid/kVoidCid/kVectorCid need to be set in this + // method, which is called after StubCode::InitOnce(). + Instructions& instr = Instructions::Handle(); + + instr = TypeTestingStubGenerator::DefaultCodeForType(*dynamic_type_); + dynamic_type_->SetTypeTestingStub(instr); + + instr = TypeTestingStubGenerator::DefaultCodeForType(*void_type_); + void_type_->SetTypeTestingStub(instr); + + instr = TypeTestingStubGenerator::DefaultCodeForType(*vector_type_); + vector_type_->SetTypeTestingStub(instr); +} + // An object visitor which will mark all visited objects. This is used to // premark all objects in the vm_isolate_ heap. Also precalculates hash // codes so that we can get the identity hash code of objects in the read- @@ -16934,6 +16952,19 @@ const char* AbstractType::ToCString() const { return "AbstractType"; } +void AbstractType::SetTypeTestingStub(const Instructions& instr) const { + if (instr.IsNull()) { + // This only happens during bootstrapping when creating Type objects before + // we have the instructions. + ASSERT(type_class_id() == kDynamicCid || type_class_id() == kVoidCid || + type_class_id() == kVectorCid); + StoreNonPointer(&raw_ptr()->type_test_stub_entry_point_, 0); + } else { + StoreNonPointer(&raw_ptr()->type_test_stub_entry_point_, + instr.UncheckedEntryPoint()); + } +} + RawType* Type::NullType() { return Isolate::Current()->object_store()->null_type(); } @@ -17842,7 +17873,8 @@ RawType* Type::New(const Object& clazz, const TypeArguments& arguments, TokenPosition token_pos, Heap::Space space) { - const Type& result = Type::Handle(Type::New(space)); + Zone* Z = Thread::Current()->zone(); + const Type& result = Type::Handle(Z, Type::New(space)); if (clazz.IsClass()) { result.set_type_class(Class::Cast(clazz)); } else { @@ -17852,6 +17884,9 @@ RawType* Type::New(const Object& clazz, result.SetHash(0); result.set_token_pos(token_pos); result.StoreNonPointer(&result.raw_ptr()->type_state_, RawType::kAllocated); + + result.SetTypeTestingStub(Instructions::Handle( + Z, TypeTestingStubGenerator::DefaultCodeForType(result))); return result.raw(); } @@ -17878,7 +17913,8 @@ const char* Type::ToCString() const { const char* class_name; if (HasResolvedTypeClass()) { cls = type_class(); - class_name = String::Handle(zone, cls.Name()).ToCString(); + const String& name = String::Handle(zone, cls.Name()); + class_name = name.IsNull() ? 
"" : name.ToCString(); } else { class_name = UnresolvedClass::Handle(zone, unresolved_class()).ToCString(); } @@ -17961,6 +17997,9 @@ RawTypeRef* TypeRef::InstantiateFrom( space); ASSERT(!instantiated_ref_type.IsTypeRef()); instantiated_type_ref.set_type(instantiated_ref_type); + + instantiated_type_ref.SetTypeTestingStub(Instructions::Handle( + TypeTestingStubGenerator::DefaultCodeForType(instantiated_type_ref))); return instantiated_type_ref.raw(); } @@ -18046,8 +18085,12 @@ RawTypeRef* TypeRef::New() { } RawTypeRef* TypeRef::New(const AbstractType& type) { - const TypeRef& result = TypeRef::Handle(TypeRef::New()); + Zone* Z = Thread::Current()->zone(); + const TypeRef& result = TypeRef::Handle(Z, TypeRef::New()); result.set_type(type); + + result.SetTypeTestingStub(Instructions::Handle( + Z, TypeTestingStubGenerator::DefaultCodeForType(result))); return result.raw(); } @@ -18349,7 +18392,8 @@ RawTypeParameter* TypeParameter::New(const Class& parameterized_class, const AbstractType& bound, TokenPosition token_pos) { ASSERT(parameterized_class.IsNull() != parameterized_function.IsNull()); - const TypeParameter& result = TypeParameter::Handle(TypeParameter::New()); + Zone* Z = Thread::Current()->zone(); + const TypeParameter& result = TypeParameter::Handle(Z, TypeParameter::New()); result.set_parameterized_class(parameterized_class); result.set_parameterized_function(parameterized_function); result.set_index(index); @@ -18359,6 +18403,9 @@ RawTypeParameter* TypeParameter::New(const Class& parameterized_class, result.set_token_pos(token_pos); result.StoreNonPointer(&result.raw_ptr()->type_state_, RawTypeParameter::kAllocated); + + result.SetTypeTestingStub(Instructions::Handle( + Z, TypeTestingStubGenerator::DefaultCodeForType(result))); return result.raw(); } @@ -18616,11 +18663,15 @@ RawBoundedType* BoundedType::New() { RawBoundedType* BoundedType::New(const AbstractType& type, const AbstractType& bound, const TypeParameter& type_parameter) { - const BoundedType& result = BoundedType::Handle(BoundedType::New()); + Zone* Z = Thread::Current()->zone(); + const BoundedType& result = BoundedType::Handle(Z, BoundedType::New()); result.set_type(type); result.set_bound(bound); result.SetHash(0); result.set_type_parameter(type_parameter); + + result.SetTypeTestingStub(Instructions::Handle( + Z, TypeTestingStubGenerator::DefaultCodeForType(result))); return result.raw(); } @@ -18691,9 +18742,13 @@ RawMixinAppType* MixinAppType::New() { RawMixinAppType* MixinAppType::New(const AbstractType& super_type, const Array& mixin_types) { - const MixinAppType& result = MixinAppType::Handle(MixinAppType::New()); + Zone* Z = Thread::Current()->zone(); + const MixinAppType& result = MixinAppType::Handle(Z, MixinAppType::New()); result.set_super_type(super_type); result.set_mixin_types(mixin_types); + + result.SetTypeTestingStub(Instructions::Handle( + Z, TypeTestingStubGenerator::DefaultCodeForType(result))); return result.raw(); } diff --git a/runtime/vm/object.h b/runtime/vm/object.h index 463013e80eff..1adf6baa6e98 100644 --- a/runtime/vm/object.h +++ b/runtime/vm/object.h @@ -42,9 +42,10 @@ class ArgumentsDescriptor; class Assembler; class Closure; class Code; -class DisassemblyFormatter; class DeoptInstr; +class DisassemblyFormatter; class FinalizablePersistentHandle; +class HierarchyInfo; class LocalScope; #define REUSABLE_FORWARD_DECLARATION(name) class Reusable##name##HandleScope; @@ -555,6 +556,7 @@ class Object { // Initialize the VM isolate. 
   static void InitNull(Isolate* isolate);
   static void InitOnce(Isolate* isolate);
+  static void FinishInitOnce(Isolate* isolate);
   static void FinalizeVMIsolate(Isolate* isolate);
 
   // Initialize a new isolate either from a Kernel IR, from source, or from a
@@ -4625,6 +4627,9 @@ class Code : public Object {
   }
 
   RawInstructions* instructions() const { return raw_ptr()->instructions_; }
+  static RawInstructions* InstructionsOf(const RawCode* code) {
+    return code->ptr()->instructions_;
+  }
 
   static intptr_t saved_instructions_offset() {
     return OFFSET_OF(RawCode, instructions_);
@@ -6096,6 +6101,16 @@ class AbstractType : public Instance {
                            const TypeArguments& instantiator_type_args,
                            const TypeArguments& function_type_args);
 
+  static intptr_t type_test_stub_entry_point_offset() {
+    return OFFSET_OF(RawAbstractType, type_test_stub_entry_point_);
+  }
+
+  uword type_test_stub_entry_point() const {
+    return raw_ptr()->type_test_stub_entry_point_;
+  }
+
+  void SetTypeTestingStub(const Instructions& instr) const;
+
  private:
   // Check the 'is subtype of' or 'is more specific than' relationship.
   bool TypeTest(TypeTestKind test_kind,
@@ -6135,6 +6150,12 @@ class Type : public AbstractType {
   static intptr_t type_class_id_offset() {
     return OFFSET_OF(RawType, type_class_id_);
   }
+  static intptr_t arguments_offset() {
+    return OFFSET_OF(RawType, arguments_);
+  }
+  static intptr_t type_state_offset() {
+    return OFFSET_OF(RawType, type_state_);
+  }
   static intptr_t hash_offset() { return OFFSET_OF(RawType, hash_); }
   virtual bool IsFinalized() const {
     return (raw_ptr()->type_state_ == RawType::kFinalizedInstantiated) ||
diff --git a/runtime/vm/raw_object.h b/runtime/vm/raw_object.h
index a9877902f8af..f613ef603ac9 100644
--- a/runtime/vm/raw_object.h
+++ b/runtime/vm/raw_object.h
@@ -1786,11 +1786,13 @@ class RawAbstractType : public RawInstance {
     kFinalizedInstantiated,    // Instantiated type ready for use.
     kFinalizedUninstantiated,  // Uninstantiated type ready for use.
   };
+  uword type_test_stub_entry_point_;  // Accessed from generated code.
 
  private:
   RAW_HEAP_OBJECT_IMPLEMENTATION(AbstractType);
 
   friend class ObjectStore;
+  friend class StubCode;
 };
 
 class RawType : public RawAbstractType {
diff --git a/runtime/vm/raw_object_snapshot.cc b/runtime/vm/raw_object_snapshot.cc
index e7387561c3a9..dd41cf19270b 100644
--- a/runtime/vm/raw_object_snapshot.cc
+++ b/runtime/vm/raw_object_snapshot.cc
@@ -218,6 +218,9 @@ RawType* Type::ReadFrom(SnapshotReader* reader,
   type.set_token_pos(TokenPosition::SnapshotDecode(reader->Read<int32_t>()));
   type.set_type_state(reader->Read<int8_t>());
 
+  // Read the code object for the type testing stub and set its entrypoint.
+  reader->EnqueueTypePostprocessing(type);
+
   // Set all the object fields.
   READ_OBJECT_FIELDS(type, type.raw()->from(), type.raw()->to(), kAsReference);
 
@@ -294,6 +297,9 @@ RawTypeRef* TypeRef::ReadFrom(SnapshotReader* reader,
   TypeRef& type_ref = TypeRef::ZoneHandle(reader->zone(), TypeRef::New());
   reader->AddBackRef(object_id, &type_ref, kIsDeserialized);
 
+  // Read the code object for the type testing stub and set its entrypoint.
+  reader->EnqueueTypePostprocessing(type_ref);
+
   // Set all the object fields.
   READ_OBJECT_FIELDS(type_ref, type_ref.raw()->from(), type_ref.raw()->to(),
                      kAsReference);
 
@@ -337,6 +343,9 @@ RawTypeParameter* TypeParameter::ReadFrom(SnapshotReader* reader,
   type_parameter.set_index(reader->Read<int16_t>());
   type_parameter.set_type_state(reader->Read<int8_t>());
 
+  // Read the code object for the type testing stub and set its entrypoint.
+ reader->EnqueueTypePostprocessing(type_parameter); + // Set all the object fields. READ_OBJECT_FIELDS(type_parameter, type_parameter.raw()->from(), type_parameter.raw()->to(), kAsReference); @@ -392,6 +401,9 @@ RawBoundedType* BoundedType::ReadFrom(SnapshotReader* reader, BoundedType::ZoneHandle(reader->zone(), BoundedType::New()); reader->AddBackRef(object_id, &bounded_type, kIsDeserialized); + // Read the code object for the type testing stub and set its entrypoint. + reader->EnqueueTypePostprocessing(bounded_type); + // Set all the object fields. READ_OBJECT_FIELDS(bounded_type, bounded_type.raw()->from(), bounded_type.raw()->to(), kAsReference); diff --git a/runtime/vm/snapshot.cc b/runtime/vm/snapshot.cc index 1b91df883f46..447fbf46a70f 100644 --- a/runtime/vm/snapshot.cc +++ b/runtime/vm/snapshot.cc @@ -15,8 +15,10 @@ #include "vm/object.h" #include "vm/object_store.h" #include "vm/snapshot_ids.h" +#include "vm/stub_code.h" #include "vm/symbols.h" #include "vm/timeline.h" +#include "vm/type_testing_stubs.h" #include "vm/version.h" // We currently only expect the Dart mutator to read snapshots. @@ -189,6 +191,7 @@ SnapshotReader::SnapshotReader(const uint8_t* buffer, heap_(isolate()->heap()), old_space_(thread_->isolate()->heap()->old_space()), cls_(Class::Handle(zone_)), + code_(Code::Handle(zone_)), obj_(Object::Handle(zone_)), pobj_(PassiveObject::Handle(zone_)), array_(Array::Handle(zone_)), @@ -208,6 +211,7 @@ SnapshotReader::SnapshotReader(const uint8_t* buffer, ? Object::vm_isolate_snapshot_object_table().Length() : 0), backward_references_(backward_refs), + types_to_postprocess_(GrowableObjectArray::Handle(zone_)), objects_to_rehash_(GrowableObjectArray::Handle(zone_)) {} RawObject* SnapshotReader::ReadObject() { @@ -215,6 +219,7 @@ RawObject* SnapshotReader::ReadObject() { LongJumpScope jump; if (setjmp(*jump.Set()) == 0) { objects_to_rehash_ = GrowableObjectArray::New(HEAP_SPACE(kind_)); + types_to_postprocess_ = GrowableObjectArray::New(HEAP_SPACE(kind_)); PassiveObject& obj = PassiveObject::Handle(zone(), ReadObjectImpl(kAsInlinedObject)); @@ -234,6 +239,7 @@ RawObject* SnapshotReader::ReadObject() { } else { result = obj.raw(); } + RunDelayedTypePostprocessing(); const Object& ok = Object::Handle(zone_, RunDelayedRehashingOfMaps()); objects_to_rehash_ = GrowableObjectArray::null(); if (!ok.IsNull()) { @@ -248,6 +254,22 @@ RawObject* SnapshotReader::ReadObject() { } } +void SnapshotReader::EnqueueTypePostprocessing(const AbstractType& type) { + types_to_postprocess_.Add(type, HEAP_SPACE(kind_)); +} + +void SnapshotReader::RunDelayedTypePostprocessing() { + if (types_to_postprocess_.Length() > 0) { + AbstractType& type = AbstractType::Handle(); + Instructions& instr = Instructions::Handle(); + for (intptr_t i = 0; i < types_to_postprocess_.Length(); ++i) { + type ^= types_to_postprocess_.At(i); + instr = TypeTestingStubGenerator::DefaultCodeForType(type); + type.SetTypeTestingStub(instr); + } + } +} + void SnapshotReader::EnqueueRehashingOfMap(const LinkedHashMap& map) { objects_to_rehash_.Add(map, HEAP_SPACE(kind_)); } diff --git a/runtime/vm/snapshot.h b/runtime/vm/snapshot.h index 44cb6dbae61e..6dd74d4792ff 100644 --- a/runtime/vm/snapshot.h +++ b/runtime/vm/snapshot.h @@ -326,6 +326,7 @@ class SnapshotReader : public BaseReader { PassiveObject* PassiveObjectHandle() { return &pobj_; } Array* ArrayHandle() { return &array_; } Class* ClassHandle() { return &cls_; } + Code* CodeHandle() { return &code_; } String* StringHandle() { return &str_; } AbstractType* 
TypeHandle() { return &type_; } TypeArguments* TypeArgumentsHandle() { return &type_arguments_; } @@ -371,6 +372,9 @@ class SnapshotReader : public BaseReader { PageSpace* old_space() const { return old_space_; } private: + void EnqueueTypePostprocessing(const AbstractType& type); + void RunDelayedTypePostprocessing(); + void EnqueueRehashingOfMap(const LinkedHashMap& map); RawObject* RunDelayedRehashingOfMaps(); @@ -434,6 +438,7 @@ class SnapshotReader : public BaseReader { Heap* heap_; // Heap of the current isolate. PageSpace* old_space_; // Old space of the current isolate. Class& cls_; // Temporary Class handle. + Code& code_; // Temporary Code handle. Object& obj_; // Temporary Object handle. PassiveObject& pobj_; // Temporary PassiveObject handle. Array& array_; // Temporary Array handle. @@ -450,6 +455,7 @@ class SnapshotReader : public BaseReader { UnhandledException& error_; // Error handle. intptr_t max_vm_isolate_object_id_; ZoneGrowableArray<BackRefNode>* backward_references_; + GrowableObjectArray& types_to_postprocess_; GrowableObjectArray& objects_to_rehash_; friend class ApiError; @@ -770,6 +776,8 @@ class SnapshotWriter : public BaseWriter { friend class RawSubtypeTestCache; friend class RawTokenStream; friend class RawType; + friend class RawTypeRef; + friend class RawBoundedType; friend class RawTypeArguments; friend class RawTypeParameter; friend class RawUserTag; diff --git a/runtime/vm/stub_code.cc b/runtime/vm/stub_code.cc index 9bd84f1f9c71..a6648081b424 100644 --- a/runtime/vm/stub_code.cc +++ b/runtime/vm/stub_code.cc @@ -82,14 +82,8 @@ RawCode* StubCode::Generate(const char* name, void StubCode::VisitObjectPointers(ObjectPointerVisitor* visitor) {} bool StubCode::HasBeenInitialized() { -#if !defined(TARGET_ARCH_DBC) - // Use JumpToHandler and InvokeDart as canaries. - const StubEntry* entry_1 = StubCode::JumpToFrame_entry(); - const StubEntry* entry_2 = StubCode::InvokeDartCode_entry(); - return (entry_1 != NULL) && (entry_2 != NULL); -#else - return true; -#endif + // Use AsynchronousGapMarker as canary. + return StubCode::AsynchronousGapMarker_entry() != NULL; } bool StubCode::InInvocationStub(uword pc) { diff --git a/runtime/vm/stub_code.h b/runtime/vm/stub_code.h index 0ef2edcb68b0..b352ef1ff54a 100644 --- a/runtime/vm/stub_code.h +++ b/runtime/vm/stub_code.h @@ -67,6 +67,9 @@ class SnapshotWriter; V(Subtype1TestCache) \ V(Subtype2TestCache) \ V(Subtype4TestCache) \ + V(DefaultTypeTest) \ + V(UnreachableTypeTest) \ + V(SlowTypeTest) \ V(CallClosureNoSuchMethod) \ V(FrameAwaitingMaterialization) \ V(AsynchronousGapMarker) @@ -82,6 +85,9 @@ class SnapshotWriter; V(Deoptimize) \ V(DeoptimizeLazyFromReturn) \ V(DeoptimizeLazyFromThrow) \ + V(DefaultTypeTest) \ + V(UnreachableTypeTest) \ + V(SlowTypeTest) \ V(FrameAwaitingMaterialization) \ V(AsynchronousGapMarker) diff --git a/runtime/vm/stub_code_arm.cc b/runtime/vm/stub_code_arm.cc index 9b9ca0b20b2b..ca285e167128 100644 --- a/runtime/vm/stub_code_arm.cc +++ b/runtime/vm/stub_code_arm.cc @@ -3,6 +3,7 @@ // BSD-style license that can be found in the LICENSE file.
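Stepping back from the reader changes just shown: stub entry points are raw process-local addresses, so a script snapshot cannot carry them. The reader therefore queues every type it materializes and, once the whole object graph is in, installs the default stub on each. A minimal standalone sketch of that enqueue-then-postprocess pattern, with invented names rather than the VM's API:

    #include <cstdint>
    #include <memory>
    #include <vector>

    struct TypeObj {
      std::uintptr_t type_test_entry = 0;  // meaningless across processes
    };

    std::uintptr_t DefaultStubEntry() {
      return 0x1000;  // stand-in for the default stub's real entry point
    }

    class Reader {
     public:
      // Phase 1: during deserialization, only remember the type.
      TypeObj* ReadType() {
        types_.push_back(std::make_unique<TypeObj>());
        pending_.push_back(types_.back().get());
        return pending_.back();
      }
      // Phase 2: once the graph is complete, patch in fresh entry points.
      void RunDelayedTypePostprocessing() {
        for (TypeObj* type : pending_) {
          type->type_test_entry = DefaultStubEntry();
        }
        pending_.clear();
      }

     private:
      std::vector<std::unique_ptr<TypeObj>> types_;
      std::vector<TypeObj*> pending_;  // analogous to types_to_postprocess_
    };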
#include "vm/globals.h" + #if defined(TARGET_ARCH_ARM) && !defined(DART_PRECOMPILED_RUNTIME) #include "vm/compiler/assembler/assembler.h" @@ -12,11 +13,13 @@ #include "vm/dart_entry.h" #include "vm/heap.h" #include "vm/instructions.h" +#include "vm/isolate.h" #include "vm/object_store.h" #include "vm/runtime_entry.h" #include "vm/stack_frame.h" #include "vm/stub_code.h" #include "vm/tags.h" +#include "vm/type_testing_stubs.h" #define __ assembler-> @@ -1726,6 +1729,9 @@ void StubCode::GenerateDebugStepCheckStub(Assembler* assembler) { // R2: instantiator type arguments (only if n == 4, can be raw_null). // R1: function type arguments (only if n == 4, can be raw_null). // R3: SubtypeTestCache. +// +// Preserves R0/R2 +// // Result in R1: null -> not found, otherwise result (true or false). static void GenerateSubtypeNTestCacheStub(Assembler* assembler, int n) { ASSERT((n == 1) || (n == 2) || (n == 4)); @@ -1838,6 +1844,163 @@ void StubCode::GenerateSubtype4TestCacheStub(Assembler* assembler) { GenerateSubtypeNTestCacheStub(assembler, 4); } +// Used to test whether a given value is of a given type (different variants, +// all have the same calling convention). +// +// Inputs: +// - R0 : instance to test against. +// - R2 : instantiator type arguments (if needed). +// - R1 : function type arguments (if needed). +// +// - R3 : subtype test cache. +// +// - R8 : type to test against. +// - R4 : name of destination variable. +// +// Preserves R0/R2. +// +// Note of warning: The caller will not populate CODE_REG and we have therefore +// no access to the pool. +void StubCode::GenerateDefaultTypeTestStub(Assembler* assembler) { + Label done; + + const Register kInstanceReg = R0; + const Register kDstTypeReg = R8; + + // Fast case for 'null'. + __ CompareObject(kInstanceReg, Object::null_object()); + __ BranchIf(EQUAL, &done); + + // Fast case for 'int'. + Label not_smi; + __ BranchIfNotSmi(kInstanceReg, ¬_smi); + __ CompareObject(kDstTypeReg, Object::ZoneHandle(Type::IntType())); + __ BranchIf(EQUAL, &done); + __ Bind(¬_smi); + + // Tail call the [SubtypeTestCache]-based implementation. + __ ldr(CODE_REG, Address(THR, Thread::slow_type_test_stub_offset())); + __ ldr(R9, FieldAddress(CODE_REG, Code::entry_point_offset())); + __ bx(R9); + + __ Bind(&done); + __ Ret(); +} + +void StubCode::GenerateUnreachableTypeTestStub(Assembler* assembler) { + __ Breakpoint(); +} + +void StubCode::GenerateSlowTypeTestStub(Assembler* assembler) { + Label done, call_runtime; + + const Register kInstanceReg = R0; + const Register kInstantiatorTypeArgumentsReg = R2; + const Register kFunctionTypeArgumentsReg = R1; + const Register kDstTypeReg = R8; + const Register kDstNameReg = R4; + const Register kSubtypeTestCacheReg = R3; + + __ EnterStubFrame(); + +#ifdef DEBUG + // Guaranteed by caller. + Label no_error; + __ CompareObject(kInstanceReg, Object::null_object()); + __ BranchIf(NOT_EQUAL, &no_error); + __ Breakpoint(); + __ Bind(&no_error); +#endif + + // Need to handle slow cases of [Smi]s here because the + // [SubtypeTestCache]-based stubs do not handle [Smi]s. + Label non_smi_value; + __ BranchIfSmi(kInstanceReg, &call_runtime); + + const Register kTmp = NOTFP; + + // If this is not a [Type] object, we'll go to the runtime. + Label is_simple, is_instantiated, is_uninstantiated; + __ LoadClassId(kTmp, kDstTypeReg); + __ cmp(kTmp, Operand(kTypeCid)); + __ BranchIf(NOT_EQUAL, &is_uninstantiated); + + // Check whether this [Type] is instantiated/uninstantiated. 
+ __ ldrb(kTmp, FieldAddress(kDstTypeReg, Type::type_state_offset())); + __ cmp(kTmp, Operand(RawType::kFinalizedInstantiated)); + __ BranchIf(NOT_EQUAL, &is_uninstantiated); + + // Check whether this [Type] is with/without arguments. + __ LoadField(kTmp, FieldAddress(kDstTypeReg, Type::arguments_offset())); + __ CompareObject(kTmp, Object::null_object()); + __ BranchIf(NOT_EQUAL, &is_instantiated); + // Fall through to &is_simple + + const intptr_t kRegsToSave = (1 << kSubtypeTestCacheReg) | + (1 << kDstNameReg) | (1 << kDstTypeReg) | + (1 << kFunctionTypeArgumentsReg); + __ Bind(&is_simple); + { + __ PushList(kRegsToSave); + __ BranchLink(*StubCode::Subtype1TestCache_entry()); + __ CompareObject(R1, Bool::True()); + __ PopList(kRegsToSave); + __ BranchIf(EQUAL, &done); // Cache said: yes. + __ Jump(&call_runtime); + } + + __ Bind(&is_instantiated); + { + __ PushList(kRegsToSave); + __ BranchLink(*StubCode::Subtype2TestCache_entry()); + __ CompareObject(R1, Bool::True()); + __ PopList(kRegsToSave); + __ BranchIf(EQUAL, &done); // Cache said: yes. + __ Jump(&call_runtime); + } + + __ Bind(&is_uninstantiated); + { + __ PushList(kRegsToSave); + __ BranchLink(*StubCode::Subtype4TestCache_entry()); + __ CompareObject(R1, Bool::True()); + __ PopList(kRegsToSave); + __ BranchIf(EQUAL, &done); // Cache said: yes. + // Fall through to runtime_call + } + + __ Bind(&call_runtime); + + // We cannot really ensure here that dynamic/Object never occur here (though + // it is guaranteed at dart_precompiled_runtime time). This is because we do + // constant evaluation with default stubs and only install optimized versions + // before writing out the AOT snapshot. So dynamic/Object will run with + // default stub in constant evaluation. + __ CompareObject(kDstTypeReg, Type::dynamic_type()); + __ BranchIf(EQUAL, &done); + __ CompareObject(kDstTypeReg, Type::Handle(Type::ObjectType())); + __ BranchIf(EQUAL, &done); + + __ PushObject(Object::null_object()); // Make room for result. + __ Push(kInstanceReg); + __ Push(kDstTypeReg); + __ Push(kInstantiatorTypeArgumentsReg); + __ Push(kFunctionTypeArgumentsReg); + __ Push(kDstNameReg); + __ Push(kSubtypeTestCacheReg); + __ CallRuntime(kTypeCheckRuntimeEntry, 6); + __ Pop(kSubtypeTestCacheReg); + __ Pop(kDstNameReg); + __ Pop(kFunctionTypeArgumentsReg); + __ Pop(kInstantiatorTypeArgumentsReg); + __ Pop(kDstTypeReg); + __ Pop(kInstanceReg); + __ Drop(1); // Discard return value. + __ Bind(&done); + __ LeaveStubFrame(); + __ Ret(); +} + // Return the current stack pointer address, used to do stack alignment checks. void StubCode::GenerateGetCStackPointerStub(Assembler* assembler) { __ mov(R0, Operand(SP)); diff --git a/runtime/vm/stub_code_arm64.cc b/runtime/vm/stub_code_arm64.cc index e23bb9076094..3ae08bf2cd52 100644 --- a/runtime/vm/stub_code_arm64.cc +++ b/runtime/vm/stub_code_arm64.cc @@ -1772,6 +1772,9 @@ void StubCode::GenerateDebugStepCheckStub(Assembler* assembler) { // R1: instantiator type arguments (only if n == 4, can be raw_null). // R2: function type arguments (only if n == 4, can be raw_null). // R3: SubtypeTestCache. +// +// Preserves R0/R2/R8. +// // Result in R1: null -> not found, otherwise result (true or false). 
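To summarize the three-way split just laid out: the slow stub picks the cheapest [SubtypeTestCache] probe that can be sound for the destination type. A finalized, instantiated, argument-free type is decided by the instance's class alone (Subtype1), an instantiated generic type also needs the instance's own type arguments (Subtype2), and anything uninstantiated needs the instantiator and function type arguments as well (Subtype4). A hedged standalone sketch of that classification, with invented names:

    enum class CacheProbe { kSubtype1, kSubtype2, kSubtype4 };

    struct DstTypeDesc {
      bool is_type_cid;                // class id == kTypeCid
      bool is_finalized_instantiated;  // RawType::kFinalizedInstantiated
      bool has_type_arguments;         // arguments_ != null
    };

    // Mirrors the branch structure of GenerateSlowTypeTestStub.
    CacheProbe ChooseProbe(const DstTypeDesc& dst) {
      if (!dst.is_type_cid) return CacheProbe::kSubtype4;           // uninstantiated
      if (!dst.is_finalized_instantiated) return CacheProbe::kSubtype4;
      if (dst.has_type_arguments) return CacheProbe::kSubtype2;     // instantiated
      return CacheProbe::kSubtype1;                                 // simple
    }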
static void GenerateSubtypeNTestCacheStub(Assembler* assembler, int n) { ASSERT((n == 1) || (n == 2) || (n == 4)); @@ -1884,6 +1887,167 @@ void StubCode::GenerateSubtype4TestCacheStub(Assembler* assembler) { GenerateSubtypeNTestCacheStub(assembler, 4); } +// Used to test whether a given value is of a given type (different variants, +// all have the same calling convention). +// +// Inputs: +// - R0 : instance to test against. +// - R1 : instantiator type arguments (if needed). +// - R2 : function type arguments (if needed). +// +// - R3 : subtype test cache. +// +// - R8 : type to test against. +// - R4 : name of destination variable. +// +// Preserves R0/R2. +// +// Note of warning: The caller will not populate CODE_REG and we have therefore +// no access to the pool. +void StubCode::GenerateDefaultTypeTestStub(Assembler* assembler) { + Label done; + + const Register kInstanceReg = R0; + const Register kDstTypeReg = R8; + + // Fast case for 'null'. + __ CompareObject(kInstanceReg, Object::null_object()); + __ BranchIf(EQUAL, &done); + + // Fast case for 'int'. + Label not_smi; + __ BranchIfNotSmi(kInstanceReg, &not_smi); + __ CompareObject(kDstTypeReg, Object::ZoneHandle(Type::IntType())); + __ BranchIf(EQUAL, &done); + __ Bind(&not_smi); + + // Tail call the [SubtypeTestCache]-based implementation. + __ ldr(CODE_REG, Address(THR, Thread::slow_type_test_stub_offset())); + __ ldr(R9, FieldAddress(CODE_REG, Code::entry_point_offset())); + __ br(R9); + + __ Bind(&done); + __ Ret(); +} + +void StubCode::GenerateUnreachableTypeTestStub(Assembler* assembler) { + __ Breakpoint(); +} + +void StubCode::GenerateSlowTypeTestStub(Assembler* assembler) { + Label done, call_runtime; + + const Register kInstanceReg = R0; + const Register kInstantiatorTypeArgumentsReg = R1; + const Register kFunctionTypeArgumentsReg = R2; + + const Register kSubtypeTestCacheReg = R3; + const Register kDstTypeReg = R8; + const Register kDstNameReg = R4; + + __ EnterStubFrame(); + +#ifdef DEBUG + // Guaranteed by caller. + Label no_error; + __ CompareObject(kInstanceReg, Object::null_object()); + __ BranchIf(NOT_EQUAL, &no_error); + __ Breakpoint(); + __ Bind(&no_error); +#endif + + // Need to handle slow cases of [Smi]s here because the + // [SubtypeTestCache]-based stubs do not handle [Smi]s. + __ BranchIfSmi(kInstanceReg, &call_runtime); + + const Register kTmp = R9; + + // If this is not a [Type] object, we'll go to the runtime. + Label is_simple, is_instantiated, is_uninstantiated; + __ LoadClassId(kTmp, kDstTypeReg); + __ cmp(kTmp, Operand(kTypeCid)); + __ BranchIf(NOT_EQUAL, &is_uninstantiated); + + // Check whether this [Type] is instantiated/uninstantiated. + __ ldr(kTmp, FieldAddress(kDstTypeReg, Type::type_state_offset()), kByte); + __ cmp(kTmp, Operand(RawType::kFinalizedInstantiated)); + __ BranchIf(NOT_EQUAL, &is_uninstantiated); + + // Check whether this [Type] is with/without arguments. + __ LoadField(kTmp, FieldAddress(kDstTypeReg, Type::arguments_offset())); + __ CompareObject(kTmp, Object::null_object()); + __ BranchIf(NOT_EQUAL, &is_instantiated); + // Fall through to &is_simple + + __ Bind(&is_simple); + { + __ PushPair(kInstantiatorTypeArgumentsReg, kSubtypeTestCacheReg); + __ PushPair(kDstNameReg, TMP); + __ BranchLink(*StubCode::Subtype1TestCache_entry()); + __ CompareObject(R1, Bool::True()); + __ PopPair(kDstNameReg, TMP); + __ PopPair(kInstantiatorTypeArgumentsReg, kSubtypeTestCacheReg); + __ BranchIf(EQUAL, &done); // Cache said: yes.
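Note how the default stub escapes its no-pool constraint in the listing above: since the caller does not set up CODE_REG, the stub cannot do object-pool loads, so the one object it needs, the slow stub's code, is fetched from a fixed slot on the current Thread. A standalone model of that indirection, with invented names:

    // Model of the per-thread slot the stub reads instead of an object
    // pool: the only state reachable without CODE_REG is whatever is
    // addressable from a fixed register (THR in the real stubs).
    struct ThreadModel {
      void (*slow_type_test_entry)();  // cf. slow_type_test_stub_offset()
    };

    void TailCallSlowTypeTest(ThreadModel* thread) {
      // Moral equivalent of the generated sequence:
      //   ldr CODE_REG, [THR, #slow_type_test_stub_offset]
      //   ldr TMP, [CODE_REG, #entry_point_offset]
      //   br  TMP
      thread->slow_type_test_entry();
    }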
+ __ Jump(&call_runtime); + } + + __ Bind(&is_instantiated); + { + __ PushPair(kInstantiatorTypeArgumentsReg, kSubtypeTestCacheReg); + __ PushPair(kDstNameReg, TMP); + __ BranchLink(*StubCode::Subtype2TestCache_entry()); + __ CompareObject(R1, Bool::True()); + __ PopPair(kDstNameReg, TMP); + __ PopPair(kInstantiatorTypeArgumentsReg, kSubtypeTestCacheReg); + __ BranchIf(EQUAL, &done); // Cache said: yes. + __ Jump(&call_runtime); + } + + __ Bind(&is_uninstantiated); + { + __ PushPair(kInstantiatorTypeArgumentsReg, kSubtypeTestCacheReg); + __ PushPair(kDstNameReg, TMP); + __ BranchLink(*StubCode::Subtype4TestCache_entry()); + __ CompareObject(R1, Bool::True()); + __ PopPair(kDstNameReg, TMP); + __ PopPair(kInstantiatorTypeArgumentsReg, kSubtypeTestCacheReg); + __ BranchIf(EQUAL, &done); // Cache said: yes. + // Fall through to runtime_call + } + + __ Bind(&call_runtime); + + // We cannot really ensure here that dynamic/Object never occur here (though + // it is guaranteed at dart_precompiled_runtime time). This is because we do + // constant evaluation with default stubs and only install optimized versions + // before writing out the AOT snapshot. So dynamic/Object will run with + // default stub in constant evaluation. + __ CompareObject(kDstTypeReg, Type::dynamic_type()); + __ BranchIf(EQUAL, &done); + __ CompareObject(kDstTypeReg, Type::Handle(Type::ObjectType())); + __ BranchIf(EQUAL, &done); + + __ PushObject(Object::null_object()); // Make room for result. + __ Push(kInstanceReg); + __ Push(kDstTypeReg); + __ Push(kInstantiatorTypeArgumentsReg); + __ Push(kFunctionTypeArgumentsReg); + __ Push(kDstNameReg); + __ Push(kSubtypeTestCacheReg); + __ CallRuntime(kTypeCheckRuntimeEntry, 6); + __ Pop(kSubtypeTestCacheReg); + __ Pop(kDstNameReg); + __ Pop(kFunctionTypeArgumentsReg); + __ Pop(kInstantiatorTypeArgumentsReg); + __ Pop(kDstTypeReg); + __ Pop(kInstanceReg); + __ Drop(1); // Discard return value. + __ Bind(&done); + __ LeaveStubFrame(); + __ Ret(); +} + void StubCode::GenerateGetCStackPointerStub(Assembler* assembler) { __ mov(R0, CSP); __ ret(); diff --git a/runtime/vm/stub_code_dbc.cc b/runtime/vm/stub_code_dbc.cc index 4b7157c6b444..15bda33cb7c1 100644 --- a/runtime/vm/stub_code_dbc.cc +++ b/runtime/vm/stub_code_dbc.cc @@ -83,6 +83,21 @@ void StubCode::GenerateDeoptimizeStub(Assembler* assembler) { __ Trap(); } +// TODO(kustermann): Don't generate this stub. +void StubCode::GenerateDefaultTypeTestStub(Assembler* assembler) { + __ Trap(); +} + +// TODO(kustermann): Don't generate this stub. +void StubCode::GenerateUnreachableTypeTestStub(Assembler* assembler) { + __ Trap(); +} + +// TODO(kustermann): Don't generate this stub. +void StubCode::GenerateSlowTypeTestStub(Assembler* assembler) { + __ Trap(); +} + void StubCode::GenerateFrameAwaitingMaterializationStub(Assembler* assembler) { __ Trap(); } diff --git a/runtime/vm/stub_code_ia32.cc b/runtime/vm/stub_code_ia32.cc index 9ef1116db871..70f6c53e349b 100644 --- a/runtime/vm/stub_code_ia32.cc +++ b/runtime/vm/stub_code_ia32.cc @@ -1779,6 +1779,21 @@ void StubCode::GenerateSubtype4TestCacheStub(Assembler* assembler) { GenerateSubtypeNTestCacheStub(assembler, 4); } +void StubCode::GenerateDefaultTypeTestStub(Assembler* assembler) { + // Only used in AOT and therefore not on ia32. + __ Breakpoint(); +} + +void StubCode::GenerateUnreachableTypeTestStub(Assembler* assembler) { + // Only used in AOT and therefore not on ia32. 
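The runtime fallback at the end of each slow stub follows one fixed discipline on every architecture: reserve a slot for the (discarded) result, push the six arguments in a canonical order, call the TypeCheck runtime entry, which either returns normally (the check passed or the cache was extended) or throws, then unwind in exactly the reverse order. A standalone sketch of that stack discipline, with invented names and a plain vector standing in for the real stack:

    #include <array>
    #include <cstdint>
    #include <vector>

    using Slot = std::intptr_t;

    // Stand-in for kTypeCheckRuntimeEntry; the real one throws on failure.
    void TypeCheckRuntime(std::vector<Slot>&) {}

    // args order in the real stubs: instance, dst type, instantiator type
    // arguments, function type arguments, dst name, subtype test cache.
    void CallTypeCheck(std::vector<Slot>& stack,
                       const std::array<Slot, 6>& args) {
      stack.push_back(0);                            // room for the result
      for (Slot arg : args) stack.push_back(arg);
      TypeCheckRuntime(stack);                       // may throw instead
      for (int i = 0; i < 6; ++i) stack.pop_back();  // restore the inputs
      stack.pop_back();                              // drop the result slot
    }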
+ __ Breakpoint(); +} + +void StubCode::GenerateSlowTypeTestStub(Assembler* assembler) { + // Only used in AOT and therefore not on ia32. + __ Breakpoint(); +} + // Return the current stack pointer address, used to do stack alignment checks. // TOS + 0: return address // Result in EAX. diff --git a/runtime/vm/stub_code_x64.cc b/runtime/vm/stub_code_x64.cc index 5183d28fedf0..b2134b9683db 100644 --- a/runtime/vm/stub_code_x64.cc +++ b/runtime/vm/stub_code_x64.cc @@ -3,6 +3,9 @@ // BSD-style license that can be found in the LICENSE file. #include "vm/globals.h" + +#include "vm/stub_code.h" + #if defined(TARGET_ARCH_X64) && !defined(DART_PRECOMPILED_RUNTIME) #include "vm/compiler/assembler/assembler.h" @@ -15,8 +18,8 @@ #include "vm/resolver.h" #include "vm/scavenger.h" #include "vm/stack_frame.h" -#include "vm/stub_code.h" #include "vm/tags.h" +#include "vm/type_testing_stubs.h" #define __ assembler-> @@ -1717,7 +1720,7 @@ void StubCode::GenerateDebugStepCheckStub(Assembler* assembler) { // // - TOS + 0: return address. // -// Preserves R9/RAX/RCX/RDX. +// Preserves R9/RAX/RCX/RDX, RBX. // // Result in R8: null -> not found, otherwise result (true or false). static void GenerateSubtypeNTestCacheStub(Assembler* assembler, int n) { @@ -1841,6 +1844,157 @@ void StubCode::GenerateSubtype4TestCacheStub(Assembler* assembler) { GenerateSubtypeNTestCacheStub(assembler, 4); } +// Used to test whether a given value is of a given type (different variants, +// all have the same calling convention). +// +// Inputs: +// - R9 : RawSubtypeTestCache +// - RAX : instance to test against. +// - RDX : instantiator type arguments (if needed). +// - RCX : function type arguments (if needed). +// +// - RBX : type to test against. +// - R10 : name of destination variable. +// +// Preserves R9/RAX/RCX/RDX, RBX, R10. +// +// Note of warning: The caller will not populate CODE_REG and we have therefore +// no access to the pool. +void StubCode::GenerateDefaultTypeTestStub(Assembler* assembler) { + Label done; + + const Register kInstanceReg = RAX; + const Register kDstTypeReg = RBX; + + // Fast case for 'null'. + __ CompareObject(kInstanceReg, Object::null_object()); + __ BranchIf(EQUAL, &done); + + // Fast case for 'int'. + Label not_smi; + __ BranchIfNotSmi(kInstanceReg, &not_smi); + __ CompareObject(kDstTypeReg, Object::ZoneHandle(Type::IntType())); + __ BranchIf(EQUAL, &done); + __ Bind(&not_smi); + + // Tail call the [SubtypeTestCache]-based implementation. + __ movq(CODE_REG, Address(THR, Thread::slow_type_test_stub_offset())); + __ jmp(FieldAddress(CODE_REG, Code::entry_point_offset())); + + __ Bind(&done); + __ Ret(); +} + +void StubCode::GenerateUnreachableTypeTestStub(Assembler* assembler) { + __ Breakpoint(); +} + +void StubCode::GenerateSlowTypeTestStub(Assembler* assembler) { + Label done, call_runtime; + + const Register kInstanceReg = RAX; + const Register kInstantiatorTypeArgumentsReg = RDX; + const Register kFunctionTypeArgumentsReg = RCX; + const Register kDstTypeReg = RBX; + const Register kDstNameReg = R10; + const Register kSubtypeTestCacheReg = R9; + + __ EnterStubFrame(); + +#ifdef DEBUG + // Guaranteed by caller. + Label no_error; + __ CompareObject(kInstanceReg, Object::null_object()); + __ BranchIf(NOT_EQUAL, &no_error); + __ Breakpoint(); + __ Bind(&no_error); +#endif + + // Need to handle slow cases of [Smi]s here because the + // [SubtypeTestCache]-based stubs do not handle [Smi]s.
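The comment closing the hunk above deserves a gloss: the [SubtypeTestCache] stubs key their probe on a class id loaded from an object header, and a [Smi] is an immediate value with no header, so any Smi that was not already accepted by the default stub's `int` fast path must bypass the caches and go straight to the runtime. In model form, with invented names and assuming the VM's usual low-bit-zero Smi tagging:

    #include <cstdint>

    // Low tag bit 0 => Smi (immediate integer), 1 => heap object pointer.
    inline bool IsSmi(std::uintptr_t tagged) { return (tagged & 1) == 0; }

    bool RuntimeTypeCheck(std::uintptr_t) { return true; }  // stand-in

    bool SlowTypeTest(std::uintptr_t instance) {
      if (IsSmi(instance)) {
        return RuntimeTypeCheck(instance);  // caches need a class id; punt
      }
      // ... LoadClassId + SubtypeTestCache probes would go here ...
      return RuntimeTypeCheck(instance);
    }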
+ __ BranchIfSmi(kInstanceReg, &call_runtime); + + const Register kTmp = RDI; + + // If this is not a [Type] object, we'll go to the runtime. + Label is_simple, is_instantiated, is_uninstantiated; + __ LoadClassId(kTmp, kDstTypeReg); + __ cmpq(kTmp, Immediate(kTypeCid)); + __ BranchIf(NOT_EQUAL, &is_uninstantiated); + + // Check whether this [Type] is instantiated/uninstantiated. + __ cmpb(FieldAddress(kDstTypeReg, Type::type_state_offset()), + Immediate(RawType::kFinalizedInstantiated)); + __ BranchIf(NOT_EQUAL, &is_uninstantiated); + + // Check whether this [Type] is with/without arguments. + __ LoadField(kTmp, FieldAddress(kDstTypeReg, Type::arguments_offset())); + __ CompareObject(kTmp, Object::null_object()); + __ BranchIf(NOT_EQUAL, &is_instantiated); + // Fall through to &is_simple + + __ Bind(&is_simple); + { + __ pushq(kDstNameReg); + __ Call(*StubCode::Subtype1TestCache_entry()); + __ popq(kDstNameReg); + __ CompareObject(R8, Bool::True()); + __ BranchIf(EQUAL, &done); // Cache said: yes. + __ Jump(&call_runtime); + } + + __ Bind(&is_instantiated); + { + __ pushq(kDstNameReg); + __ Call(*StubCode::Subtype2TestCache_entry()); + __ popq(kDstNameReg); + __ CompareObject(R8, Bool::True()); + __ BranchIf(EQUAL, &done); // Cache said: yes. + __ Jump(&call_runtime); + } + + __ Bind(&is_uninstantiated); + { + __ pushq(kDstNameReg); + __ Call(*StubCode::Subtype4TestCache_entry()); + __ popq(kDstNameReg); + __ CompareObject(R8, Bool::True()); + __ BranchIf(EQUAL, &done); // Cache said: yes. + // Fall through to runtime_call + } + + __ Bind(&call_runtime); + + // We cannot really ensure here that dynamic/Object never occur here (though + // it is guaranteed at dart_precompiled_runtime time). This is because we do + // constant evaluation with default stubs and only install optimized versions + // before writing out the AOT snapshot. So dynamic/Object will run with + // default stub in constant evaluation. + __ CompareObject(kDstTypeReg, Type::dynamic_type()); + __ BranchIf(EQUAL, &done); + __ CompareObject(kDstTypeReg, Type::Handle(Type::ObjectType())); + __ BranchIf(EQUAL, &done); + + __ PushObject(Object::null_object()); // Make room for result. + __ pushq(kInstanceReg); + __ pushq(kDstTypeReg); + __ pushq(kInstantiatorTypeArgumentsReg); + __ pushq(kFunctionTypeArgumentsReg); + __ pushq(kDstNameReg); + __ pushq(kSubtypeTestCacheReg); + __ CallRuntime(kTypeCheckRuntimeEntry, 6); + __ popq(kSubtypeTestCacheReg); + __ popq(kDstNameReg); + __ popq(kFunctionTypeArgumentsReg); + __ popq(kInstantiatorTypeArgumentsReg); + __ popq(kDstTypeReg); + __ popq(kInstanceReg); + __ Drop(1); // Discard return value. + __ Bind(&done); + __ LeaveStubFrame(); + __ Ret(); +} + // Return the current stack pointer address, used to stack alignment // checks. 
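One more subtlety all three slow stubs share, per the comment in the listing: checks against `dynamic` and `Object` are normally optimized away at compile time, but AOT constant evaluation runs while types still carry default stubs, so the slow path must answer these top types itself rather than treating them as impossible. In sketch form, with invented names:

    enum class DstKind { kDynamic, kObject, kOther };

    // Top types accept every value; answer before the runtime transition.
    bool TryTopTypeShortCircuit(DstKind dst, bool* result) {
      if (dst == DstKind::kDynamic || dst == DstKind::kObject) {
        *result = true;
        return true;
      }
      return false;  // fall through to the TypeCheck runtime entry
    }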
// TOS + 0: return address diff --git a/runtime/vm/thread.h b/runtime/vm/thread.h index 3f98f693b1bd..fbd825ceb9c8 100644 --- a/runtime/vm/thread.h +++ b/runtime/vm/thread.h @@ -97,7 +97,9 @@ class Zone; V(RawCode*, lazy_deopt_from_return_stub_, \ StubCode::DeoptimizeLazyFromReturn_entry()->code(), NULL) \ V(RawCode*, lazy_deopt_from_throw_stub_, \ - StubCode::DeoptimizeLazyFromThrow_entry()->code(), NULL) + StubCode::DeoptimizeLazyFromThrow_entry()->code(), NULL) \ + V(RawCode*, slow_type_test_stub_, StubCode::SlowTypeTest_entry()->code(), \ + NULL) #endif diff --git a/runtime/vm/type_testing_stubs.cc b/runtime/vm/type_testing_stubs.cc new file mode 100644 index 000000000000..5b80645fb7d3 --- /dev/null +++ b/runtime/vm/type_testing_stubs.cc @@ -0,0 +1,69 @@ +// Copyright (c) 2018, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. + +#include "vm/type_testing_stubs.h" +#include "vm/compiler/assembler/disassembler.h" +#include "vm/compiler/backend/flow_graph_compiler.h" + +#define __ assembler-> + +namespace dart { + +DECLARE_FLAG(bool, disassemble_stubs); + +RawInstructions* TypeTestingStubGenerator::DefaultCodeForType( + const AbstractType& type) { + // During bootstrapping we have no access to stubs yet, so we'll just return + // `null` and patch these later in `Object::FinishInitOnce()`. + if (!StubCode::HasBeenInitialized()) { + ASSERT(type.IsType()); + const intptr_t cid = Type::Cast(type).type_class_id(); + ASSERT(cid == kDynamicCid || cid == kVoidCid || cid == kVectorCid); + return Instructions::null(); + } + + if (type.IsType() || type.IsTypeParameter() || type.IsTypeRef()) { + return Code::InstructionsOf(StubCode::DefaultTypeTest_entry()->code()); + } else { + ASSERT(type.IsBoundedType() || type.IsMixinAppType()); + return Code::InstructionsOf(StubCode::UnreachableTypeTest_entry()->code()); + } +} + +TypeTestingStubFinder::TypeTestingStubFinder() : code_(Code::Handle()) {} + +RawInstructions* TypeTestingStubFinder::LookupByAddresss( + uword entry_point) const { + code_ = StubCode::DefaultTypeTest_entry()->code(); + if (entry_point == code_.UncheckedEntryPoint()) { + return code_.instructions(); + } + code_ = StubCode::UnreachableTypeTest_entry()->code(); + if (entry_point == code_.UncheckedEntryPoint()) { + return code_.instructions(); + } + + UNREACHABLE(); + return NULL; +} + +const char* TypeTestingStubFinder::StubNameFromAddresss( + uword entry_point) const { + // First test the 2 common ones: + code_ = StubCode::DefaultTypeTest_entry()->code(); + if (entry_point == code_.UncheckedEntryPoint()) { + return "TypeTestingStub_Default"; + } + code_ = StubCode::UnreachableTypeTest_entry()->code(); + if (entry_point == code_.UncheckedEntryPoint()) { + return "TypeTestingStub_Unreachable"; + } + + UNREACHABLE(); + return NULL; +} + +#undef __ + +} // namespace dart diff --git a/runtime/vm/type_testing_stubs.h b/runtime/vm/type_testing_stubs.h new file mode 100644 index 000000000000..82c48a413f47 --- /dev/null +++ b/runtime/vm/type_testing_stubs.h @@ -0,0 +1,45 @@ +// Copyright (c) 2018, the Dart project authors. Please see the AUTHORS file +// for details. All rights reserved. Use of this source code is governed by a +// BSD-style license that can be found in the LICENSE file. 
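`DefaultCodeForType` above encodes a bootstrap ordering problem: the very first types (`dynamic`, `void`, the vector type) are created before any stub exists, so they receive a null entry point which a later `FinishInitOnce()`-style pass replaces once the stubs have been generated. A standalone model of that two-phase initialization, with invented names:

    #include <cstdint>
    #include <vector>

    struct TypeObj {
      std::uintptr_t entry = 0;
    };

    bool g_stubs_initialized = false;
    std::uintptr_t g_default_stub_entry = 0;

    // Early callers tolerate 0; after initialization, every new type
    // gets a real stub entry point.
    std::uintptr_t DefaultCodeForType() {
      return g_stubs_initialized ? g_default_stub_entry : 0;
    }

    // Analogous to the FinishInitOnce() patch-up pass the comment names.
    void FinishInit(std::vector<TypeObj*>& bootstrap_types) {
      g_stubs_initialized = true;
      g_default_stub_entry = 0x1000;  // stand-in for the stub's address
      for (TypeObj* type : bootstrap_types) {
        type->entry = g_default_stub_entry;
      }
    }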
+ +#ifndef RUNTIME_VM_TYPE_TESTING_STUBS_H_ +#define RUNTIME_VM_TYPE_TESTING_STUBS_H_ + +#include "vm/compiler/assembler/assembler.h" +#include "vm/compiler/backend/il.h" + +namespace dart { + +class TypeTestingStubGenerator { + public: + // During bootstrapping it will return `null` for a whitelisted set of types, + // otherwise it will return a default stub which tail-calls + // subtype-test/runtime code. + static RawInstructions* DefaultCodeForType(const AbstractType& type); +}; + +class TypeTestingStubFinder { + public: + TypeTestingStubFinder(); + + // When serializing an AOT snapshot via our clustered snapshot writer, we + // write out references to the [Instructions] object for all the + // [AbstractType] objects we encounter. + // + // This method performs that mapping from a stub entrypoint address to the + // corresponding [Instructions] object. + RawInstructions* LookupByAddresss(uword entry_point) const; + + // When generating an AOT snapshot as an assembly file (i.e. ".S" file) we + // need to generate labels for the type testing stubs. + // + // This method maps stub entrypoint addresses to meaningful names. + const char* StubNameFromAddresss(uword entry_point) const; + + private: + Code& code_; +}; + +} // namespace dart + +#endif // RUNTIME_VM_TYPE_TESTING_STUBS_H_ diff --git a/runtime/vm/vm_sources.gni b/runtime/vm/vm_sources.gni index a4ee4faa4cd2..65be0c4c0928 100644 --- a/runtime/vm/vm_sources.gni +++ b/runtime/vm/vm_sources.gni @@ -341,6 +341,8 @@ vm_sources = [ "token_position.cc", "token_position.h", "type_table.h", + "type_testing_stubs.cc", + "type_testing_stubs.h", "unibrow-inl.h", "unibrow.cc", "unibrow.h",
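Finally, closing the loop on the finder declared in this header: at AOT-serialization time the writer holds only the raw entry-point word stored in a type and must recover which stub [Instructions] it denotes. With exactly two stubs in this CL, two comparisons suffice; the later CLs that add type-specialized stubs will presumably need a real table. A standalone model, with invented names:

    #include <cassert>
    #include <cstdint>

    struct InstructionsModel {
      std::uintptr_t unchecked_entry_point;
    };

    InstructionsModel g_default_stub{0x1000};
    InstructionsModel g_unreachable_stub{0x2000};

    // Reverse map from entry-point address to stub object, mirroring the
    // two-way comparison in TypeTestingStubFinder::LookupByAddresss.
    const InstructionsModel* LookupByAddress(std::uintptr_t entry_point) {
      if (entry_point == g_default_stub.unchecked_entry_point) {
        return &g_default_stub;
      }
      if (entry_point == g_unreachable_stub.unchecked_entry_point) {
        return &g_unreachable_stub;
      }
      assert(false && "every type must carry a known stub");  // UNREACHABLE()
      return nullptr;
    }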