diff --git a/runtime/vm/clustered_snapshot.cc b/runtime/vm/clustered_snapshot.cc index ddf2b7ee0236..9b5a4beb8d78 100644 --- a/runtime/vm/clustered_snapshot.cc +++ b/runtime/vm/clustered_snapshot.cc @@ -1052,6 +1052,7 @@ class FieldSerializationCluster : public SerializationCluster { s->WriteTokenPosition(field->ptr()->end_token_pos_); s->WriteCid(field->ptr()->guarded_cid_); s->WriteCid(field->ptr()->is_nullable_); + s->Write(field->ptr()->static_type_exactness_state_); #if !defined(DART_PRECOMPILED_RUNTIME) s->Write(field->ptr()->kernel_offset_); #endif @@ -1103,6 +1104,7 @@ class FieldDeserializationCluster : public DeserializationCluster { field->ptr()->end_token_pos_ = d->ReadTokenPosition(); field->ptr()->guarded_cid_ = d->ReadCid(); field->ptr()->is_nullable_ = d->ReadCid(); + field->ptr()->static_type_exactness_state_ = d->Read(); #if !defined(DART_PRECOMPILED_RUNTIME) field->ptr()->kernel_offset_ = d->Read(); #endif @@ -1124,6 +1126,8 @@ class FieldDeserializationCluster : public DeserializationCluster { field.set_guarded_list_length(Field::kNoFixedLength); field.set_guarded_list_length_in_object_offset( Field::kUnknownLengthOffset); + field.set_static_type_exactness_state( + StaticTypeExactnessState::NotTracking()); } } else { for (intptr_t i = start_index_; i < stop_index_; i++) { diff --git a/runtime/vm/compiler/backend/constant_propagator.cc b/runtime/vm/compiler/backend/constant_propagator.cc index e7db5aaaf3d1..7850d4e4149f 100644 --- a/runtime/vm/compiler/backend/constant_propagator.cc +++ b/runtime/vm/compiler/backend/constant_propagator.cc @@ -243,6 +243,8 @@ void ConstantPropagator::VisitGuardFieldClass(GuardFieldClassInstr* instr) {} void ConstantPropagator::VisitGuardFieldLength(GuardFieldLengthInstr* instr) {} +void ConstantPropagator::VisitGuardFieldType(GuardFieldTypeInstr* instr) {} + void ConstantPropagator::VisitCheckSmi(CheckSmiInstr* instr) {} void ConstantPropagator::VisitTailCall(TailCallInstr* instr) {} diff --git 
a/runtime/vm/compiler/backend/il.cc b/runtime/vm/compiler/backend/il.cc index 027022f0ac1f..0fff9471dfd2 100644 --- a/runtime/vm/compiler/backend/il.cc +++ b/runtime/vm/compiler/backend/il.cc @@ -879,6 +879,10 @@ bool GuardFieldLengthInstr::AttributesEqual(Instruction* other) const { return field().raw() == other->AsGuardFieldLength()->field().raw(); } +bool GuardFieldTypeInstr::AttributesEqual(Instruction* other) const { + return field().raw() == other->AsGuardFieldType()->field().raw(); +} + bool AssertAssignableInstr::AttributesEqual(Instruction* other) const { AssertAssignableInstr* other_assert = other->AsAssertAssignable(); ASSERT(other_assert != NULL); @@ -2647,6 +2651,15 @@ Definition* LoadFieldInstr::Canonicalize(FlowGraph* flow_graph) { } } else if (CreateArrayInstr* create_array = array->AsCreateArray()) { return create_array->element_type()->definition(); + } else if (LoadFieldInstr* load_array = array->AsLoadField()) { + const Field* field = load_array->field(); + // For trivially exact fields we know that type arguments match + // static type arguments exactly. + if ((field != nullptr) && + field->static_type_exactness_state().IsTriviallyExact()) { + return flow_graph->GetConstant(TypeArguments::Handle( + AbstractType::Handle(field->type()).arguments())); + } } } @@ -2693,31 +2706,64 @@ Definition* AssertAssignableInstr::Canonicalize(FlowGraph* flow_graph) { // be located in the unreachable part of the graph (e.g. // it might be dominated by CheckClass that always fails). // This means that the code below must guard against such possibility. 
- ConstantInstr* constant_instantiator_type_args = - instantiator_type_arguments()->definition()->AsConstant(); - ConstantInstr* constant_function_type_args = - function_type_arguments()->definition()->AsConstant(); - if ((constant_instantiator_type_args != NULL) && - (constant_function_type_args != NULL)) { - ASSERT(constant_instantiator_type_args->value().IsNull() || - constant_instantiator_type_args->value().IsTypeArguments()); - ASSERT(constant_function_type_args->value().IsNull() || - constant_function_type_args->value().IsTypeArguments()); + Zone* Z = Thread::Current()->zone(); - Zone* Z = Thread::Current()->zone(); - const TypeArguments& instantiator_type_args = TypeArguments::Handle( - Z, - TypeArguments::RawCast(constant_instantiator_type_args->value().raw())); + const TypeArguments* instantiator_type_args = nullptr; + const TypeArguments* function_type_args = nullptr; - const TypeArguments& function_type_args = TypeArguments::Handle( - Z, TypeArguments::RawCast(constant_function_type_args->value().raw())); + if (instantiator_type_arguments()->BindsToConstant()) { + const Object& val = instantiator_type_arguments()->BoundConstant(); + instantiator_type_args = (val.raw() == TypeArguments::null()) + ? &TypeArguments::null_type_arguments() + : &TypeArguments::Cast(val); + } + + if (function_type_arguments()->BindsToConstant()) { + const Object& val = function_type_arguments()->BoundConstant(); + function_type_args = + (val.raw() == TypeArguments::null()) + ? &TypeArguments::null_type_arguments() + : &TypeArguments::Cast(function_type_arguments()->BoundConstant()); + } + + // If instantiator_type_args are not constant try to match the pattern + // obj.field.:type_arguments where field's static type exactness state + // tells us that all values stored in the field have exact superclass. + // In this case we know the prefix of the actual type arguments vector + // and can try to instantiate the type using just the prefix. 
+ // + // Note: TypeParameter::InstantiateFrom returns an error if we try + // to instantiate it from a vector that is too short. + if (instantiator_type_args == nullptr) { + if (LoadFieldInstr* load_type_args = + instantiator_type_arguments()->definition()->AsLoadField()) { + if (load_type_args->native_field() != nullptr && + load_type_args->native_field()->kind() == + NativeFieldDesc::kTypeArguments) { + if (LoadFieldInstr* load_field = load_type_args->instance() + ->definition() + ->OriginalDefinition() + ->AsLoadField()) { + if (load_field->field() != nullptr && + load_field->field() + ->static_type_exactness_state() + .IsHasExactSuperClass()) { + instantiator_type_args = &TypeArguments::Handle( + Z, AbstractType::Handle(Z, load_field->field()->type()) + .arguments()); + } + } + } + } + } + if ((instantiator_type_args != nullptr) && (function_type_args != nullptr)) { Error& bound_error = Error::Handle(Z); AbstractType& new_dst_type = AbstractType::Handle( - Z, dst_type().InstantiateFrom(instantiator_type_args, - function_type_args, kAllFree, - &bound_error, NULL, NULL, Heap::kOld)); + Z, dst_type().InstantiateFrom( + *instantiator_type_args, *function_type_args, kAllFree, + &bound_error, nullptr, nullptr, Heap::kOld)); if (new_dst_type.IsMalformedOrMalbounded() || !bound_error.IsNull()) { return this; } @@ -3318,6 +3364,11 @@ Instruction* GuardFieldLengthInstr::Canonicalize(FlowGraph* flow_graph) { return this; } +Instruction* GuardFieldTypeInstr::Canonicalize(FlowGraph* flow_graph) { + return field().static_type_exactness_state().NeedsFieldGuard() ? this + : nullptr; +} + Instruction* CheckSmiInstr::Canonicalize(FlowGraph* flow_graph) { return (value()->Type()->ToCid() == kSmiCid) ? 
NULL : this; } diff --git a/runtime/vm/compiler/backend/il.h b/runtime/vm/compiler/backend/il.h index 589e13e4461f..ec7cbb1959c3 100644 --- a/runtime/vm/compiler/backend/il.h +++ b/runtime/vm/compiler/backend/il.h @@ -580,6 +580,7 @@ struct InstrAttrs { /*We could be more precise about when these 2 instructions can trigger GC.*/ \ M(GuardFieldClass, _) \ M(GuardFieldLength, _) \ + M(GuardFieldType, _) \ M(IfThenElse, kNoGC) \ M(MaterializeObject, _) \ M(TestSmi, kNoGC) \ @@ -4297,6 +4298,29 @@ class GuardFieldLengthInstr : public GuardFieldInstr { DISALLOW_COPY_AND_ASSIGN(GuardFieldLengthInstr); }; +// For a field of static type G and a stored value of runtime +// type T checks that type arguments of T at G exactly match +// and updates guarded state (RawField::static_type_exactness_state_) +// accordingly. +// +// See StaticTypeExactnessState for more information. +class GuardFieldTypeInstr : public GuardFieldInstr { + public: + GuardFieldTypeInstr(Value* value, const Field& field, intptr_t deopt_id) + : GuardFieldInstr(value, field, deopt_id) { + CheckField(field); + } + + DECLARE_INSTRUCTION(GuardFieldType) + + virtual Instruction* Canonicalize(FlowGraph* flow_graph); + + virtual bool AttributesEqual(Instruction* other) const; + + private: + DISALLOW_COPY_AND_ASSIGN(GuardFieldTypeInstr); +}; + class LoadStaticFieldInstr : public TemplateDefinition<1, NoThrow> { public: LoadStaticFieldInstr(Value* field_value, TokenPosition token_pos) diff --git a/runtime/vm/compiler/backend/il_arm.cc b/runtime/vm/compiler/backend/il_arm.cc index a186bfab19aa..40df4d817dea 100644 --- a/runtime/vm/compiler/backend/il_arm.cc +++ b/runtime/vm/compiler/backend/il_arm.cc @@ -2013,6 +2013,16 @@ void GuardFieldLengthInstr::EmitNativeCode(FlowGraphCompiler* compiler) { } } +LocationSummary* GuardFieldTypeInstr::MakeLocationSummary(Zone* zone, + bool opt) const { + UNREACHABLE(); + return nullptr; +} + +void GuardFieldTypeInstr::EmitNativeCode(FlowGraphCompiler* compiler) { + 
UNREACHABLE(); +} + class BoxAllocationSlowPath : public TemplateSlowPathCode { public: BoxAllocationSlowPath(Instruction* instruction, diff --git a/runtime/vm/compiler/backend/il_arm64.cc b/runtime/vm/compiler/backend/il_arm64.cc index 99f9646dbe4f..f72c97582825 100644 --- a/runtime/vm/compiler/backend/il_arm64.cc +++ b/runtime/vm/compiler/backend/il_arm64.cc @@ -1572,6 +1572,16 @@ static void LoadValueCid(FlowGraphCompiler* compiler, __ Bind(&done); } +LocationSummary* GuardFieldTypeInstr::MakeLocationSummary(Zone* zone, + bool opt) const { + UNREACHABLE(); + return nullptr; +} + +void GuardFieldTypeInstr::EmitNativeCode(FlowGraphCompiler* compiler) { + UNREACHABLE(); +} + LocationSummary* GuardFieldClassInstr::MakeLocationSummary(Zone* zone, bool opt) const { const intptr_t kNumInputs = 1; diff --git a/runtime/vm/compiler/backend/il_dbc.cc b/runtime/vm/compiler/backend/il_dbc.cc index dae7f804ed58..133a7fb26db9 100644 --- a/runtime/vm/compiler/backend/il_dbc.cc +++ b/runtime/vm/compiler/backend/il_dbc.cc @@ -38,6 +38,7 @@ DECLARE_FLAG(int, optimization_counter_threshold); M(TruncDivMod) \ M(GuardFieldClass) \ M(GuardFieldLength) \ + M(GuardFieldType) \ M(IfThenElse) \ M(ExtractNthOutput) \ M(BinaryUint32Op) \ diff --git a/runtime/vm/compiler/backend/il_ia32.cc b/runtime/vm/compiler/backend/il_ia32.cc index b75054de15ba..f9fe59ef22fd 100644 --- a/runtime/vm/compiler/backend/il_ia32.cc +++ b/runtime/vm/compiler/backend/il_ia32.cc @@ -1438,6 +1438,16 @@ void StoreIndexedInstr::EmitNativeCode(FlowGraphCompiler* compiler) { } } +LocationSummary* GuardFieldTypeInstr::MakeLocationSummary(Zone* zone, + bool opt) const { + UNREACHABLE(); + return nullptr; +} + +void GuardFieldTypeInstr::EmitNativeCode(FlowGraphCompiler* compiler) { + UNREACHABLE(); +} + LocationSummary* GuardFieldClassInstr::MakeLocationSummary(Zone* zone, bool opt) const { const intptr_t kNumInputs = 1; diff --git a/runtime/vm/compiler/backend/il_printer.cc b/runtime/vm/compiler/backend/il_printer.cc 
index be25c2b9837a..c5455fdb6ae1 100644 --- a/runtime/vm/compiler/backend/il_printer.cc +++ b/runtime/vm/compiler/backend/il_printer.cc @@ -651,16 +651,8 @@ void LoadFieldInstr::PrintOperandsTo(BufferFormatter* f) const { f->Print(", %" Pd, offset_in_bytes()); if (field() != nullptr) { - f->Print(" {%s}", String::Handle(field()->name()).ToCString()); - const char* expected = "?"; - if (field()->guarded_cid() != kIllegalCid) { - const Class& cls = Class::Handle( - Isolate::Current()->class_table()->At(field()->guarded_cid())); - expected = String::Handle(cls.Name()).ToCString(); - } - - f->Print(" [%s %s]", field()->is_nullable() ? "nullable" : "non-nullable", - expected); + f->Print(" {%s} %s", String::Handle(field()->name()).ToCString(), + field()->GuardedPropertiesAsCString()); } if (native_field() != nullptr) { diff --git a/runtime/vm/compiler/backend/il_x64.cc b/runtime/vm/compiler/backend/il_x64.cc index 6d0a4898ed11..35171aec854b 100644 --- a/runtime/vm/compiler/backend/il_x64.cc +++ b/runtime/vm/compiler/backend/il_x64.cc @@ -1646,10 +1646,14 @@ void GuardFieldClassInstr::EmitNativeCode(FlowGraphCompiler* compiler) { __ j(EQUAL, &ok); // Check if the tracked state of the guarded field can be initialized - // inline. If the field needs length check we fall through to runtime - // which is responsible for computing offset of the length field - // based on the class id. - if (!field().needs_length_check()) { + // inline. If the field needs length check or requires type arguments and + // class hierarchy processing for exactness tracking then we fall through + // into runtime which is responsible for computing offset of the length + // field based on the class id. + const bool is_complicated_field = + field().needs_length_check() || + field().static_type_exactness_state().IsUninitialized(); + if (!is_complicated_field) { // Uninitialized field can be handled inline. Check if the // field is still unitialized. 
__ cmpw(field_cid_operand, Immediate(kIllegalCid)); @@ -1805,6 +1809,87 @@ void GuardFieldLengthInstr::EmitNativeCode(FlowGraphCompiler* compiler) { } } +LocationSummary* GuardFieldTypeInstr::MakeLocationSummary(Zone* zone, + bool opt) const { + const intptr_t kNumInputs = 1; + const intptr_t kNumTemps = 1; + LocationSummary* summary = new (zone) + LocationSummary(zone, kNumInputs, kNumTemps, LocationSummary::kNoCall); + summary->set_in(0, Location::RequiresRegister()); + summary->set_temp(0, Location::RequiresRegister()); + return summary; +} + +void GuardFieldTypeInstr::EmitNativeCode(FlowGraphCompiler* compiler) { + // Should never emit GuardFieldType for fields that are marked as NotTracking. + ASSERT(field().static_type_exactness_state().IsTracking()); + if (!field().static_type_exactness_state().NeedsFieldGuard()) { + // Nothing to do: we only need to perform checks for trivially invariant + // fields. If optimizing Canonicalize pass should have removed + // this instruction. + if (Compiler::IsBackgroundCompilation()) { + Compiler::AbortBackgroundCompilation( + deopt_id(), + "GuardFieldTypeInstr: field state changed during compilation"); + } + ASSERT(!compiler->is_optimizing()); + return; + } + + Label* deopt = + compiler->is_optimizing() + ? compiler->AddDeoptStub(deopt_id(), ICData::kDeoptGuardField) + : NULL; + + Label ok; + + const Register value_reg = locs()->in(0).reg(); + const Register temp = locs()->temp(0).reg(); + + // Skip null values for nullable fields. + if (!compiler->is_optimizing() || field().is_nullable()) { + __ CompareObject(value_reg, Object::Handle()); + __ j(EQUAL, &ok); + } + + // Get the state. + __ LoadObject(temp, field()); + __ movsxb(temp, + FieldAddress(temp, Field::static_type_exactness_state_offset())); + + if (!compiler->is_optimizing()) { + // Check if field requires checking (it is in unitialized or trivially + // exact state). 
+ __ cmpq(temp, Immediate(StaticTypeExactnessState::kUninitialized)); + __ j(LESS, &ok); + } + + Label call_runtime; + if (field().static_type_exactness_state().IsUninitialized()) { + // Can't initialize the field state inline in optimized code. + __ cmpq(temp, Immediate(StaticTypeExactnessState::kUninitialized)); + __ j(EQUAL, compiler->is_optimizing() ? deopt : &call_runtime); + } + + // At this point temp is known to be type arguments offset in words. + __ movq(temp, FieldAddress(value_reg, temp, TIMES_8, 0)); + __ CompareObject(temp, TypeArguments::ZoneHandle( + AbstractType::Handle(field().type()).arguments())); + if (deopt != nullptr) { + __ j(NOT_EQUAL, deopt); + } else { + __ j(EQUAL, &ok); + + __ Bind(&call_runtime); + __ PushObject(field()); + __ pushq(value_reg); + __ CallRuntime(kUpdateFieldCidRuntimeEntry, 2); + __ Drop(2); + } + + __ Bind(&ok); +} + LocationSummary* StoreInstanceFieldInstr::MakeLocationSummary(Zone* zone, bool opt) const { const intptr_t kNumInputs = 2; diff --git a/runtime/vm/compiler/call_specializer.cc b/runtime/vm/compiler/call_specializer.cc index f0f4fd458e8a..aff0b3ad183c 100644 --- a/runtime/vm/compiler/call_specializer.cc +++ b/runtime/vm/compiler/call_specializer.cc @@ -960,7 +960,6 @@ bool CallSpecializer::TryInlineInstanceSetter(InstanceCallInstr* instr, if (I->use_field_guards()) { if (field.guarded_cid() != kDynamicCid) { - ASSERT(I->use_field_guards()); InsertBefore(instr, new (Z) GuardFieldClassInstr(new (Z) Value(instr->ArgumentAt(1)), @@ -969,13 +968,20 @@ bool CallSpecializer::TryInlineInstanceSetter(InstanceCallInstr* instr, } if (field.needs_length_check()) { - ASSERT(I->use_field_guards()); InsertBefore( instr, new (Z) GuardFieldLengthInstr(new (Z) Value(instr->ArgumentAt(1)), field, instr->deopt_id()), instr->env(), FlowGraph::kEffect); } + + if (field.static_type_exactness_state().NeedsFieldGuard()) { + InsertBefore(instr, + new (Z) + GuardFieldTypeInstr(new (Z) Value(instr->ArgumentAt(1)), + field, 
instr->deopt_id()), + instr->env(), FlowGraph::kEffect); + } } // Build an AssertAssignable if necessary. diff --git a/runtime/vm/compiler/frontend/base_flow_graph_builder.cc b/runtime/vm/compiler/frontend/base_flow_graph_builder.cc index d8f9e1a5370e..30432db902c6 100644 --- a/runtime/vm/compiler/frontend/base_flow_graph_builder.cc +++ b/runtime/vm/compiler/frontend/base_flow_graph_builder.cc @@ -411,6 +411,14 @@ Fragment BaseFlowGraphBuilder::StoreInstanceFieldGuarded( instructions += GuardFieldClass(field_clone, GetNextDeoptId()); instructions += LoadLocal(store_expression); instructions += GuardFieldLength(field_clone, GetNextDeoptId()); + + // If we are tracking exactness of the static type of the field then + // emit appropriate guard. + if (field_clone.static_type_exactness_state().IsTracking()) { + instructions += LoadLocal(store_expression); + instructions <<= + new (Z) GuardFieldTypeInstr(Pop(), field_clone, GetNextDeoptId()); + } } instructions += StoreInstanceField(field_clone, is_initialization_store); return instructions; diff --git a/runtime/vm/flag_list.h b/runtime/vm/flag_list.h index 96d15959ae69..440bbe809029 100644 --- a/runtime/vm/flag_list.h +++ b/runtime/vm/flag_list.h @@ -176,7 +176,7 @@ constexpr bool kDartPrecompiledRuntime = false; R(support_service, false, bool, true, "Support the service protocol.") \ R(support_timeline, false, bool, true, "Support timeline.") \ D(trace_cha, bool, false, "Trace CHA operations") \ - D(trace_field_guards, bool, false, "Trace changes in field's cids.") \ + R(trace_field_guards, false, bool, false, "Trace changes in field's cids.") \ C(trace_irregexp, false, false, bool, false, "Trace irregexps.") \ D(trace_isolates, bool, false, "Trace isolate creation and shut down.") \ D(trace_handles, bool, false, "Traces allocation of handles.") \ diff --git a/runtime/vm/kernel_loader.cc b/runtime/vm/kernel_loader.cc index d20b9759dfc9..e1ecd1989282 100644 --- a/runtime/vm/kernel_loader.cc +++ 
b/runtime/vm/kernel_loader.cc @@ -1239,6 +1239,50 @@ Class& KernelLoader::LoadClass(const Library& library, return klass; } +#if defined(TARGET_ARCH_X64) +static bool ReferencesAnyTypeArguments(const AbstractType& type) { + if (type.IsTypeRef()) { + return false; + } + + if (type.IsType()) { + const TypeArguments& args = + TypeArguments::Handle(Type::Cast(type).arguments()); + AbstractType& arg = AbstractType::Handle(); + for (intptr_t i = 0; i < args.Length(); i++) { + arg = args.TypeAt(i); + if (ReferencesAnyTypeArguments(arg)) { + return true; + } + } + } + + return type.IsTypeParameter(); +} + +static bool IsPotentialExactGeneric(const AbstractType& type) { + if (type.IsType()) { + const TypeArguments& args = + TypeArguments::Handle(Type::Cast(type).arguments()); + if (args.Length() == 0) { + return false; + } + + // TODO(dartbug.com/34170) Investigate supporting this for + // fields with types that depend on type parameters + // of the enclosing class. + return !ReferencesAnyTypeArguments(type); + } + + return false; +} +#else +// TODO(dartbug.com/34170) Support other architectures.
+static bool IsPotentialExactGeneric(const AbstractType& type) { + return false; +} +#endif + void KernelLoader::FinishClassLoading(const Class& klass, const Library& library, const Class& toplevel_class, @@ -1300,6 +1344,11 @@ void KernelLoader::FinishClassLoading(const Class& klass, Field::New(name, field_helper.IsStatic(), is_final, field_helper.IsConst(), is_reflectable, script_class, type, field_helper.position_, field_helper.end_position_)); + if (I->strong() && I->use_field_guards() && + IsPotentialExactGeneric(type)) { + field.set_static_type_exactness_state( + StaticTypeExactnessState::Unitialized()); + } field.set_kernel_offset(field_offset); CheckForInitializer(field); field_helper.ReadUntilExcluding(FieldHelper::kInitializer); diff --git a/runtime/vm/object.cc b/runtime/vm/object.cc index fbdb135dbd51..cdb5272d1ed5 100644 --- a/runtime/vm/object.cc +++ b/runtime/vm/object.cc @@ -8670,6 +8670,8 @@ void Field::InitializeNew(const Field& result, result.set_is_unboxing_candidate(true); result.set_initializer_changed_after_initialization(false); result.set_kernel_offset(0); + result.set_static_type_exactness_state( + StaticTypeExactnessState::NotTracking()); Isolate* isolate = Isolate::Current(); // Use field guards if they are enabled and the isolate has never reloaded. 
@@ -8970,7 +8972,9 @@ bool Field::IsConsistentWith(const Field& other) const { (raw_ptr()->is_nullable_ == other.raw_ptr()->is_nullable_) && (raw_ptr()->guarded_list_length_ == other.raw_ptr()->guarded_list_length_) && - (is_unboxing_candidate() == other.is_unboxing_candidate()); + (is_unboxing_candidate() == other.is_unboxing_candidate()) && + (static_type_exactness_state().Encode() == + other.static_type_exactness_state().Encode()); } bool Field::IsUninitialized() const { @@ -9058,9 +9062,24 @@ const char* Field::GuardedPropertiesAsCString() const { if (guarded_cid() == kIllegalCid) { return ""; } else if (guarded_cid() == kDynamicCid) { + ASSERT(!static_type_exactness_state().IsExactOrUninitialized()); return "<*>"; } + const char* exactness = ""; + if (!static_type_exactness_state().IsExactOrUninitialized()) { + exactness = " {!exact}"; + } else if (static_type_exactness_state().IsTriviallyExact()) { + exactness = " {trivially-exact}"; + } else if (static_type_exactness_state().IsHasExactSuperType()) { + exactness = " {has-exact-super-type}"; + } else if (static_type_exactness_state().IsHasExactSuperClass()) { + exactness = " {has-exact-super-class}"; + } else { + ASSERT(static_type_exactness_state().IsUninitialized()); + exactness = " {unknown exactness}"; + } + const Class& cls = Class::Handle(Isolate::Current()->class_table()->At(guarded_cid())); const char* class_name = String::Handle(cls.Name()).ToCString(); @@ -9069,16 +9088,18 @@ const char* Field::GuardedPropertiesAsCString() const { is_final()) { ASSERT(guarded_list_length() != kUnknownFixedLength); if (guarded_list_length() == kNoFixedLength) { - return Thread::Current()->zone()->PrintToString("<%s [*]>", class_name); + return Thread::Current()->zone()->PrintToString("<%s [*]%s>", class_name, + exactness); } else { return Thread::Current()->zone()->PrintToString( - "<%s [%" Pd " @%" Pd "]>", class_name, guarded_list_length(), - guarded_list_length_in_object_offset()); + "<%s [%" Pd " @%" Pd "]%s>", 
class_name, guarded_list_length(), + guarded_list_length_in_object_offset(), exactness); } } return Thread::Current()->zone()->PrintToString( - "<%s %s>", is_nullable() ? "nullable" : "not-nullable", class_name); + "<%s %s%s>", is_nullable() ? "nullable" : "not-nullable", class_name, + exactness); } void Field::InitializeGuardedListLengthInObjectOffset() const { @@ -9160,6 +9181,232 @@ bool Field::UpdateGuardedCidAndLength(const Object& value) const { return true; } +// Given the type G and class C find path to C at G. +// This path can be used to compute type arguments of C at G. +// +// Note: we are relying on the restriction that the same class can only occur +// once among the supertypes. +static bool FindInstantiationOf(const Type& type, + const Class& cls, + GrowableArray<const AbstractType*>* path, + bool consider_only_super_classes) { + if (type.type_class() == cls.raw()) { + return true; // Found instantiation. + } + + Class& cls2 = Class::Handle(); + AbstractType& super_type = AbstractType::Handle(); + super_type = cls.super_type(); + if (!super_type.IsNull() && !super_type.IsObjectType()) { + cls2 = super_type.type_class(); + path->Add(&super_type); + if (FindInstantiationOf(type, cls2, path, consider_only_super_classes)) { + return true; // Found instantiation. + } + path->RemoveLast(); + } + + if (!consider_only_super_classes) { + Array& super_interfaces = Array::Handle(cls.interfaces()); + for (intptr_t i = 0; i < super_interfaces.Length(); i++) { + super_type ^= super_interfaces.At(i); + cls2 = super_type.type_class(); + path->Add(&super_type); + if (FindInstantiationOf(type, cls2, path, + /*consider_only_super_classes=*/false)) { + return true; // Found instantiation. + } + path->RemoveLast(); + } + } + + return false; // Not found. +} + +bool Field::UpdateGuardedExactnessState(const Object& value) const { + if (!static_type_exactness_state().IsExactOrUninitialized()) { + // Nothing to update.
+ return false; + } + + if (guarded_cid() == kDynamicCid) { + if (FLAG_trace_field_guards) { + THR_Print( + " => switching off exactness tracking because guarded cid is " + "dynamic\n"); + } + set_static_type_exactness_state(StaticTypeExactnessState::NotExact()); + return true; // Invalidate. + } + + // If we are storing null into a field or we have an exact super type + // then there is nothing to do. + if (value.IsNull() || static_type_exactness_state().IsHasExactSuperType() || + static_type_exactness_state().IsHasExactSuperClass()) { + return false; + } + + // If we are storing a non-null value into a field that is considered + // to be trivially exact then we need to check if value has an appropriate + // type. + ASSERT(guarded_cid() != kNullCid); + + const Type& field_type = Type::Cast(AbstractType::Handle(type())); + const TypeArguments& field_type_args = + TypeArguments::Handle(field_type.arguments()); + + const Instance& instance = Instance::Cast(value); + TypeArguments& args = TypeArguments::Handle(); + if (static_type_exactness_state().IsTriviallyExact()) { + args = instance.GetTypeArguments(); + if (args.raw() == field_type_args.raw()) { + return false; + } + + if (FLAG_trace_field_guards) { + THR_Print(" expected %s got %s type arguments\n", + field_type_args.ToCString(), args.ToCString()); + } + + set_static_type_exactness_state(StaticTypeExactnessState::NotExact()); + return true; + } + + ASSERT(static_type_exactness_state().IsUninitialized()); + ASSERT(field_type.IsFinalized()); + const Class& cls = Class::Handle(instance.clazz()); + GrowableArray path(10); + + bool is_super_class = true; + if (!FindInstantiationOf(field_type, cls, &path, + /*consider_only_super_classes=*/true)) { + is_super_class = false; + bool found_super_interface = FindInstantiationOf( + field_type, cls, &path, /*consider_only_super_classes=*/false); + ASSERT(found_super_interface); + } + + // Trivial case: field has type G and value has type + // G. 
Check if type arguments match. + if (path.is_empty()) { + ASSERT(cls.raw() == field_type.type_class()); + args = instance.GetTypeArguments(); + // TODO(dartbug.com/34170) Evaluate if comparing relevant subvectors (that + // disregards superclass own arguments) improves precision of the + // tracking. + if (args.raw() == field_type_args.raw()) { + return false; + } + + if (FLAG_trace_field_guards) { + THR_Print(" expected %s got %s type arguments\n", + field_type_args.ToCString(), args.ToCString()); + } + set_static_type_exactness_state(StaticTypeExactnessState::NotExact()); + return true; + } + + // Value has type C and field has type G and G != C. + // Compute C at G (Xi are free type arguments). + // Path array contains a chain of immediate supertypes S0 <: S1 <: ... Sn, + // such that S0 is an immediate supertype of C and Sn is G<...>. + // Each Si might depend on type parameters of the previous supertype S{i-1}. + // To compute C at G we walk the chain backwards and + // instantiate Si using type parameters of S{i-1} which gives us a type + // depending on type parameters of S{i-2}. + Error& error = Error::Handle(); + AbstractType& type = AbstractType::Handle(path.Last()->raw()); + for (intptr_t i = path.length() - 2; (i >= 0) && !type.IsInstantiated(); + i--) { + args = path[i]->arguments(); + type = type.InstantiateFrom( + args, TypeArguments::null_type_arguments(), kAllFree, &error, + /*instantiation_trail=*/nullptr, /*bound_trail=*/nullptr, Heap::kNew); + } + + if (type.IsInstantiated()) { + // C at G is fully instantiated and does not depend on + // Xi. In this case just check if type arguments match. + args = type.arguments(); + if (args.Equals(field_type_args)) { + set_static_type_exactness_state( + is_super_class ? 
StaticTypeExactnessState::HasExactSuperClass() + : StaticTypeExactnessState::HasExactSuperType()); + } else { + if (FLAG_trace_field_guards) { + THR_Print( + " expected %s got %s type arguments\n", + field_type_args.ToCString(), + TypeArguments::Handle(instance.GetTypeArguments()).ToCString()); + } + set_static_type_exactness_state(StaticTypeExactnessState::NotExact()); + } + + // We are going from trivially exact to either super-exact or not-exact. + // In either of those cases invalidate any code that might be depending + // on the field state. + return true; + } + + // The most complicated case: C at G depends on + // Xi values. To compare type arguments we would need to instantiate + // it fully from value's type arguments and compare with . + // However this would complicate fast path in the native code. To avoid this + // complication we would optimize for the trivial case: we check if + // C at G is exactly G which means we can simply + // compare values type arguments () to fields type arguments + // () to establish if field type is exact.
+ ASSERT(cls.IsGeneric()); + const intptr_t num_type_params = cls.NumTypeParameters(); + bool trivial_case = + (num_type_params == + Class::Handle(field_type.type_class()).NumTypeParameters()) && + (instance.GetTypeArguments() == field_type.arguments()); + if (!trivial_case && FLAG_trace_field_guards) { + THR_Print("Not a simple case: %" Pd " vs %" Pd + " type parameters, %s vs %s type arguments\n", + num_type_params, + Class::Handle(field_type.type_class()).NumTypeParameters(), + TypeArguments::Handle(instance.GetTypeArguments()).ToCString(), + field_type_args.ToCString()); + } + + AbstractType& type_arg = AbstractType::Handle(); + args = type.arguments(); + for (intptr_t i = 0; (i < num_type_params) && trivial_case; i++) { + type_arg = args.TypeAt(i); + if (!type_arg.IsTypeParameter() || + (TypeParameter::Cast(type_arg).index() != i)) { + if (FLAG_trace_field_guards) { + THR_Print(" => encountered %s at index % " Pd "\n", + type_arg.ToCString(), i); + } + trivial_case = false; + } + } + + if (trivial_case) { + const intptr_t type_arguments_offset = cls.type_arguments_field_offset(); + ASSERT(type_arguments_offset != Class::kNoTypeArguments); + if (static_type_exactness_state().IsUninitialized()) { + if (StaticTypeExactnessState::CanRepresentAsTriviallyExact( + type_arguments_offset)) { + set_static_type_exactness_state( + StaticTypeExactnessState::TriviallyExact(type_arguments_offset)); + } else { + set_static_type_exactness_state(StaticTypeExactnessState::NotExact()); + } + return true; + } + + // Nothing to do - already initialized and checked. 
+ return false; + } + + set_static_type_exactness_state(StaticTypeExactnessState::NotExact()); + return true; +} + void Field::RecordStore(const Object& value) const { ASSERT(IsOriginal()); if (!Isolate::Current()->use_field_guards()) { @@ -9171,7 +9418,15 @@ void Field::RecordStore(const Object& value) const { value.ToCString()); } + bool invalidate = false; if (UpdateGuardedCidAndLength(value)) { + invalidate = true; + } + if (UpdateGuardedExactnessState(value)) { + invalidate = true; + } + + if (invalidate) { if (FLAG_trace_field_guards) { THR_Print(" => %s\n", GuardedPropertiesAsCString()); } @@ -9187,6 +9442,9 @@ void Field::ForceDynamicGuardedCidAndLength() const { set_is_nullable(true); set_guarded_list_length(Field::kNoFixedLength); set_guarded_list_length_in_object_offset(Field::kUnknownLengthOffset); + if (static_type_exactness_state().IsTracking()) { + set_static_type_exactness_state(StaticTypeExactnessState::NotExact()); + } // Drop any code that relied on the above assumptions. DeoptimizeDependentCode(); } diff --git a/runtime/vm/object.h b/runtime/vm/object.h index b4666bc2cd91..b6aaf09dd5c8 100644 --- a/runtime/vm/object.h +++ b/runtime/vm/object.h @@ -3160,6 +3160,116 @@ class RedirectionData : public Object { friend class HeapProfiler; }; +// Representation of a state of runtime tracking of static type exactness for +// a particular location in the program (e.g. exactness of type annotation +// on a field). +// +// Given the static type G we say that it is exact iff any +// values that can be observed at this location has runtime type T such that +// type arguments of T at G are exactly . +// +// Currently we only support tracking for locations that are also known +// to be monomorphic with respect to the actual class of the values it contains. 
+//
+// Important: locations should never switch from tracked (kIsTriviallyExact,
+// kHasExactSuperType, kHasExactSuperClass, kNotExact) to not tracked
+// (kNotTracking) or the other way around because that would affect unoptimized
+// graphs generated by graph builder and skew deopt ids.
+class StaticTypeExactnessState final {
+ public:
+  // Values stored in the location with static type G<T0, ..., Tn> are all
+  // instances of C<T0, ..., Tn> and C<T0, ..., Tn> at G has type parameters
+  // <T0, ..., Tn>.
+  //
+  // For trivially exact types we can simply compare type argument
+  // vectors as pointers to check exactness. That's why we represent
+  // trivially exact locations as offset in words to the type arguments of
+  // class C. All other states are represented as non-positive values.
+  //
+  // Note: we are ignoring the type argument vector sharing optimization for
+  // now.
+  static inline StaticTypeExactnessState TriviallyExact(
+      intptr_t type_arguments_offset) {
+    ASSERT((type_arguments_offset > 0) &&
+           Utils::IsAligned(type_arguments_offset, kWordSize) &&
+           Utils::IsInt(8, type_arguments_offset / kWordSize));
+    return StaticTypeExactnessState(type_arguments_offset / kWordSize);
+  }
+
+  static inline bool CanRepresentAsTriviallyExact(
+      intptr_t type_arguments_offset) {
+    return Utils::IsInt(8, type_arguments_offset / kWordSize);
+  }
+
+  // Values stored in the location with static type G<T0, ..., Tn> are all
+  // instances of class C<...> and C<...> at G has type
+  // parameters <T0, ..., Tn> for any <...> - that is C<...> has a
+  // supertype G<T0, ..., Tn>.
+  //
+  // For such locations we can simply check if the value stored
+  // is an instance of an expected class and we don't have to look at
+  // type arguments carried by the instance.
+  //
+  // We distinguish situations where we know that G is a superclass of C from
+  // situations where G might be superinterface of C - because in the first
+  // case type arguments of G give us constant prefix of type arguments of C.
+ static inline StaticTypeExactnessState HasExactSuperType() { + return StaticTypeExactnessState(kHasExactSuperType); + } + + static inline StaticTypeExactnessState HasExactSuperClass() { + return StaticTypeExactnessState(kHasExactSuperClass); + } + + // Values stored in the location don't fall under either kIsTriviallyExact + // or kHasExactSuperType categories. + // + // Note: that does not imply that static type annotation is not exact + // according to a broader definition, e.g. location might simply be + // polymorphic and store instances of multiple different types. + // However for simplicity we don't track such cases yet. + static inline StaticTypeExactnessState NotExact() { + return StaticTypeExactnessState(kNotExact); + } + + // The location does not track exactness of its static type at runtime. + static inline StaticTypeExactnessState NotTracking() { + return StaticTypeExactnessState(kNotTracking); + } + + static inline StaticTypeExactnessState Unitialized() { + return StaticTypeExactnessState(kUninitialized); + } + + bool IsTracking() const { return value_ != kNotTracking; } + bool IsUninitialized() const { return value_ == kUninitialized; } + bool IsHasExactSuperClass() const { return value_ == kHasExactSuperClass; } + bool IsHasExactSuperType() const { return value_ == kHasExactSuperType; } + bool IsTriviallyExact() const { return value_ > kUninitialized; } + bool NeedsFieldGuard() const { return value_ >= kUninitialized; } + bool IsExactOrUninitialized() const { return value_ > kNotExact; } + + static inline StaticTypeExactnessState Decode(int8_t value) { + return StaticTypeExactnessState(value); + } + + int8_t Encode() const { return value_; } + + static constexpr int8_t kUninitialized = 0; + + private: + static constexpr int8_t kNotTracking = -4; + static constexpr int8_t kNotExact = -3; + static constexpr int8_t kHasExactSuperType = -2; + static constexpr int8_t kHasExactSuperClass = -1; + + explicit StaticTypeExactnessState(int8_t value) : 
value_(value) {} + + const int8_t value_; + + DISALLOW_ALLOCATION(); +}; + class Field : public Object { public: RawField* Original() const; @@ -3306,6 +3416,19 @@ class Field : public Object { HasInitializerBit::update(has_initializer, raw_ptr()->kind_bits_)); } + StaticTypeExactnessState static_type_exactness_state() const { + return StaticTypeExactnessState::Decode( + raw_ptr()->static_type_exactness_state_); + } + + void set_static_type_exactness_state(StaticTypeExactnessState state) const { + StoreNonPointer(&raw_ptr()->static_type_exactness_state_, state.Encode()); + } + + static intptr_t static_type_exactness_state_offset() { + return OFFSET_OF(RawField, static_type_exactness_state_); + } + // Return class id that any non-null value read from this field is guaranteed // to have or kDynamicCid if such class id is not known. // Stores to this field must update this information hence the name. @@ -3476,6 +3599,11 @@ class Field : public Object { // deoptimization of dependent code is required. bool UpdateGuardedCidAndLength(const Object& value) const; + // Update guarded exactness state for this field. Returns true, if + // deoptimization of dependent code is required. + // Assumes that guarded cid was already updated. + bool UpdateGuardedExactnessState(const Object& value) const; + // Force this field's guard to be dynamic and deoptimize dependent code. 
void ForceDynamicGuardedCidAndLength() const; diff --git a/runtime/vm/parser.cc b/runtime/vm/parser.cc index fc8f34f9522a..f2ed76247791 100644 --- a/runtime/vm/parser.cc +++ b/runtime/vm/parser.cc @@ -214,8 +214,9 @@ ParsedFunction::ParsedFunction(Thread* thread, const Function& function) } void ParsedFunction::AddToGuardedFields(const Field* field) const { - if ((field->guarded_cid() == kDynamicCid) || - (field->guarded_cid() == kIllegalCid)) { + if (((field->guarded_cid() == kDynamicCid) || + (field->guarded_cid() == kIllegalCid)) && + field->static_type_exactness_state().IsExactOrUninitialized()) { return; } diff --git a/runtime/vm/raw_object.h b/runtime/vm/raw_object.h index faa0c2ddf774..dc6bcc9438c8 100644 --- a/runtime/vm/raw_object.h +++ b/runtime/vm/raw_object.h @@ -1081,6 +1081,11 @@ class RawField : public RawObject { // generated on platforms with weak addressing modes (ARM). int8_t guarded_list_length_in_object_offset_; + // Runtime tracking state of exactness of type annotation of this field. + // See StaticTypeExactnessState for the meaning and possible values in this + // field. + int8_t static_type_exactness_state_; + uint8_t kind_bits_; // static, final, const, has initializer.... 
friend class CidRewriteVisitor; diff --git a/runtime/vm/raw_object_snapshot.cc b/runtime/vm/raw_object_snapshot.cc index 288f9f1d8345..279d28e9e05b 100644 --- a/runtime/vm/raw_object_snapshot.cc +++ b/runtime/vm/raw_object_snapshot.cc @@ -881,6 +881,8 @@ RawField* Field::ReadFrom(SnapshotReader* reader, TokenPosition::SnapshotDecode(reader->Read())); field.set_guarded_cid(reader->Read()); field.set_is_nullable(reader->Read()); + field.set_static_type_exactness_state( + StaticTypeExactnessState::Decode(reader->Read())); #if !defined(DART_PRECOMPILED_RUNTIME) field.set_kernel_offset(reader->Read()); #endif @@ -896,6 +898,8 @@ RawField* Field::ReadFrom(SnapshotReader* reader, field.set_is_nullable(true); field.set_guarded_list_length(Field::kNoFixedLength); field.set_guarded_list_length_in_object_offset(Field::kUnknownLengthOffset); + field.set_static_type_exactness_state( + StaticTypeExactnessState::NotTracking()); } else { field.InitializeGuardedListLengthInObjectOffset(); } @@ -922,6 +926,7 @@ void RawField::WriteTo(SnapshotWriter* writer, writer->Write(ptr()->end_token_pos_.SnapshotEncode()); writer->Write(ptr()->guarded_cid_); writer->Write(ptr()->is_nullable_); + writer->Write(ptr()->static_type_exactness_state_); #if !defined(DART_PRECOMPILED_RUNTIME) writer->Write(ptr()->kernel_offset_); #endif