From a99c68855c9ccfaf9fb6a975aad658f59cfc260d Mon Sep 17 00:00:00 2001
From: Julius de Bruijn
Date: Fri, 27 Jan 2023 16:25:24 +0000
Subject: [PATCH] Separate Introspection warnings to its own module (#3633)

---
 flake.lock | 24 +-
 .../src/lib.rs | 2 +-
 .../src/sampler/field_type.rs | 4 +-
 .../src/sampler/statistics.rs | 9 +-
 .../tests/test_api/mod.rs | 2 +-
 .../src/datamodel_calculator.rs | 33 +-
 .../src/datamodel_calculator/context.rs | 144 +++---
 .../sql-introspection-connector/src/error.rs | 16 +-
 .../src/introspection_map.rs | 12 +-
 .../src/introspection_map/relation_names.rs | 40 +-
 .../sql-introspection-connector/src/lib.rs | 8 +-
 .../sql-introspection-connector/src/pair.rs | 6 +-
 .../src/pair/default.rs | 4 +-
 .../src/pair/enumerator.rs | 6 +-
 .../src/pair/id.rs | 7 +-
 .../src/pair/index.rs | 4 +-
 .../src/pair/index_field.rs | 2 +-
 .../src/pair/model.rs | 17 +-
 .../src/pair/relation_field.rs | 21 +-
 .../src/pair/scalar_field.rs | 18 +-
 .../src/pair/view.rs | 17 +-
 .../src/rendering.rs | 33 +-
 .../src/rendering/defaults.rs | 37 +-
 .../src/rendering/enums.rs | 59 +--
 .../src/rendering/id.rs | 21 +-
 .../src/rendering/models.rs | 72 +--
 .../src/rendering/relation_field.rs | 10 +-
 .../src/rendering/scalar_field.rs | 70 +--
 .../src/rendering/views.rs | 44 +-
 .../src/sanitize_datamodel_names.rs | 8 +-
 .../src/version_checker.rs | 19 +-
 .../src/warnings.rs | 454 +-----------------
 .../src/warnings/enum.rs | 48 ++
 .../src/warnings/generators.rs | 453 +++++++++++++++++
 .../src/warnings/model.rs | 91 ++++
 .../src/warnings/view.rs | 69 +++
 introspection-engine/core/build.rs | 2 +-
 introspection-engine/core/src/error.rs | 4 +-
 .../core/src/error_rendering.rs | 2 +-
 .../src/configuration/datasource.rs | 6 +-
 .../src/configuration/generator.rs | 4 +-
 .../introspection-engine-tests/build.rs | 2 +-
 .../src/test_api.rs | 24 +-
 .../tests/commenting_out/postgres.rs | 2 +-
 .../tests/enums/cockroachdb.rs | 5 +-
 .../tests/enums/mod.rs | 23 +-
 .../tests/multi_schema/cockroach.rs | 2 +-
 .../tests/multi_schema/postgres.rs | 2 +-
 .../tests/multi_schema/sql_server.rs | 4 +-
 .../tests/native_types/mssql.rs | 6 +-
 .../tests/native_types/mysql.rs | 6 +-
 .../tests/native_types/postgres.rs | 10 +-
 .../tests/re_introspection/postgresql.rs | 84 ++++
 .../tests/referential_actions/mysql.rs | 2 +-
 .../tests/remapping_database_names/mod.rs | 18 +-
 .../tests/simple.rs | 3 +-
 .../tests/views/postgresql.rs | 108 ++++-
 libs/mongodb-client/src/lib.rs | 14 +-
 libs/prisma-value/src/lib.rs | 6 +-
 libs/sql-ddl/src/mysql.rs | 18 +-
 libs/sql-ddl/src/postgres.rs | 6 +-
 libs/sql-ddl/src/sqlite.rs | 9 +-
 libs/sql-schema-describer/src/lib.rs | 2 +-
 libs/sql-schema-describer/src/mssql.rs | 22 +-
 libs/sql-schema-describer/src/mysql.rs | 20 +-
 libs/sql-schema-describer/src/postgres.rs | 24 +-
 libs/sql-schema-describer/src/sqlite.rs | 20 +-
 .../describers/postgres_describer_tests.rs | 9 +-
 .../tests/test_api/mod.rs | 4 +-
 libs/test-cli/build.rs | 2 +-
 .../src/diagnose_migration_history.rs | 2 +-
 libs/test-cli/src/main.rs | 13 +-
 libs/test-macros/src/lib.rs | 2 +-
 libs/test-setup/src/diff.rs | 4 +-
 libs/test-setup/src/logging.rs | 2 +-
 libs/test-setup/src/mssql.rs | 2 +-
 libs/test-setup/src/mysql.rs | 6 +-
 libs/test-setup/src/postgres.rs | 6 +-
 libs/test-setup/src/test_api_args.rs | 7 +-
 libs/user-facing-errors/src/common.rs | 13 +-
 libs/user-facing-errors/src/lib.rs | 6 +-
 .../src/migration_engine.rs | 2 +-
 libs/user-facing-errors/src/quaint.rs | 18 +-
 libs/user-facing-errors/src/query_engine.rs | 4 +-
 migration-engine/cli/build.rs | 2 +-
 migration-engine/cli/src/commands.rs | 2 +-
 migration-engine/cli/src/logger.rs | 2 +-
 migration-engine/cli/tests/cli_tests.rs | 34 +-
 .../migration-connector/src/checksum.rs | 4 +-
 .../migration-connector/src/error.rs | 2 +-
 .../src/migrations_directory.rs | 9 +-
 .../tests/migrations/test_api.rs | 14 +-
 .../src/apply_migration.rs | 2 +-
 .../sql-migration-connector/src/flavour.rs | 1 -
 .../src/flavour/mssql.rs | 44 +-
 .../src/flavour/mysql.rs | 21 +-
 .../src/flavour/postgres.rs | 24 +-
 .../src/flavour/postgres/connection.rs | 5 +-
 .../src/flavour/sqlite.rs | 2 +-
 .../postgres.rs | 2 +-
 .../unexecutable_step_check.rs | 9 +-
 .../warning_check.rs | 22 +-
 .../src/sql_renderer.rs | 2 +-
 .../src/sql_renderer/common.rs | 16 +-
 .../src/sql_renderer/mssql_renderer.rs | 12 +-
 .../mssql_renderer/alter_table.rs | 2 +-
 .../src/sql_renderer/mysql_renderer.rs | 24 +-
 .../src/sql_renderer/postgres_renderer.rs | 20 +-
 .../src/sql_renderer/sqlite_renderer.rs | 2 +-
 .../core/src/commands/dev_diagnostic.rs | 7 +-
 .../commands/diagnose_migration_history.rs | 2 +-
 migration-engine/core/src/commands/diff.rs | 4 +-
 migration-engine/core/src/lib.rs | 3 +-
 .../json-rpc-api-build/src/error.rs | 2 +-
 .../json-rpc-api-build/src/lib.rs | 16 +-
 .../json-rpc-api-build/src/rust_crate.rs | 32 +-
 .../migration-engine-tests/build.rs | 2 +-
 .../migration-engine-tests/src/assertions.rs | 62 +--
 .../src/assertions/quaint_result_set_ext.rs | 25 +-
 .../src/commands/apply_migrations.rs | 4 +-
 .../src/commands/create_migration.rs | 15 +-
 .../commands/list_migration_directories.rs | 4 +-
 .../src/commands/schema_push.rs | 4 +-
 .../src/multi_engine_test_api.rs | 7 +-
 .../migration-engine-tests/src/test_api.rs | 15 +-
 .../create_migration_tests.rs | 4 +-
 .../tests/errors/database_access_denied.rs | 3 +-
 .../tests/errors/error_tests.rs | 42 +-
 .../tests/existing_data/mod.rs | 4 +-
 .../tests/initialization/mod.rs | 5 +-
 .../tests/migrations/cockroachdb.rs | 11 +-
 .../migrations/cockroachdb/failure_modes.rs | 2 +-
 .../tests/migrations/db_execute.rs | 2 +-
 .../tests/migrations/dev_diagnostic_tests.rs | 19 +-
 .../diagnose_migration_history_tests.rs | 6 +-
 .../tests/migrations/diff.rs | 28 +-
 .../tests/migrations/drift_summary.rs | 2 +-
 .../mark_migration_applied_tests.rs | 3 +-
 .../mark_migration_rolled_back_tests.rs | 3 +-
 .../tests/migrations/migrate_lock.rs | 4 +-
 .../tests/migrations/mssql.rs | 21 +-
 .../tests/migrations/mssql/multi_schema.rs | 6 +-
 .../tests/migrations/mysql.rs | 2 +-
 .../tests/migrations/postgres.rs | 2 +-
 .../tests/migrations/postgres/multi_schema.rs | 8 +-
 .../tests/migrations/relations.rs | 10 +-
 .../tests/migrations/squashing_tests.rs | 27 +-
 .../tests/migrations/unsupported_types.rs | 5 +-
 .../tests/native_types/mssql.rs | 6 +-
 .../tests/native_types/mysql.rs | 4 +-
 .../tests/native_types/postgres.rs | 46 +-
 .../tests/schema_push/mod.rs | 9 +-
 .../tests/single_migration_tests.rs | 3 +-
 migration-engine/qe-setup/src/mssql.rs | 7 +-
 migration-engine/qe-setup/src/postgres.rs | 18 +-
 prisma-fmt/build.rs | 2 +-
 prisma-fmt/src/actions.rs | 2 +-
 prisma-fmt/src/code_actions/multi_schema.rs | 8 +-
 prisma-fmt/src/get_config.rs | 2 +-
 prisma-fmt/tests/code_actions/test_api.rs | 8 +-
 .../text_document_completion/test_api.rs | 8 +-
 .../src/cockroach_datamodel_connector.rs | 2 +-
 .../src/mongodb/mongodb_types.rs | 2 +-
 .../src/mssql_datamodel_connector.rs | 2 +-
 .../src/mysql_datamodel_connector.rs | 2 +-
 .../src/postgres_datamodel_connector.rs | 2 +-
 .../src/sqlite_datamodel_connector.rs | 2 +-
 psl/diagnostics/src/error.rs | 46 +-
 psl/diagnostics/src/pretty_print.rs | 4 +-
 psl/parser-database/src/attributes.rs | 8 +-
 psl/parser-database/src/attributes/id.rs | 3 +-
 psl/parser-database/src/names.rs | 6 +-
 psl/parser-database/src/relations.rs | 4 +-
 psl/parser-database/src/types.rs | 2 +-
 .../src/walkers/relation_field.rs | 4 +-
 .../src/configuration/configuration_struct.rs | 5 +-
 psl/psl-core/src/configuration/datasource.rs | 10 +-
 psl/psl-core/src/datamodel_connector.rs | 8 +-
 .../datamodel_connector/constraint_names.rs | 12 +-
 .../src/validate/datasource_loader.rs | 2 +-
 .../validations/composite_types.rs | 5 +-
 .../validations/default_value.rs | 6 +-
 .../validations/indexes.rs | 3 +-
 .../validation_pipeline/validations/models.rs | 5 +-
 .../validations/relation_fields.rs | 10 +-
 .../validations/relations.rs | 8 +-
 psl/psl/build.rs | 2 +-
 psl/psl/tests/base/base_types.rs | 6 +-
 psl/psl/tests/common/mod.rs | 20 +-
 psl/psl/tests/datamodel_tests.rs | 4 +-
 psl/psl/tests/panic_with_diff/mod.rs | 4 +-
 psl/psl/tests/reformat_tests.rs | 2 +-
 psl/schema-ast/src/ast/expression.rs | 4 +-
 psl/schema-ast/src/parser/parse_arguments.rs | 2 +-
 psl/schema-ast/src/parser/parse_enum.rs | 5 +-
 psl/schema-ast/src/parser/parse_schema.rs | 2 +-
 .../query-engine-tests/src/utils/string.rs | 2 +-
 .../tests/new/assertion_violation_error.rs | 11 +-
 .../tests/new/interactive_tx.rs | 2 +-
 .../tests/new/regressions/max_integer.rs | 180 ++-----
 .../tests/new/regressions/prisma_15467.rs | 18 +-
 .../tests/queries/aggregation/avg.rs | 4 +-
 .../queries/aggregation/combination_spec.rs | 4 +-
 .../tests/queries/aggregation/count.rs | 2 +-
 .../tests/queries/aggregation/group_by.rs | 2 +-
 .../queries/aggregation/group_by_having.rs | 4 +-
 .../aggregation/many_count_relation.rs | 2 +-
 .../tests/queries/aggregation/max.rs | 4 +-
 .../tests/queries/aggregation/min.rs | 4 +-
 .../tests/queries/aggregation/sum.rs | 4 +-
 .../aggregation/uniq_count_relation.rs | 2 +-
 .../queries/batch/in_selection_batching.rs | 2 +-
 .../queries/batch/select_one_singular.rs | 10 +-
 .../tests/queries/distinct.rs | 2 +-
 .../tests/queries/filters/composite/mod.rs | 2 +-
 .../filters/field_reference/having_filter.rs | 2 +-
 .../filters/field_reference/json_filter.rs | 2 +-
 .../field_reference/relation_filter.rs | 2 +-
 .../queries/filters/filter_regression.rs | 4 +-
 .../tests/queries/filters/filters.rs | 6 +-
 .../queries/filters/insensitive_filters.rs | 3 +-
 .../tests/queries/filters/json.rs | 2 +-
 .../tests/queries/filters/json_filters.rs | 17 +-
 .../tests/queries/filters/list_filters.rs | 10 +-
 .../tests/queries/filters/ported_filters.rs | 19 +-
 .../tests/queries/filters/search_filter.rs | 2 +-
 .../nested_multi_order_pagination.rs | 4 +-
 .../order_and_pagination/nested_pagination.rs | 2 +-
 .../queries/order_and_pagination/order_by.rs | 2 +-
 .../order_by_aggregation.rs | 2 +-
 .../order_by_composite.rs | 2 +-
 .../order_by_dependent.rs | 8 +-
 .../order_by_dependent_pagination.rs | 8 +-
 .../order_and_pagination/order_by_nulls.rs | 2 +-
 .../order_by_relevance.rs | 2 +-
 .../order_and_pagination/pagination.rs | 2 +-
 .../regressions/pagination_regression.rs | 2 +-
 .../tests/queries/regressions/prisma_4088.rs | 2 +-
 .../tests/queries/regressions/prisma_8389.rs | 5 +-
 .../tests/queries/regressions/prisma_933.rs | 2 +-
 .../queries/relations/inline_relation.rs | 2 +-
 .../queries/simple/composite_default_value.rs | 2 +-
 .../tests/queries/simple/find_first.rs | 2 +-
 .../queries/simple/find_first_or_throw.rs | 2 +-
 .../tests/queries/simple/find_many.rs | 2 +-
 .../queries/simple/mongo_incorrect_fields.rs | 2 +-
 .../queries/simple/multi_field_unique.rs | 2 +-
 .../tests/queries/simple/raw_mongo.rs | 2 +-
 .../query-engine-tests/tests/queries/views.rs | 6 +-
 .../tests/raw/sql/typed_output.rs | 2 +-
 .../tests/writes/composites/list.rs | 2 +-
 .../tests/writes/composites/single.rs | 2 +-
 .../writes/data_types/datetime/datetime.rs | 2 +-
 .../data_types/datetime/where_and_datetime.rs | 36 +-
 .../data_types/datetime/where_and_update.rs | 2 +-
 .../writes/data_types/scalar_list/base.rs | 14 +-
 .../writes/data_types/scalar_list/decimal.rs | 2 +-
 .../writes/data_types/scalar_list/json.rs | 2 +-
 .../writes/filters/delete_many_rel_filter.rs | 2 +-
 .../writes/filters/update_many_rel_filter.rs | 2 +-
 .../tests/writes/ids/byoid.rs | 8 +-
 .../nested_connect_inside_create.rs | 32 +-
 .../nested_connect_inside_update.rs | 45 +-
 .../nested_create_inside_update.rs | 18 +-
 .../nested_delete_inside_update.rs | 56 +--
 .../nested_delete_inside_upsert.rs | 54 +--
 .../nested_delete_many_inside_update.rs | 25 +-
 .../nested_disconnect_inside_update.rs | 38 +-
 .../nested_disconnect_inside_upsert.rs | 18 +-
 .../nested_set_inside_update.rs | 14 +-
 .../nested_update_many_inside_update.rs | 21 +-
 .../nested_upsert_inside_update.rs | 10 +-
 .../nested_atomic_number_ops.rs | 15 +-
 .../nested_connect_inside_upsert.rs | 6 +-
 .../nested_create_many.rs | 4 +-
 .../nested_update_inside_update.rs | 32 +-
 .../tests/writes/relations/rel_defaults.rs | 4 +-
 .../tests/writes/relations/rel_design.rs | 4 +-
 .../tests/writes/relations/rel_graphql.rs | 6 +-
 .../same_model_self_rel_without_back_rel.rs | 2 +-
 .../writes/top_level_mutations/create.rs | 5 +-
 .../writes/top_level_mutations/create_many.rs | 4 +-
 .../writes/top_level_mutations/delete.rs | 2 +-
 .../writes/top_level_mutations/delete_many.rs | 2 +-
 .../insert_null_in_required_field.rs | 2 +-
 .../non_embedded_upsert.rs | 2 +-
 .../writes/top_level_mutations/update.rs | 19 +-
 .../writes/top_level_mutations/update_many.rs | 9 +-
 .../writes/top_level_mutations/upsert.rs | 13 +-
 .../multi_field_uniq_mutation.rs | 4 +-
 ..._embedded_setting_node_selector_to_null.rs | 2 +-
 .../src/args/connector_test.rs | 4 +-
 .../query-test-macros/src/connector_test.rs | 4 +-
 .../query-test-macros/src/lib.rs | 4 +-
 .../src/relation_link_test.rs | 6 +-
 .../src/connector_tag/cockroachdb.rs | 4 +-
 .../src/connector_tag/mod.rs | 13 +-
 .../src/connector_tag/mongodb.rs | 30 +-
 .../src/connector_tag/mysql.rs | 18 +-
 .../src/connector_tag/postgres.rs | 48 +-
 .../src/connector_tag/sql_server.rs | 18 +-
 .../src/connector_tag/sqlite.rs | 2 +-
 .../src/connector_tag/vitess.rs | 2 +-
 .../src/datamodel_rendering/mod.rs | 8 +-
 .../src/datamodel_rendering/sql_renderer.rs | 2 +-
 .../query-tests-setup/src/logging.rs | 2 +-
 .../query-tests-setup/src/query_result.rs | 2 +-
 .../query-tests-setup/src/runner/binary.rs | 4 +-
 .../query-tests-setup/src/runner/mod.rs | 2 +-
 .../src/schema_gen/identifiers.rs | 2 +-
 .../query-tests-setup/src/schema_gen/parse.rs | 15 +-
 .../src/schema_gen/relation_field.rs | 3 +-
 .../src/templating/parse_models.rs | 4 +-
 .../mongodb-query-connector/src/cursor.rs | 2 +-
 .../mongodb-query-connector/src/error.rs | 21 +-
 .../mongodb-query-connector/src/filter.rs | 2 +-
 .../mongodb-query-connector/src/join.rs | 8 +-
 .../mongodb-query-connector/src/lib.rs | 2 +-
 .../mongodb-query-connector/src/orderby.rs | 7 +-
 .../src/query_builder/group_by_builder.rs | 2 +-
 .../src/query_strings.rs | 2 +-
 .../src/root_queries/raw.rs | 8 +-
 .../mongodb-query-connector/src/value.rs | 19 +-
 .../connectors/query-connector/src/error.rs | 6 +-
 .../query-connector/src/write_args.rs | 7 +-
 .../src/database/sqlite.rs | 3 +-
 .../sql-query-connector/src/error.rs | 9 +-
 .../src/filter_conversion.rs | 36 +-
 .../src/model_extensions/relation.rs | 2 +-
 .../src/nested_aggregations.rs | 2 +-
 .../sql-query-connector/src/query_ext.rs | 2 +-
 .../sql-query-connector/src/sql_trace.rs | 2 +-
 query-engine/core/src/error.rs | 9 +-
 query-engine/core/src/executor/loader.rs | 3 +-
 .../src/interactive_transactions/actors.rs | 2 +-
 .../core/src/interactive_transactions/mod.rs | 2 +-
 query-engine/core/src/interpreter/error.rs | 6 +-
 .../core/src/interpreter/expressionista.rs | 5 +-
 .../core/src/interpreter/interpreter.rs | 8 +-
 query-engine/core/src/query_ast/mod.rs | 4 +-
 query-engine/core/src/query_document/error.rs | 14 +-
 .../core/src/query_document/parser.rs | 17 +-
 .../core/src/query_document/transformers.rs | 39 +-
 .../core/src/query_graph/formatters.rs | 12 +-
 .../core/src/query_graph/transformers.rs | 4 +-
 .../extractors/filters/composite.rs | 3 +-
 .../extractors/filters/filter_fold.rs | 6 +-
 .../extractors/filters/filter_grouping.rs | 3 +-
 .../extractors/filters/mod.rs | 9 +-
 .../extractors/filters/relation.rs | 3 +-
 .../extractors/filters/scalar.rs | 18 +-
 .../extractors/query_arguments.rs | 9 +-
 .../src/query_graph_builder/read/utils.rs | 2 +-
 .../src/query_graph_builder/write/connect.rs | 3 +-
 .../query_graph_builder/write/disconnect.rs | 3 +-
 .../write/nested/connect_nested.rs | 38 +-
 .../write/nested/connect_or_create_nested.rs | 24 +-
 .../write/nested/create_nested.rs | 15 +-
 .../write/nested/delete_nested.rs | 3 +-
 .../query_graph_builder/write/nested/mod.rs | 2 +-
 .../write/nested/set_nested.rs | 6 +-
 .../write/nested/update_nested.rs | 3 +-
 .../write/nested/upsert_nested.rs | 9 +-
 .../src/query_graph_builder/write/utils.rs | 13 +-
 query-engine/core/src/response_ir/internal.rs | 10 +-
 .../core/src/response_ir/ir_serializer.rs | 2 +-
 query-engine/dml/src/default_value.rs | 10 +-
 query-engine/dml/src/model.rs | 4 +-
 query-engine/dml/src/scalars.rs | 2 +-
 .../src/ast_builders/datamodel_ast_builder.rs | 8 +-
 .../field_builders/composite_field_builder.rs | 2 +-
 .../src/builders/index_builder.rs | 2 +-
 .../src/builders/internal_dm_builder.rs | 2 +-
 query-engine/prisma-models/src/field/mod.rs | 2 +-
 .../prisma-models/src/field_selection.rs | 7 +-
 .../prisma-models/src/prisma_value_ext.rs | 18 +-
 .../tests/datamodel_converter_tests.rs | 4 +-
 query-engine/query-engine-node-api/build.rs | 2 +-
 .../query-engine-node-api/src/engine.rs | 2 +-
 .../query-engine-node-api/src/error.rs | 6 +-
 .../query-engine-node-api/src/logger.rs | 5 +-
 query-engine/query-engine/build.rs | 2 +-
 query-engine/query-engine/src/cli.rs | 6 +-
 query-engine/query-engine/src/error.rs | 8 +-
 query-engine/query-engine/src/opt.rs | 4 +-
 query-engine/query-engine/src/server/mod.rs | 2 +-
 query-engine/request-handlers/src/error.rs | 6 +-
 .../src/graphql/protocol_adapter.rs | 11 +-
 .../request-handlers/src/graphql/response.rs | 2 +-
 .../graphql/schema_renderer/type_renderer.rs | 4 +-
 query-engine/schema-builder/src/cache.rs | 3 +-
 query-engine/schema-builder/src/enum_types.rs | 2 +-
 .../fields/data_input_mapper/update.rs | 4 +-
 .../src/input_types/fields/field_ref_type.rs | 2 +-
 query-engine/schema/src/input_types.rs | 6 +-
 query-engine/schema/src/query_schema.rs | 4 +-
 396 files changed, 2433 insertions(+), 2783 deletions(-)
 create mode 100644 introspection-engine/connectors/sql-introspection-connector/src/warnings/enum.rs
 create mode 100644 introspection-engine/connectors/sql-introspection-connector/src/warnings/generators.rs
 create mode 100644 introspection-engine/connectors/sql-introspection-connector/src/warnings/model.rs
 create mode 100644 introspection-engine/connectors/sql-introspection-connector/src/warnings/view.rs
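[Editor's note] The new files in the diffstat (warnings/enum.rs, warnings/generators.rs, warnings/model.rs, warnings/view.rs) move warning generation behind a single entry point, `warnings::generate`, which is visible in the `calculate` diff further down. A rough Rust sketch of what such a per-topic split looks like; only the shape is implied by this patch, and every item name below is a stand-in, not the real crate API:

    // Hypothetical skeleton of the warnings module split.
    mod warnings {
        pub struct Warning {
            pub code: u32,
            pub message: String,
        }

        mod model {
            // Model-specific checks would push their findings here.
            pub(super) fn warnings(out: &mut Vec<super::Warning>) {
                let _ = out;
            }
        }

        mod view {
            pub(super) fn warnings(out: &mut Vec<super::Warning>) {
                let _ = out;
            }
        }

        // One aggregation point, as `warnings::generate(&ctx)` suggests.
        pub fn generate() -> Vec<Warning> {
            let mut out = Vec::new();
            model::warnings(&mut out);
            view::warnings(&mut out);
            out
        }
    }

    fn main() {
        assert!(warnings::generate().is_empty());
    }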
diff --git a/flake.lock b/flake.lock
index 78ca9a9e27cb..a074b87f05f7 100644
--- a/flake.lock
+++ b/flake.lock
@@ -14,11 +14,11 @@
       ]
     },
     "locked": {
-      "lastModified": 1673405853,
-      "narHash": "sha256-6Nq9DuOo+gE2I8z5UZaKuumykz2xxZ9JGYmUthOuwSA=",
+      "lastModified": 1674348649,
+      "narHash": "sha256-hBRlaUlsrmW1wAPevwQnkrT0XiLrmlAHWabWYmLeQlQ=",
       "owner": "ipetkov",
       "repo": "crane",
-      "rev": "b13963c8c18026aa694acd98d14f66d24666f70b",
+      "rev": "ccea7b33178daf6010aae3ea2b3fb5b0241b9146",
       "type": "github"
     },
     "original": {
@@ -50,11 +50,11 @@
       ]
     },
     "locked": {
-      "lastModified": 1673362319,
-      "narHash": "sha256-Pjp45Vnj7S/b3BRpZEVfdu8sqqA6nvVjvYu59okhOyI=",
+      "lastModified": 1674771137,
+      "narHash": "sha256-Zpk1GbEsYrqKmuIZkx+f+8pU0qcCYJoSUwNz1Zk+R00=",
       "owner": "hercules-ci",
       "repo": "flake-parts",
-      "rev": "82c16f1682cf50c01cb0280b38a1eed202b3fe9f",
+      "rev": "7c7a8bce3dffe71203dcd4276504d1cb49dfe05f",
       "type": "github"
     },
     "original": {
@@ -100,11 +100,11 @@
     },
     "nixpkgs": {
       "locked": {
-        "lastModified": 1673226411,
-        "narHash": "sha256-b6cGb5Ln7Zy80YO66+cbTyGdjZKtkoqB/iIIhDX9gRA=",
+        "lastModified": 1674641431,
+        "narHash": "sha256-qfo19qVZBP4qn5M5gXc/h1MDgAtPA5VxJm9s8RUAkVk=",
         "owner": "NixOS",
         "repo": "nixpkgs",
-        "rev": "aa1d74709f5dac623adb4d48fdfb27cc2c92a4d4",
+        "rev": "9b97ad7b4330aacda9b2343396eb3df8a853b4fc",
         "type": "github"
       },
       "original": {
@@ -133,11 +133,11 @@
       ]
     },
     "locked": {
-      "lastModified": 1673404037,
-      "narHash": "sha256-9yhRzFiqzVQaJN5jsAIwApDolkORRQ3EJi7D4yu58ig=",
+      "lastModified": 1674786480,
+      "narHash": "sha256-n25V3Ug/dJewbJaxj1gL0cUMBdOonrVkIQCHd9yHHvw=",
       "owner": "oxalica",
       "repo": "rust-overlay",
-      "rev": "a979c85ed4691bf996af88504522b32e9611ccfe",
+      "rev": "296dd673b46aaebe1c8355f1848ceb7c905dda35",
       "type": "github"
     },
     "original": {
diff --git a/introspection-engine/connectors/mongodb-introspection-connector/src/lib.rs b/introspection-engine/connectors/mongodb-introspection-connector/src/lib.rs
index 2dbb3dbba5b2..69383f057728 100644
--- a/introspection-engine/connectors/mongodb-introspection-connector/src/lib.rs
+++ b/introspection-engine/connectors/mongodb-introspection-connector/src/lib.rs
@@ -41,7 +41,7 @@ impl MongoDbIntrospectionConnector {
 
         ConnectorError {
             user_facing_error: Some(known),
-            kind: introspection_connector::ErrorKind::InvalidDatabaseUrl(format!("{} in database URL", err)),
+            kind: introspection_connector::ErrorKind::InvalidDatabaseUrl(format!("{err} in database URL")),
         }
     };
 
diff --git a/introspection-engine/connectors/mongodb-introspection-connector/src/sampler/field_type.rs b/introspection-engine/connectors/mongodb-introspection-connector/src/sampler/field_type.rs
index e858fbc75155..be74e848a19b 100644
--- a/introspection-engine/connectors/mongodb-introspection-connector/src/sampler/field_type.rs
+++ b/introspection-engine/connectors/mongodb-introspection-connector/src/sampler/field_type.rs
@@ -115,8 +115,8 @@ impl fmt::Display for FieldType {
             FieldType::Int64 => f.write_str("BigInt"),
             FieldType::Json => f.write_str("Json"),
             FieldType::Document(s) => f.write_str(s),
-            FieldType::Array(r#type) => write!(f, "Array({})", r#type),
-            FieldType::Unsupported(r#type) => write!(f, "{}", r#type),
+            FieldType::Array(r#type) => write!(f, "Array({type})"),
+            FieldType::Unsupported(r#type) => write!(f, "{type}"),
         }
     }
 }
diff --git a/introspection-engine/connectors/mongodb-introspection-connector/src/sampler/statistics.rs b/introspection-engine/connectors/mongodb-introspection-connector/src/sampler/statistics.rs
index 95ebce3af2f3..415308eb4fbc 100644
--- a/introspection-engine/connectors/mongodb-introspection-connector/src/sampler/statistics.rs
+++ b/introspection-engine/connectors/mongodb-introspection-connector/src/sampler/statistics.rs
@@ -56,15 +56,12 @@ impl<'a> Statistics<'a> {
     /// - if model is foo and field is bar, the type is FooBar
     /// - if a model already exists with the name, we'll use FooBar_
     fn composite_type_name(&self, model: &str, field: &str) -> Name {
-        let combined: String = format!("{}_{}", model, field)
-            .chars()
-            .filter(|c| c.is_ascii())
-            .collect();
+        let combined: String = format!("{model}_{field}").chars().filter(|c| c.is_ascii()).collect();
 
         let name = Name::Model(combined.to_case(Case::Pascal));
 
         let name = if self.models.contains_key(&name) {
-            format!("{}_", name)
+            format!("{name}_")
         } else {
             name.take()
         };
@@ -568,7 +565,7 @@ impl fmt::Display for FieldPercentages {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         for (i, (k, v)) in self.data.iter().enumerate() {
             let p = (*v * 1000.0).round() / 10.0;
-            write!(f, "{}: {}%", k, p)?;
+            write!(f, "{k}: {p}%")?;
 
             if i < self.data.keys().count() - 1 {
                 write!(f, ", ")?;
diff --git a/introspection-engine/connectors/mongodb-introspection-connector/tests/test_api/mod.rs b/introspection-engine/connectors/mongodb-introspection-connector/tests/test_api/mod.rs
index 0705c3dfe5e0..14a4bfa70b5d 100644
--- a/introspection-engine/connectors/mongodb-introspection-connector/tests/test_api/mod.rs
+++ b/introspection-engine/connectors/mongodb-introspection-connector/tests/test_api/mod.rs
@@ -90,7 +90,7 @@ where
 
     let features = preview_features
         .iter()
-        .map(|f| format!("\"{}\"", f))
+        .map(|f| format!("\"{f}\""))
         .collect::<Vec<_>>()
         .join(", ");
 
diff --git a/introspection-engine/connectors/sql-introspection-connector/src/datamodel_calculator.rs b/introspection-engine/connectors/sql-introspection-connector/src/datamodel_calculator.rs
index 83b24f919861..874b049a3e3e 100644
--- a/introspection-engine/connectors/sql-introspection-connector/src/datamodel_calculator.rs
+++ b/introspection-engine/connectors/sql-introspection-connector/src/datamodel_calculator.rs
@@ -2,45 +2,24 @@
 
 mod context;
 
-use crate::{rendering, warnings, SqlFamilyTrait, SqlIntrospectionResult};
-pub(crate) use context::{InputContext, OutputContext};
+use crate::{rendering, warnings, SqlIntrospectionResult};
+pub(crate) use context::DatamodelCalculatorContext;
 use introspection_connector::{IntrospectionContext, IntrospectionResult, Version};
 use sql_schema_describer as sql;
 
 /// Calculate a data model from a database schema.
 pub fn calculate(schema: &sql::SqlSchema, ctx: &IntrospectionContext) -> SqlIntrospectionResult<IntrospectionResult> {
-    let introspection_map = Default::default();
+    let ctx = DatamodelCalculatorContext::new(ctx, schema);
 
-    let mut input = InputContext {
-        version: Version::NonPrisma,
-        config: ctx.configuration(),
-        render_config: ctx.render_config,
-        schema,
-        sql_family: ctx.sql_family(),
-        previous_schema: ctx.previous_schema(),
-        introspection_map: &introspection_map,
-        force_namespaces: ctx.namespaces(),
-    };
-
-    let introspection_map = crate::introspection_map::IntrospectionMap::new(input);
-    input.introspection_map = &introspection_map;
-
-    let mut output = OutputContext {
-        rendered_schema: datamodel_renderer::Datamodel::default(),
-        warnings: warnings::Warnings::new(),
-    };
-
-    input.version = crate::version_checker::check_prisma_version(&input);
-
-    let (schema_string, is_empty) = rendering::to_psl_string(input, &mut output)?;
-    let warnings = output.finalize_warnings();
+    let (schema_string, is_empty) = rendering::to_psl_string(&ctx)?;
+    let warnings = warnings::generate(&ctx);
 
     // Warning codes 5 and 6 are for Prisma 1 default reintrospection.
     let version = if warnings.iter().any(|w| ![5, 6].contains(&w.code)) {
         Version::NonPrisma
     } else {
-        input.version
+        ctx.version
     };
 
     Ok(IntrospectionResult {
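[Editor's note] The rewritten `calculate` above is the heart of the refactor: one shared `DatamodelCalculatorContext` is built up front, the PSL string is rendered from it, and the warnings are derived from the same borrow instead of being accumulated in a separate `OutputContext`. A minimal, self-contained sketch of that control flow; `SqlSchema`, `Ctx`, `Warning`, `render` and `generate_warnings` are placeholders, not the real prisma-engines items:

    // Sketch only: placeholder types standing in for the real crate items.
    struct SqlSchema;

    #[derive(Clone, Copy, PartialEq, Debug)]
    enum Version { NonPrisma, Prisma1 }

    struct Warning { code: u32 }

    struct Ctx { version: Version }

    impl Ctx {
        // Mirrors DatamodelCalculatorContext::new: derived state is
        // computed once, so later steps only borrow the finished context.
        fn new(_schema: &SqlSchema) -> Self {
            Ctx { version: Version::Prisma1 }
        }
    }

    fn render(_ctx: &Ctx) -> String { String::from("// datamodel") }

    fn generate_warnings(_ctx: &Ctx) -> Vec<Warning> { vec![Warning { code: 5 }] }

    fn calculate(schema: &SqlSchema) -> (String, Vec<Warning>, Version) {
        let ctx = Ctx::new(schema);               // step 1: build the context
        let rendered = render(&ctx);              // step 2: render the PSL
        let warnings = generate_warnings(&ctx);   // step 3: warnings from the same context

        // Same rule as the patch: any warning other than codes 5 and 6
        // (Prisma 1 default re-introspection) forces Version::NonPrisma.
        let version = if warnings.iter().any(|w| ![5, 6].contains(&w.code)) {
            Version::NonPrisma
        } else {
            ctx.version
        };

        (rendered, warnings, version)
    }

    fn main() {
        let (_rendered, warnings, version) = calculate(&SqlSchema);
        assert_eq!(version, Version::Prisma1);
        assert_eq!(warnings[0].code, 5);
    }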
diff --git a/introspection-engine/connectors/sql-introspection-connector/src/datamodel_calculator/context.rs b/introspection-engine/connectors/sql-introspection-connector/src/datamodel_calculator/context.rs
index 815ec27e0bd3..d454dd4d4a89 100644
--- a/introspection-engine/connectors/sql-introspection-connector/src/datamodel_calculator/context.rs
+++ b/introspection-engine/connectors/sql-introspection-connector/src/datamodel_calculator/context.rs
@@ -1,10 +1,10 @@
 use crate::{
     introspection_helpers::{is_new_migration_table, is_old_migration_table, is_prisma_join_table, is_relay_table},
-    introspection_map::RelationName,
+    introspection_map::{IntrospectionMap, RelationName},
     pair::{EnumPair, ModelPair, Pair, RelationFieldDirection, ViewPair},
-    warnings, EnumVariantName, IntrospectedName, ModelName,
+    version_checker, EnumVariantName, IntrospectedName, ModelName, SqlFamilyTrait,
 };
-use introspection_connector::{Version, Warning};
+use introspection_connector::{IntrospectionContext, Version};
 use psl::{
     builtin_connectors::*,
     datamodel_connector::Connector,
@@ -15,47 +15,53 @@
 use quaint::prelude::SqlFamily;
 use sql_schema_describer as sql;
 use std::borrow::Cow;
 
-pub(crate) struct OutputContext<'a> {
-    pub(crate) rendered_schema: datamodel_renderer::Datamodel<'a>,
-    pub(crate) warnings: warnings::Warnings,
-}
-
-impl<'a> OutputContext<'a> {
-    pub(crate) fn finalize_warnings(&mut self) -> Vec<Warning> {
-        self.warnings.finalize()
-    }
-}
-
-#[derive(Clone, Copy)]
-pub(crate) struct InputContext<'a> {
+pub(crate) struct DatamodelCalculatorContext<'a> {
     pub(crate) config: &'a Configuration,
     pub(crate) render_config: bool,
-    pub(crate) schema: &'a sql::SqlSchema,
+    pub(crate) sql_schema: &'a sql::SqlSchema,
     pub(crate) sql_family: SqlFamily,
     pub(crate) version: Version,
     pub(crate) previous_schema: &'a psl::ValidatedSchema,
-    pub(crate) introspection_map: &'a crate::introspection_map::IntrospectionMap<'a>,
+    pub(crate) introspection_map: IntrospectionMap<'a>,
     pub(crate) force_namespaces: Option<&'a [String]>,
 }
 
-impl<'a> InputContext<'a> {
-    pub(crate) fn is_cockroach(self) -> bool {
+impl<'a> DatamodelCalculatorContext<'a> {
+    pub(crate) fn new(ctx: &'a IntrospectionContext, sql_schema: &'a sql::SqlSchema) -> Self {
+        let mut ctx = DatamodelCalculatorContext {
+            version: Version::NonPrisma,
+            config: ctx.configuration(),
+            render_config: ctx.render_config,
+            sql_schema,
+            sql_family: ctx.sql_family(),
+            previous_schema: ctx.previous_schema(),
+            introspection_map: Default::default(),
+            force_namespaces: ctx.namespaces(),
+        };
+
+        ctx.introspection_map = IntrospectionMap::new(&ctx);
+        ctx.version = version_checker::check_prisma_version(&ctx);
+
+        ctx
+    }
+
+    pub(crate) fn is_cockroach(&self) -> bool {
         self.active_connector().provider_name() == COCKROACH.provider_name()
     }
 
-    pub(crate) fn relation_mode(self) -> psl::datamodel_connector::RelationMode {
+    pub(crate) fn relation_mode(&self) -> psl::datamodel_connector::RelationMode {
         self.config.datasources.first().unwrap().relation_mode()
     }
 
-    pub(crate) fn foreign_keys_enabled(self) -> bool {
+    pub(crate) fn foreign_keys_enabled(&self) -> bool {
         self.relation_mode().uses_foreign_keys()
     }
 
-    pub(crate) fn active_connector(self) -> &'static dyn Connector {
+    pub(crate) fn active_connector(&self) -> &'static dyn Connector {
         self.config.datasources.first().unwrap().active_connector
     }
 
-    pub(crate) fn uses_namespaces(self) -> bool {
+    pub(crate) fn uses_namespaces(&self) -> bool {
         let schemas_in_datasource = matches!(self.config.datasources.first(), Some(ds) if !ds.namespaces.is_empty());
         let schemas_in_parameters = self.force_namespaces.is_some();
 
@@ -64,16 +70,16 @@ impl<'a> InputContext<'a> {
 
     /// Iterate over the database enums, combined together with a
     /// possible existing enum in the PSL.
-    pub(crate) fn enum_pairs(self) -> impl ExactSizeIterator<Item = EnumPair<'a>> {
-        self.schema
+    pub(crate) fn enum_pairs(&'a self) -> impl ExactSizeIterator<Item = EnumPair<'a>> + 'a {
+        self.sql_schema
             .enum_walkers()
-            .map(move |next| Pair::new(self, self.existing_enum(next.id), next))
+            .map(|next| Pair::new(self, self.existing_enum(next.id), next))
     }
 
     /// Iterate over the database tables, combined together with a
     /// possible existing model in the PSL.
-    pub(crate) fn model_pairs(self) -> impl Iterator<Item = ModelPair<'a>> {
-        self.schema
+    pub(crate) fn model_pairs(&'a self) -> impl Iterator<Item = ModelPair<'a>> + 'a {
+        self.sql_schema
             .table_walkers()
             .filter(|table| !is_old_migration_table(*table))
             .filter(|table| !is_new_migration_table(*table))
@@ -87,16 +93,16 @@ impl<'a> InputContext<'a> {
 
     /// Iterate over the database views, combined together with a
     /// possible existing view in the PSL.
-    pub(crate) fn view_pairs(self) -> impl Iterator<Item = ViewPair<'a>> {
+    pub(crate) fn view_pairs(&'a self) -> impl Iterator<Item = ViewPair<'a>> + 'a {
         // Right now all connectors introspect views for db reset.
         // Filtering the ones with columns will not cause
         // empty view blocks with these connectors.
         //
         // Removing the filter when all connectors are done.
-        self.schema
+        self.sql_schema
             .view_walkers()
             .filter(|v| !v.columns().len() > 0)
-            .map(move |next| {
+            .map(|next| {
                 let previous = self.existing_view(next.id);
                 Pair::new(self, previous, next)
             })
@@ -104,7 +110,7 @@
 
     /// Given a SQL enum from the database, this method returns the enum that matches it (by name)
     /// in the Prisma schema.
-    pub(crate) fn existing_enum(self, id: sql::EnumId) -> Option<walkers::EnumWalker<'a>> {
+    pub(crate) fn existing_enum(&self, id: sql::EnumId) -> Option<walkers::EnumWalker<'a>> {
         self.introspection_map
             .existing_enums
             .get(&id)
@@ -114,7 +120,7 @@ impl<'a> InputContext<'a> {
     /// Given a SQL enum from the database, this method returns the name it will be given in the
     /// introspected schema. If it matches a remapped enum in the Prisma schema, it is taken into
     /// account.
-    pub(crate) fn enum_prisma_name(self, id: sql::EnumId) -> ModelName<'a> {
+    pub(crate) fn enum_prisma_name(&self, id: sql::EnumId) -> ModelName<'a> {
         if let Some(r#enum) = self.existing_enum(id) {
             return ModelName::FromPsl {
                 name: r#enum.name(),
@@ -122,15 +128,15 @@ impl<'a> InputContext<'a> {
             };
         }
 
-        let r#enum = self.schema.walk(id);
+        let r#enum = self.sql_schema.walk(id);
         ModelName::new_from_sql(r#enum.name(), r#enum.namespace(), self)
     }
 
     /// Given a SQL enum variant from the database catalog, this method returns the name it will be
     /// given in the introspected schema. If it matches a remapped enum value in the Prisma schema,
     /// it is taken into account.
-    pub(crate) fn enum_variant_name(self, id: sql::EnumVariantId) -> EnumVariantName<'a> {
-        let variant = self.schema.walk(id);
+    pub(crate) fn enum_variant_name(&self, id: sql::EnumVariantId) -> EnumVariantName<'a> {
+        let variant = self.sql_schema.walk(id);
         let variant_name = variant.name();
 
         self.existing_enum(variant.r#enum().id)
@@ -144,7 +150,7 @@ impl<'a> InputContext<'a> {
 
     /// Given a foreign key from the database, this methods returns the existing relation in the
     /// Prisma schema that matches it.
-    pub(crate) fn existing_inline_relation(self, id: sql::ForeignKeyId) -> Option<walkers::InlineRelationWalker<'a>> {
+    pub(crate) fn existing_inline_relation(&self, id: sql::ForeignKeyId) -> Option<walkers::InlineRelationWalker<'a>> {
         self.introspection_map
             .existing_inline_relations
             .get(&id)
@@ -152,7 +158,7 @@ impl<'a> InputContext<'a> {
     }
 
     pub(crate) fn existing_m2m_relation(
-        self,
+        &self,
         id: sql::TableId,
     ) -> Option<walkers::ImplicitManyToManyRelationWalker<'a>> {
         self.introspection_map
@@ -161,28 +167,28 @@ impl<'a> InputContext<'a> {
             .map(|relation_id| self.previous_schema.db.walk(*relation_id))
     }
 
-    pub(crate) fn existing_model(self, id: sql::TableId) -> Option<walkers::ModelWalker<'a>> {
+    pub(crate) fn existing_model(&self, id: sql::TableId) -> Option<walkers::ModelWalker<'a>> {
         self.introspection_map
             .existing_models
             .get(&id)
             .map(|id| self.previous_schema.db.walk(*id))
     }
 
-    pub(crate) fn existing_view(self, id: sql::ViewId) -> Option<walkers::ModelWalker<'a>> {
+    pub(crate) fn existing_view(&self, id: sql::ViewId) -> Option<walkers::ModelWalker<'a>> {
         self.introspection_map
             .existing_views
             .get(&id)
             .map(|id| self.previous_schema.db.walk(*id))
     }
 
-    pub(crate) fn existing_table_scalar_field(self, id: sql::TableColumnId) -> Option<walkers::ScalarFieldWalker<'a>> {
+    pub(crate) fn existing_table_scalar_field(&self, id: sql::TableColumnId) -> Option<walkers::ScalarFieldWalker<'a>> {
         self.introspection_map
             .existing_model_scalar_fields
             .get(&id)
             .map(|(model_id, field_id)| self.previous_schema.db.walk(*model_id).scalar_field(*field_id))
     }
 
-    pub(crate) fn existing_view_scalar_field(self, id: sql::ViewColumnId) -> Option<walkers::ScalarFieldWalker<'a>> {
+    pub(crate) fn existing_view_scalar_field(&self, id: sql::ViewColumnId) -> Option<walkers::ScalarFieldWalker<'a>> {
         self.introspection_map
             .existing_view_scalar_fields
             .get(&id)
@@ -190,7 +196,7 @@ impl<'a> InputContext<'a> {
     }
 
     pub(crate) fn column_prisma_name(
-        self,
+        &self,
         id: sql::Either<sql::TableColumnId, sql::ViewColumnId>,
     ) -> crate::IntrospectedName<'a> {
         match id {
@@ -199,26 +205,26 @@ impl<'a> InputContext<'a> {
         }
     }
 
-    pub(crate) fn table_column_prisma_name(self, id: sql::TableColumnId) -> crate::IntrospectedName<'a> {
+    pub(crate) fn table_column_prisma_name(&self, id: sql::TableColumnId) -> crate::IntrospectedName<'a> {
         self.existing_table_scalar_field(id)
             .map(|sf| IntrospectedName::FromPsl {
                 name: sf.name(),
                 mapped_name: sf.mapped_name(),
             })
-            .unwrap_or_else(|| IntrospectedName::new_from_sql(self.schema.walk(id).name()))
+            .unwrap_or_else(|| IntrospectedName::new_from_sql(self.sql_schema.walk(id).name()))
     }
 
-    pub(crate) fn view_column_prisma_name(self, id: sql::ViewColumnId) -> crate::IntrospectedName<'a> {
+    pub(crate) fn view_column_prisma_name(&self, id: sql::ViewColumnId) -> crate::IntrospectedName<'a> {
         self.existing_view_scalar_field(id)
             .map(|sf| IntrospectedName::FromPsl {
                 name: sf.name(),
                 mapped_name: sf.mapped_name(),
             })
-            .unwrap_or_else(|| IntrospectedName::new_from_sql(self.schema.walk(id).name()))
+            .unwrap_or_else(|| IntrospectedName::new_from_sql(self.sql_schema.walk(id).name()))
     }
 
     // Use the existing model name when available.
-    pub(crate) fn table_prisma_name(self, id: sql::TableId) -> crate::ModelName<'a> {
+    pub(crate) fn table_prisma_name(&self, id: sql::TableId) -> crate::ModelName<'a> {
         if let Some(model) = self.existing_model(id) {
             return ModelName::FromPsl {
                 name: model.name(),
@@ -226,12 +232,12 @@ impl<'a> InputContext<'a> {
             };
         }
 
-        let table = self.schema.walk(id);
+        let table = self.sql_schema.walk(id);
         ModelName::new_from_sql(table.name(), table.namespace(), self)
     }
 
     // Use the existing view name when available.
-    pub(crate) fn view_prisma_name(self, id: sql::ViewId) -> crate::ModelName<'a> {
+    pub(crate) fn view_prisma_name(&self, id: sql::ViewId) -> crate::ModelName<'a> {
         if let Some(view) = self.existing_view(id) {
             return ModelName::FromPsl {
                 name: view.name(),
@@ -239,11 +245,11 @@ impl<'a> InputContext<'a> {
             };
         }
 
-        let view = self.schema.walk(id);
+        let view = self.sql_schema.walk(id);
         ModelName::new_from_sql(view.name(), view.namespace(), self)
     }
 
-    pub(crate) fn name_is_unique(self, name: &'a str) -> bool {
+    pub(crate) fn name_is_unique(&'a self, name: &'a str) -> bool {
         let name = crate::sanitize_datamodel_names::sanitize_string(name);
 
         self.introspection_map
@@ -253,7 +259,7 @@ impl<'a> InputContext<'a> {
             .unwrap_or(true)
     }
 
-    pub(crate) fn forward_inline_relation_field_prisma_name(self, id: sql::ForeignKeyId) -> &'a str {
+    pub(crate) fn forward_inline_relation_field_prisma_name(&'a self, id: sql::ForeignKeyId) -> &'a str {
         let existing_relation = self
             .existing_inline_relation(id)
             .and_then(|relation| relation.as_complete());
@@ -264,7 +270,7 @@ impl<'a> InputContext<'a> {
         }
     }
 
-    pub(crate) fn back_inline_relation_field_prisma_name(self, id: sql::ForeignKeyId) -> &'a str {
+    pub(crate) fn back_inline_relation_field_prisma_name(&'a self, id: sql::ForeignKeyId) -> &'a str {
         let existing_relation = self
             .existing_inline_relation(id)
             .and_then(|relation| relation.as_complete());
@@ -276,7 +282,7 @@ impl<'a> InputContext<'a> {
     }
 
     #[track_caller]
-    pub(crate) fn forward_m2m_relation_field_prisma_name(self, id: sql::TableId) -> &'a str {
+    pub(crate) fn forward_m2m_relation_field_prisma_name(&'a self, id: sql::TableId) -> &'a str {
         let existing_relation = self.existing_m2m_relation(id);
 
         match existing_relation {
@@ -286,7 +292,7 @@ impl<'a> InputContext<'a> {
     }
 
     #[track_caller]
-    pub(crate) fn back_m2m_relation_field_prisma_name(self, id: sql::TableId) -> &'a str {
+    pub(crate) fn back_m2m_relation_field_prisma_name(&'a self, id: sql::TableId) -> &'a str {
         let existing_relation = self.existing_m2m_relation(id);
 
         match existing_relation {
@@ -296,7 +302,7 @@ impl<'a> InputContext<'a> {
     }
 
     #[track_caller]
-    pub(crate) fn inline_relation_prisma_name(self, id: sql::ForeignKeyId) -> Cow<'a, str> {
+    pub(crate) fn inline_relation_prisma_name(&'a self, id: sql::ForeignKeyId) -> Cow<'a, str> {
         let existing_relation = self
             .existing_inline_relation(id)
             .and_then(|relation| relation.as_complete());
@@ -311,7 +317,7 @@ impl<'a> InputContext<'a> {
     }
 
     #[track_caller]
-    pub(crate) fn m2m_relation_prisma_name(self, id: sql::TableId) -> Cow<'a, str> {
+    pub(crate) fn m2m_relation_prisma_name(&'a self, id: sql::TableId) -> Cow<'a, str> {
         let existing_relation = self.existing_m2m_relation(id);
 
         match existing_relation {
@@ -323,27 +329,27 @@ impl<'a> InputContext<'a> {
         }
     }
 
-    pub(crate) fn inline_relation_name(self, id: sql::ForeignKeyId) -> Option<&'a RelationName<'a>> {
+    pub(crate) fn inline_relation_name(&'a self, id: sql::ForeignKeyId) -> Option<&'a RelationName<'a>> {
        self.introspection_map.relation_names.inline_relation_name(id)
     }
 
     #[track_caller]
-    pub(crate) fn m2m_relation_name(self, id: sql::TableId) -> &'a RelationName<'a> {
+    pub(crate) fn m2m_relation_name(&'a self, id: sql::TableId) -> &'a RelationName<'a> {
         self.introspection_map.relation_names.m2m_relation_name(id)
     }
 
-    pub(crate) fn table_missing_for_model(self, id: &ast::ModelId) -> bool {
+    pub(crate) fn table_missing_for_model(&self, id: &ast::ModelId) -> bool {
         self.introspection_map.missing_tables_for_previous_models.contains(id)
     }
 
-    pub(crate) fn view_missing_for_model(self, id: &ast::ModelId) -> bool {
+    pub(crate) fn view_missing_for_model(&self, id: &ast::ModelId) -> bool {
         self.introspection_map.missing_views_for_previous_models.contains(id)
     }
 
     pub(crate) fn inline_relations_for_table(
-        self,
+        &'a self,
         table_id_filter: sql::TableId,
-    ) -> impl Iterator<Item = (RelationFieldDirection, sql::ForeignKeyWalker<'a>)> {
+    ) -> impl Iterator<Item = (RelationFieldDirection, sql::ForeignKeyWalker<'a>)> + 'a {
         self.introspection_map
             .inline_relation_positions
             .iter()
@@ -352,7 +358,7 @@ impl<'a> InputContext<'a> {
             .map(|(_, fk_id, direction)| {
                 let foreign_key = sql::Walker {
                     id: *fk_id,
-                    schema: self.schema,
+                    schema: self.sql_schema,
                 };
 
                 (*direction, foreign_key)
@@ -360,9 +366,9 @@ impl<'a> InputContext<'a> {
     }
 
     pub(crate) fn m2m_relations_for_table(
-        self,
+        &'a self,
         table_id_filter: sql::TableId,
-    ) -> impl Iterator<Item = (RelationFieldDirection, sql::ForeignKeyWalker<'a>)> {
+    ) -> impl Iterator<Item = (RelationFieldDirection, sql::ForeignKeyWalker<'a>)> + 'a {
         self.introspection_map
             .m2m_relation_positions
             .iter()
@@ -370,7 +376,7 @@ impl<'a> InputContext<'a> {
             .map(|(_, fk_id, direction)| {
                 let next = sql::Walker {
                     id: *fk_id,
-                    schema: self.schema,
+                    schema: self.sql_schema,
                 };
 
                 (*direction, next)
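[Editor's note] One subtlety in `DatamodelCalculatorContext::new` above is worth isolating: the introspection map is derived from the very context that will own it, so the constructor fills in a default first and replaces it once the rest of the fields exist. A reduced sketch of that two-phase initialization, with hypothetical `Ctx`/`Map` types in place of the real schema and map:

    // Two-phase initialization, as in DatamodelCalculatorContext::new.
    // `Ctx` and `Map` are illustrative stand-ins, not the real types.
    #[derive(Default)]
    struct Map {
        names: Vec<String>,
    }

    struct Ctx<'a> {
        source: &'a str,
        map: Map,
    }

    impl Map {
        // Only borrows the context for the duration of the call, so the
        // result can then be stored back into the context by value.
        fn new(ctx: &Ctx<'_>) -> Map {
            Map {
                names: ctx.source.split(',').map(str::to_owned).collect(),
            }
        }
    }

    impl<'a> Ctx<'a> {
        fn new(source: &'a str) -> Self {
            let mut ctx = Ctx { source, map: Map::default() };
            ctx.map = Map::new(&ctx); // replace the placeholder map
            ctx
        }
    }

    fn main() {
        let ctx = Ctx::new("User,Post");
        assert_eq!(ctx.map.names, ["User", "Post"]);
    }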
diff --git a/introspection-engine/connectors/sql-introspection-connector/src/error.rs b/introspection-engine/connectors/sql-introspection-connector/src/error.rs
index 82a7a5d82adb..68a7a9f28a5c 100644
--- a/introspection-engine/connectors/sql-introspection-connector/src/error.rs
+++ b/introspection-engine/connectors/sql-introspection-connector/src/error.rs
@@ -114,7 +114,7 @@ impl SqlError {
         ConnectorError {
             user_facing_error,
             kind: ErrorKind::TlsError {
-                message: format!("{}", cause),
+                message: format!("{cause}"),
             },
         }
     }
@@ -169,25 +169,25 @@ impl From<QuaintKind> for SqlError {
     fn from(kind: QuaintKind) -> Self {
         match kind {
             QuaintKind::DatabaseDoesNotExist { ref db_name } => Self::DatabaseDoesNotExist {
-                db_name: format!("{}", db_name),
+                db_name: format!("{db_name}"),
                 cause: kind,
             },
             QuaintKind::DatabaseAlreadyExists { ref db_name } => Self::DatabaseAlreadyExists {
-                db_name: format!("{}", db_name),
+                db_name: format!("{db_name}"),
                 cause: kind,
             },
             QuaintKind::DatabaseAccessDenied { ref db_name } => Self::DatabaseAccessDenied {
-                db_name: format!("{}", db_name),
+                db_name: format!("{db_name}"),
                 cause: kind,
             },
             QuaintKind::AuthenticationFailed { ref user } => Self::AuthenticationFailed {
-                user: format!("{}", user),
+                user: format!("{user}"),
                 cause: kind,
             },
             QuaintKind::DatabaseUrlIsInvalid(reason) => Self::DatabaseUrlIsInvalid(reason),
             e @ QuaintKind::ConnectTimeout => Self::ConnectTimeout(e),
-            e @ QuaintKind::SocketTimeout => Self::Timeout(format!("{}", e)),
-            e @ QuaintKind::PoolTimeout { .. } => Self::Timeout(format!("{}", e)),
+            e @ QuaintKind::SocketTimeout => Self::Timeout(format!("{e}")),
+            e @ QuaintKind::PoolTimeout { .. } => Self::Timeout(format!("{e}")),
             QuaintKind::ConnectionError { .. } => Self::ConnectionError { cause: kind },
             QuaintKind::TlsError { .. } => Self::TlsError { cause: kind },
             QuaintKind::UniqueConstraintViolation { ref constraint } => Self::UniqueConstraintViolation {
@@ -212,7 +212,7 @@ impl From<sql_schema_describer::DescriberError> for SqlError {
                 SqlError::QueryError(anyhow::anyhow!("{}", error))
             }
             sql_schema_describer::DescriberErrorKind::CrossSchemaReference { .. } => {
-                SqlError::CrossSchemaReference(format!("{}", error))
+                SqlError::CrossSchemaReference(format!("{error}"))
             }
         }
     }
diff --git a/introspection-engine/connectors/sql-introspection-connector/src/introspection_map.rs b/introspection-engine/connectors/sql-introspection-connector/src/introspection_map.rs
index c9503557bc33..3f74a15de577 100644
--- a/introspection-engine/connectors/sql-introspection-connector/src/introspection_map.rs
+++ b/introspection-engine/connectors/sql-introspection-connector/src/introspection_map.rs
@@ -2,7 +2,9 @@
 
 mod relation_names;
 
-use crate::{datamodel_calculator::InputContext, introspection_helpers as helpers, pair::RelationFieldDirection};
+use crate::{
+    datamodel_calculator::DatamodelCalculatorContext, introspection_helpers as helpers, pair::RelationFieldDirection,
+};
 use psl::{
     parser_database::{self, ast},
     PreviewFeature,
@@ -37,9 +39,9 @@ pub(crate) struct IntrospectionMap<'a> {
 }
 
 impl<'a> IntrospectionMap<'a> {
-    pub(crate) fn new(input: InputContext<'a>) -> Self {
-        let sql_schema = input.schema;
-        let prisma_schema = input.previous_schema;
+    pub(crate) fn new(ctx: &DatamodelCalculatorContext<'a>) -> Self {
+        let sql_schema = ctx.sql_schema;
+        let prisma_schema = ctx.previous_schema;
         let mut map = Default::default();
 
         match_existing_models(sql_schema, prisma_schema, &mut map);
@@ -48,7 +50,7 @@ impl<'a> IntrospectionMap<'a> {
         match_existing_scalar_fields(sql_schema, prisma_schema, &mut map);
         match_existing_inline_relations(sql_schema, prisma_schema, &mut map);
         match_existing_m2m_relations(sql_schema, prisma_schema, &mut map);
-        relation_names::introspect(input, &mut map);
+        relation_names::introspect(ctx, &mut map);
 
         position_inline_relation_fields(sql_schema, &mut map);
         position_m2m_relation_fields(sql_schema, &mut map);
         populate_top_level_names(sql_schema, prisma_schema, &mut map);
diff --git a/introspection-engine/connectors/sql-introspection-connector/src/introspection_map/relation_names.rs b/introspection-engine/connectors/sql-introspection-connector/src/introspection_map/relation_names.rs
index c39cac103ed5..8dfc4391fcfd 100644
--- a/introspection-engine/connectors/sql-introspection-connector/src/introspection_map/relation_names.rs
+++ b/introspection-engine/connectors/sql-introspection-connector/src/introspection_map/relation_names.rs
@@ -1,7 +1,7 @@
 //! This module is responsible for defining the relation names and the relation field names in an
 //! introspected schema with as much clarity and as little ambiguity as possible.
 
-use crate::{datamodel_calculator::InputContext, introspection_helpers::is_prisma_join_table};
+use crate::{datamodel_calculator::DatamodelCalculatorContext, introspection_helpers::is_prisma_join_table};
 use sql_schema_describer as sql;
 use std::{
     borrow::Cow,
@@ -62,25 +62,25 @@ impl<'a> RelationNames<'a> {
 ///
 /// Additionally, in self-relations, the names of the two relation fields are disambiguated by
 /// prefixing the name of the backrelation field with "other_".
-pub(super) fn introspect<'a>(input: InputContext<'a>, map: &mut super::IntrospectionMap<'a>) {
+pub(super) fn introspect<'a>(ctx: &DatamodelCalculatorContext<'a>, map: &mut super::IntrospectionMap<'a>) {
     let mut names = RelationNames {
         inline_relation_names: Default::default(),
         m2m_relation_names: Default::default(),
     };
 
     let mut duplicated_fks = Default::default();
-    let ambiguous_relations = find_ambiguous_relations(input);
+    let ambiguous_relations = find_ambiguous_relations(ctx);
 
-    for table in input.schema.table_walkers() {
+    for table in ctx.sql_schema.table_walkers() {
         if is_prisma_join_table(table) {
-            let name = prisma_m2m_relation_name(table, &ambiguous_relations, input);
+            let name = prisma_m2m_relation_name(table, &ambiguous_relations, ctx);
             names.m2m_relation_names.insert(table.id, name);
         } else {
             collect_duplicated_fks(table, &mut duplicated_fks);
 
             for fk in table.foreign_keys().filter(|fk| !duplicated_fks.contains(&fk.id)) {
                 names
                     .inline_relation_names
-                    .insert(fk.id, inline_relation_name(fk, &ambiguous_relations, input));
+                    .insert(fk.id, inline_relation_name(fk, &ambiguous_relations, ctx));
             }
         }
     }
@@ -91,7 +91,7 @@ pub(super) fn introspect<'a>(input: InputContext<'a>, map: &mut super::Introspec
 fn prisma_m2m_relation_name<'a>(
     table: sql::TableWalker<'a>,
     ambiguous_relations: &HashSet<[sql::TableId; 2]>,
-    input: InputContext,
+    ctx: &DatamodelCalculatorContext<'a>,
 ) -> RelationName<'a> {
     let ids = table_ids_for_m2m_relation_table(table);
     let is_self_relation = ids[0] == ids[1];
@@ -100,7 +100,7 @@ fn prisma_m2m_relation_name<'a>(
         // the table names of prisma m2m tables starts with an underscore
         (Cow::Borrowed(&table.name()[1..]), table.name())
     } else {
-        let default_name = ids.map(|id| input.table_prisma_name(id).prisma_name()).join("To");
+        let default_name = ids.map(|id| ctx.table_prisma_name(id).prisma_name()).join("To");
         let found_name = &table.name()[1..];
         let relation_name = if found_name == default_name && !is_self_relation {
             ""
@@ -114,12 +114,12 @@ fn prisma_m2m_relation_name<'a>(
         relation_name,
         Cow::Owned(format!(
             "{}{field_name_suffix}{}",
-            input.table_prisma_name(ids[1]).prisma_name(),
+            ctx.table_prisma_name(ids[1]).prisma_name(),
             if is_self_relation { "_A" } else { "" },
         )),
         Cow::Owned(format!(
             "{}{field_name_suffix}{}",
-            input.table_prisma_name(ids[0]).prisma_name(),
+            ctx.table_prisma_name(ids[0]).prisma_name(),
             if is_self_relation { "_B" } else { "" },
         )),
     ]
@@ -128,11 +128,11 @@ fn prisma_m2m_relation_name<'a>(
 fn inline_relation_name<'a>(
     fk: sql::ForeignKeyWalker<'a>,
     ambiguous_relations: &HashSet<[sql::TableId; 2]>,
-    input: InputContext<'a>,
+    ctx: &DatamodelCalculatorContext<'a>,
 ) -> RelationName<'a> {
     let is_self_relation = fk.is_self_relation();
-    let referencing_model_name = input.table_prisma_name(fk.table().id).prisma_name();
-    let referenced_model_name = input.table_prisma_name(fk.referenced_table().id).prisma_name();
+    let referencing_model_name = ctx.table_prisma_name(fk.table().id).prisma_name();
+    let referenced_model_name = ctx.table_prisma_name(fk.referenced_table().id).prisma_name();
     let self_relation_prefix = if is_self_relation { "other_" } else { "" };
 
     let is_ambiguous_name = ambiguous_relations.contains(&sorted_table_ids(fk.table().id, fk.referenced_table().id));
@@ -153,7 +153,7 @@ fn inline_relation_name<'a>(
         relation_name.push('_');
 
         let mut cols = fk.constrained_columns().peekable();
 
         while let Some(col) = cols.next() {
-            relation_name.push_str(input.table_column_prisma_name(col.id).prisma_name().as_ref());
+            relation_name.push_str(ctx.table_column_prisma_name(col.id).prisma_name().as_ref());
             if cols.peek().is_some() {
                 relation_name.push('_');
             }
@@ -169,15 +169,15 @@ fn inline_relation_name<'a>(
 }
 
 /// Relation names are only ambiguous between two given models.
-fn find_ambiguous_relations(input: InputContext) -> HashSet<[sql::TableId; 2]> {
+fn find_ambiguous_relations(ctx: &DatamodelCalculatorContext) -> HashSet<[sql::TableId; 2]> {
     let mut ambiguous_relations = HashSet::new();
 
-    for table in input.schema.table_walkers() {
+    for table in ctx.sql_schema.table_walkers() {
         if is_prisma_join_table(table) {
             m2m_relation_ambiguousness(table, &mut ambiguous_relations)
         } else {
             for fk in table.foreign_keys() {
-                inline_relation_ambiguousness(fk, &mut ambiguous_relations, input)
+                inline_relation_ambiguousness(fk, &mut ambiguous_relations, ctx)
             }
         }
     }
@@ -213,7 +213,7 @@ fn m2m_relation_ambiguousness(table: sql::TableWalker<'_>, ambiguous_relations:
 fn inline_relation_ambiguousness(
     fk: sql::ForeignKeyWalker<'_>,
     ambiguous_relations: &mut HashSet<[sql::TableId; 2]>,
-    input: InputContext,
+    ctx: &DatamodelCalculatorContext,
 ) {
     let tables = table_ids_for_inline_relation(fk);
 
@@ -235,10 +235,10 @@ fn inline_relation_ambiguousness(
     }
 
     // ...or because the relation field name conflicts with one of the scalar fields' name.
-    let default_field_name = input.table_prisma_name(fk.referenced_table().id).prisma_name();
+    let default_field_name = ctx.table_prisma_name(fk.referenced_table().id).prisma_name();
 
     if fk
         .constrained_columns()
-        .any(|col| default_field_name == input.table_column_prisma_name(col.id).prisma_name())
+        .any(|col| default_field_name == ctx.table_column_prisma_name(col.id).prisma_name())
     {
         ambiguous_relations.insert(tables);
     }
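[Editor's note] The `find_ambiguous_relations` pass above hinges on one detail: ambiguity is tracked per *unordered* pair of tables, so both foreign-key directions land on the same `[sql::TableId; 2]` key. A self-contained sketch of that bookkeeping; the `TableId` alias and the flat foreign-key list are hypothetical simplifications (the real code walks `ctx.sql_schema` instead):

    use std::collections::HashSet;

    type TableId = u32;

    // Sort the two ids so (a, b) and (b, a) map to the same key,
    // mirroring sorted_table_ids in the patch.
    fn sorted_table_ids(a: TableId, b: TableId) -> [TableId; 2] {
        if a <= b { [a, b] } else { [b, a] }
    }

    // A relation is ambiguous when more than one foreign key links the
    // same two tables; those relations then get disambiguated names.
    fn ambiguous_pairs(fks: &[(TableId, TableId)]) -> HashSet<[TableId; 2]> {
        let mut seen = HashSet::new();
        let mut ambiguous = HashSet::new();

        for &(from, to) in fks {
            let key = sorted_table_ids(from, to);
            if !seen.insert(key) {
                ambiguous.insert(key);
            }
        }

        ambiguous
    }

    fn main() {
        // Two distinct foreign keys between tables 1 and 2 => ambiguous.
        let fks = [(1, 2), (2, 1), (3, 4)];
        assert!(ambiguous_pairs(&fks).contains(&sorted_table_ids(1, 2)));
        assert!(!ambiguous_pairs(&fks).contains(&sorted_table_ids(3, 4)));
    }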
diff --git a/introspection-engine/connectors/sql-introspection-connector/src/lib.rs b/introspection-engine/connectors/sql-introspection-connector/src/lib.rs
index 3fec422087e8..e419215783d9 100644
--- a/introspection-engine/connectors/sql-introspection-connector/src/lib.rs
+++ b/introspection-engine/connectors/sql-introspection-connector/src/lib.rs
@@ -11,6 +11,7 @@
 //! - By using the `Pair` apis, create a rendering structure together
 //!   with possible warnings utilizing the `datamodel-renderer`
 //!   crate in the [`rendering`] module.
+//! - Analyze the PSL and SQL schemas, trigger warnings if needed.
 //! - Check the Prisma version to warn and guide people upgrading
 //!   from older versions of Prisma.
 //! - Convert the rendering structure into a string. Reformat the
@@ -24,17 +25,18 @@
 #![allow(clippy::ptr_arg)] // remove after https://github.com/rust-lang/rust-clippy/issues/8482 is fixed and shipped
 
 pub mod datamodel_calculator; // only exported to be able to unit test it
-mod pair;
 
 mod error;
 mod introspection_helpers;
 mod introspection_map;
+mod pair;
 mod rendering;
 mod sanitize_datamodel_names;
 mod schema_describer_loading;
 mod version_checker;
 mod warnings;
 
+use datamodel_calculator::DatamodelCalculatorContext;
 pub use error::*;
 
 use self::sanitize_datamodel_names::*;
@@ -76,7 +78,7 @@ impl SqlIntrospectionConnector {
 
                 ConnectorError {
                     user_facing_error: Some(known),
-                    kind: ErrorKind::InvalidDatabaseUrl(format!("{} in database URL", err)),
+                    kind: ErrorKind::InvalidDatabaseUrl(format!("{err} in database URL")),
                 }
             })
         })?;
@@ -209,7 +211,7 @@ impl SqlFamilyTrait for IntrospectionContext {
     }
 }
 
-impl SqlFamilyTrait for datamodel_calculator::InputContext<'_> {
+impl SqlFamilyTrait for DatamodelCalculatorContext<'_> {
     fn sql_family(&self) -> SqlFamily {
         self.sql_family
     }
diff --git a/introspection-engine/connectors/sql-introspection-connector/src/pair.rs b/introspection-engine/connectors/sql-introspection-connector/src/pair.rs
index 32a6f2c49b38..b808c66ae060 100644
--- a/introspection-engine/connectors/sql-introspection-connector/src/pair.rs
+++ b/introspection-engine/connectors/sql-introspection-connector/src/pair.rs
@@ -13,7 +13,7 @@ mod relation_field;
 mod scalar_field;
 mod view;
 
-use crate::datamodel_calculator::InputContext;
+use crate::datamodel_calculator::DatamodelCalculatorContext;
 
 pub(crate) use default::{DefaultKind, DefaultValuePair};
 pub(crate) use enumerator::EnumPair;
 pub(crate) use id::IdPair;
@@ -40,7 +40,7 @@ where
     /// The next state, taken from the database.
     next: U,
     /// The configuration object of the introspection.
-    context: InputContext<'a>,
+    context: &'a DatamodelCalculatorContext<'a>,
 }
 
 impl<'a, T, U> Pair<'a, T, U>
@@ -48,7 +48,7 @@ where
     T: Copy,
     U: Copy,
 {
-    pub(crate) fn new(context: InputContext<'a>, previous: T, next: U) -> Self {
+    pub(crate) fn new(context: &'a DatamodelCalculatorContext<'a>, previous: T, next: U) -> Self {
         Self {
             context,
             previous,
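[Editor's note] The `Pair` change above is mechanical (a `Copy` context becomes a shared borrow), but the pattern it carries is the backbone of re-introspection: every pair couples an optional `previous` PSL definition with the `next` database state, and rendering prefers what the user already wrote. A minimal sketch with made-up walker types, not the real prisma-engines walkers:

    // Illustrative stand-ins for the PSL walker (previous) and the SQL
    // describer walker (next).
    #[derive(Clone, Copy)]
    struct PslModel<'a> { name: &'a str }

    #[derive(Clone, Copy)]
    struct SqlTable<'a> { name: &'a str }

    struct Pair<'a> {
        previous: Option<PslModel<'a>>,
        next: SqlTable<'a>,
    }

    impl<'a> Pair<'a> {
        // Re-introspection rule of thumb: keep the name the user already
        // chose in the PSL, fall back to the database name otherwise.
        fn name(&self) -> &'a str {
            self.previous.map(|m| m.name).unwrap_or(self.next.name)
        }
    }

    fn main() {
        let renamed = Pair { previous: Some(PslModel { name: "User" }), next: SqlTable { name: "users" } };
        let fresh = Pair { previous: None, next: SqlTable { name: "posts" } };

        assert_eq!(renamed.name(), "User");
        assert_eq!(fresh.name(), "posts");
    }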
pub(crate) fn field(self) -> Option> { self.defined_in_a_field().then(|| self.fields().next().unwrap()) diff --git a/introspection-engine/connectors/sql-introspection-connector/src/pair/index.rs b/introspection-engine/connectors/sql-introspection-connector/src/pair/index.rs index c88dab66cb78..aaddd3b27bef 100644 --- a/introspection-engine/connectors/sql-introspection-connector/src/pair/index.rs +++ b/introspection-engine/connectors/sql-introspection-connector/src/pair/index.rs @@ -83,7 +83,7 @@ impl<'a> IndexPair<'a> { let clustered = match self.next { Some(next) => { - let ext: &MssqlSchemaExt = self.context.schema.downcast_connector_data(); + let ext: &MssqlSchemaExt = self.context.sql_schema.downcast_connector_data(); ext.index_is_clustered(next.id) } None => self.previous.and_then(|prev| prev.clustered()).unwrap_or(false), @@ -106,7 +106,7 @@ impl<'a> IndexPair<'a> { match (self.next, self.previous.and_then(|i| i.algorithm())) { // Index is defined in a table to the database. (Some(next), _) => { - let data: &PostgresSchemaExt = self.context.schema.downcast_connector_data(); + let data: &PostgresSchemaExt = self.context.sql_schema.downcast_connector_data(); match data.index_algorithm(next.id) { sql::postgres::SqlIndexAlgorithm::BTree => None, diff --git a/introspection-engine/connectors/sql-introspection-connector/src/pair/index_field.rs b/introspection-engine/connectors/sql-introspection-connector/src/pair/index_field.rs index eea50bd46889..7dbeec61963a 100644 --- a/introspection-engine/connectors/sql-introspection-connector/src/pair/index_field.rs +++ b/introspection-engine/connectors/sql-introspection-connector/src/pair/index_field.rs @@ -52,7 +52,7 @@ impl<'a> IndexFieldPair<'a> { return None; } - let ext: &PostgresSchemaExt = self.context.schema.downcast_connector_data(); + let ext: &PostgresSchemaExt = self.context.sql_schema.downcast_connector_data(); let next = match self.next { Some(next) => next, diff --git a/introspection-engine/connectors/sql-introspection-connector/src/pair/model.rs b/introspection-engine/connectors/sql-introspection-connector/src/pair/model.rs index 69a296414155..607fb177b3ac 100644 --- a/introspection-engine/connectors/sql-introspection-connector/src/pair/model.rs +++ b/introspection-engine/connectors/sql-introspection-connector/src/pair/model.rs @@ -29,11 +29,7 @@ impl<'a> ModelPair<'a> { /// The namespace of the model, if using the multi-schema feature. pub(crate) fn namespace(self) -> Option<&'a str> { - if self.context.uses_namespaces() { - self.next.namespace() - } else { - None - } + self.context.uses_namespaces().then(|| self.next.namespace()).flatten() } /// Name of the model in the PSL. The value can be sanitized if it @@ -50,8 +46,10 @@ impl<'a> ModelPair<'a> { } /// True, if the name of the model is using a reserved identifier. + /// If we already have a model in the PSL, the validation will not + /// allow reserved names and we don't need to warn the user. pub(crate) fn uses_reserved_name(self) -> bool { - psl::is_reserved_type_name(self.next.name()) + psl::is_reserved_type_name(self.next.name()) && self.previous.is_none() } /// The documentation on top of the enum. @@ -158,12 +156,17 @@ impl<'a> ModelPair<'a> { /// explicitly sets the model attribute, or if the model has no /// usable identifiers. 
pub(crate) fn ignored(self) -> bool { - let explicit_ignore = self.previous.map(|model| model.is_ignored()).unwrap_or(false); + let explicit_ignore = self.ignored_in_psl(); let implicit_ignore = !self.has_usable_identifier() && self.scalar_fields().len() > 0; explicit_ignore || implicit_ignore } + /// If the model is already marked as ignored in the PSL. + pub(crate) fn ignored_in_psl(self) -> bool { + self.previous.map(|model| model.is_ignored()).unwrap_or(false) + } + /// Returns an iterator over all indexes of the model, /// specifically the ones defined in the model level, skipping the /// primary key and unique index defined in a field. diff --git a/introspection-engine/connectors/sql-introspection-connector/src/pair/relation_field.rs b/introspection-engine/connectors/sql-introspection-connector/src/pair/relation_field.rs index aae0128b44ff..016eb547d38a 100644 --- a/introspection-engine/connectors/sql-introspection-connector/src/pair/relation_field.rs +++ b/introspection-engine/connectors/sql-introspection-connector/src/pair/relation_field.rs @@ -1,5 +1,5 @@ use crate::{ - datamodel_calculator::InputContext, + datamodel_calculator::DatamodelCalculatorContext, pair::{ModelPair, Pair}, }; use psl::{ @@ -45,21 +45,21 @@ impl<'a> InlineRelationField<'a> { self.next.constrained_columns().any(|col| !col.arity().is_required()) } - fn model(self, context: InputContext<'a>) -> ModelPair<'a> { + fn model(self, context: &'a DatamodelCalculatorContext<'a>) -> ModelPair<'a> { let previous = self.previous.map(|prev| prev.model()); let next = self.next.table(); Pair::new(context, previous, next) } - fn referenced_model(self, context: InputContext<'a>) -> ModelPair<'a> { + fn referenced_model(self, context: &'a DatamodelCalculatorContext<'a>) -> ModelPair<'a> { let previous = self.previous.map(|prev| prev.related_model()); let next = self.next.referenced_table(); Pair::new(context, previous, next) } - fn default_constraint_name(self, context: InputContext<'a>) -> String { + fn default_constraint_name(self, context: &DatamodelCalculatorContext<'a>) -> String { let connector = context.active_connector(); let cols: Vec<_> = self.next.constrained_columns().map(|c| c.name()).collect(); ConstraintNames::foreign_key_constraint_name(self.next.table().name(), &cols, connector) @@ -100,13 +100,13 @@ enum RelationType<'a> { #[derive(Clone, Copy)] pub(crate) struct RelationFieldPair<'a> { relation_type: RelationType<'a>, - context: InputContext<'a>, + context: &'a DatamodelCalculatorContext<'a>, } impl<'a> RelationFieldPair<'a> { /// Create a new inline relation field to the wanted direction. pub(crate) fn inline( - context: InputContext<'a>, + context: &'a DatamodelCalculatorContext<'a>, previous: Option<walkers::RelationFieldWalker<'a>>, next: sql::ForeignKeyWalker<'a>, direction: RelationFieldDirection, @@ -126,7 +126,7 @@ impl<'a> RelationFieldPair<'a> { /// Create a new many to many relation field to the wanted /// direction. pub(crate) fn m2m( - context: InputContext<'a>, + context: &'a DatamodelCalculatorContext<'a>, next: sql::ForeignKeyWalker<'a>, direction: RelationFieldDirection, ) -> Self { @@ -139,8 +139,11 @@ impl<'a> RelationFieldPair<'a> { /// Create a new emulated relation field, if using `relationMode` - /// `prisma`. + /// `prisma` (or, for now, if having a relation in a view).
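+    ///
+    /// A hypothetical call site (the names `ctx` and `previous` are
+    /// assumed for the example):
+    ///
+    /// ```ignore
+    /// let field = RelationFieldPair::emulated(&ctx, previous);
+    /// ```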
+ pub(crate) fn emulated( + context: &'a DatamodelCalculatorContext<'a>, + previous: walkers::RelationFieldWalker<'a>, + ) -> Self { let relation_type = EmulatedRelationField { previous }; Self { diff --git a/introspection-engine/connectors/sql-introspection-connector/src/pair/scalar_field.rs b/introspection-engine/connectors/sql-introspection-connector/src/pair/scalar_field.rs index 5b6c475baf9d..8d92083ba560 100644 --- a/introspection-engine/connectors/sql-introspection-connector/src/pair/scalar_field.rs +++ b/introspection-engine/connectors/sql-introspection-connector/src/pair/scalar_field.rs @@ -8,7 +8,7 @@ use sql::ColumnArity; use sql_schema_describer as sql; use std::borrow::Cow; -use super::{DefaultValuePair, IdPair, IndexPair, ModelPair, Pair, ViewPair}; +use super::{DefaultValuePair, IdPair, IndexPair, Pair}; /// Comparing a possible previous PSL scalar field /// to a column from the database. Re-introspection @@ -44,22 +44,6 @@ impl<'a> ScalarFieldPair<'a> { self.previous.map(|f| f.is_ignored()).unwrap_or(false) } - /// The container where the field is defined, view or model. - pub fn container(self) -> Either<ModelPair<'a>, ViewPair<'a>> { - let previous = self.previous.map(|f| f.model()); - - match self.next.refine() { - Either::Left(f) => { - let pair = Pair::new(self.context, previous, f.table()); - Either::Left(pair) - } - Either::Right(v) => { - let pair = Pair::new(self.context, previous, v.view()); - Either::Right(pair) - } - } - } - /// True if we took the name from the PSL. pub(crate) fn remapped_name_from_psl(&self) -> bool { self.previous.and_then(|p| p.mapped_name()).is_some() diff --git a/introspection-engine/connectors/sql-introspection-connector/src/pair/view.rs b/introspection-engine/connectors/sql-introspection-connector/src/pair/view.rs index 5147af801fd3..2d90622f1ca1 100644 --- a/introspection-engine/connectors/sql-introspection-connector/src/pair/view.rs +++ b/introspection-engine/connectors/sql-introspection-connector/src/pair/view.rs @@ -22,11 +22,7 @@ impl<'a> ViewPair<'a> { /// The namespace of the view, if using the multi-schema feature. pub(crate) fn namespace(self) -> Option<&'a str> { - if self.context.uses_namespaces() { - self.next.namespace() - } else { - None - } + self.context.uses_namespaces().then(|| self.next.namespace()).flatten() } /// Name of the view in the PSL. The value can be sanitized if it @@ -43,8 +39,10 @@ } /// True, if the name of the view is using a reserved identifier. + /// If we already have a view in the PSL, the validation will not + /// allow reserved names and we don't need to warn the user. pub(crate) fn uses_reserved_name(self) -> bool { - psl::is_reserved_type_name(self.next.name()) + psl::is_reserved_type_name(self.next.name()) && self.previous.is_none() } /// The documentation on top of the view. @@ -124,12 +122,17 @@ /// explicitly sets the view attribute, or if the view has no /// usable identifiers. pub(crate) fn ignored(self) -> bool { - let explicit_ignore = self.previous.map(|view| view.is_ignored()).unwrap_or(false); + let explicit_ignore = self.ignored_in_psl(); let implicit_ignore = !self.has_usable_identifier() && self.scalar_fields().len() > 0; explicit_ignore || implicit_ignore } + /// If the view is already marked as ignored in the PSL. + pub(crate) fn ignored_in_psl(self) -> bool { + self.previous.map(|view| view.is_ignored()).unwrap_or(false) + } + /// Returns an iterator over all indexes of the view explicitly defined in PSL.
/// /// For the primary key, use [`ModelPair#id`]. For a field-level diff --git a/introspection-engine/connectors/sql-introspection-connector/src/rendering.rs b/introspection-engine/connectors/sql-introspection-connector/src/rendering.rs index 603a1cbf8cbb..6738f4dd23af 100644 --- a/introspection-engine/connectors/sql-introspection-connector/src/rendering.rs +++ b/introspection-engine/connectors/sql-introspection-connector/src/rendering.rs @@ -13,31 +13,28 @@ mod views; use psl::PreviewFeature; -use crate::datamodel_calculator::{InputContext, OutputContext}; +use crate::datamodel_calculator::DatamodelCalculatorContext; pub(crate) use crate::SqlError; +use datamodel_renderer as renderer; /// Combines the SQL database schema and an existing PSL schema to a /// PSL schema definition string. -pub(crate) fn to_psl_string<'a>( - input: InputContext<'a>, - output: &mut OutputContext<'a>, -) -> Result<(String, bool), SqlError> { - enums::render(input, output); - models::render(input, output); - - if input.config.preview_features().contains(PreviewFeature::Views) { - views::render(input, output); +pub(crate) fn to_psl_string(ctx: &DatamodelCalculatorContext<'_>) -> Result<(String, bool), SqlError> { + let mut rendered = renderer::Datamodel::new(); + + enums::render(ctx, &mut rendered); + models::render(ctx, &mut rendered); + + if ctx.config.preview_features().contains(PreviewFeature::Views) { + views::render(ctx, &mut rendered); } - let psl_string = if input.render_config { - let config = configuration::render(input.config, input.schema, input.force_namespaces); - format!("{}\n{}", config, output.rendered_schema) + let psl_string = if ctx.render_config { + let config = configuration::render(ctx.config, ctx.sql_schema, ctx.force_namespaces); + format!("{config}\n{rendered}") } else { - output.rendered_schema.to_string() + rendered.to_string() }; - Ok(( - psl::reformat(&psl_string, 2).unwrap(), - output.rendered_schema.is_empty(), - )) + Ok((psl::reformat(&psl_string, 2).unwrap(), rendered.is_empty())) } diff --git a/introspection-engine/connectors/sql-introspection-connector/src/rendering/defaults.rs b/introspection-engine/connectors/sql-introspection-connector/src/rendering/defaults.rs index 00a52a6ca6c5..fd632f424a24 100644 --- a/introspection-engine/connectors/sql-introspection-connector/src/rendering/defaults.rs +++ b/introspection-engine/connectors/sql-introspection-connector/src/rendering/defaults.rs @@ -1,17 +1,14 @@ //! The `@default` attribute rendering. -use crate::{ - pair::{DefaultKind, ScalarFieldPair}, - warnings::Warnings, -}; +use crate::pair::{DefaultKind, DefaultValuePair}; use datamodel_renderer::{ datamodel as renderer, value::{Constant, Function, Text, Value}, }; /// Render a default value for the given scalar field. 
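///
/// Usage mirrors the call site in `rendering/scalar_field.rs` within this
/// patch, with `field` and `rendered` assumed in scope:
///
/// ```ignore
/// if let Some(default) = defaults::render(field.default()) {
///     rendered.default(default);
/// }
/// ```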
-pub(crate) fn render<'a>(field: ScalarFieldPair<'a>, warnings: &mut Warnings) -> Option<renderer::DefaultValue<'a>> { - let mut rendered = match field.default().kind() { +pub(crate) fn render(default: DefaultValuePair<'_>) -> Option<renderer::DefaultValue<'_>> { + let mut rendered = match default.kind() { Some(kind) => match kind { DefaultKind::Sequence(sequence) => { let mut fun = Function::new("sequence"); @@ -73,36 +70,14 @@ pub(crate) fn render<'a>(field: ScalarFieldPair<'a>, warnings: &mut Warnings) -> Option<renderer::DefaultValue<'a>> let vals = vals.into_iter().map(Value::from).collect(); Some(renderer::DefaultValue::array(vals)) } - DefaultKind::Prisma1Uuid => { - if let Some(model) = field.container().left() { - let warn = crate::warnings::ModelAndField { - model: model.name().to_string(), - field: field.name().to_string(), - }; - - warnings.prisma_1_uuid_defaults.push(warn); - } - - Some(renderer::DefaultValue::function(Function::new("uuid"))) - } - DefaultKind::Prisma1Cuid => { - if let Some(model) = field.container().left() { - let warn = crate::warnings::ModelAndField { - model: model.name().to_string(), - field: field.name().to_string(), - }; - - warnings.prisma_1_cuid_defaults.push(warn); - } - - Some(renderer::DefaultValue::function(Function::new("cuid"))) - } + DefaultKind::Prisma1Uuid => Some(renderer::DefaultValue::function(Function::new("uuid"))), + DefaultKind::Prisma1Cuid => Some(renderer::DefaultValue::function(Function::new("cuid"))), }, None => None, }; if let Some(res) = rendered.as_mut() { - if let Some(mapped_name) = field.default().mapped_name() { + if let Some(mapped_name) = default.mapped_name() { res.map(mapped_name); } } diff --git a/introspection-engine/connectors/sql-introspection-connector/src/rendering/enums.rs b/introspection-engine/connectors/sql-introspection-connector/src/rendering/enums.rs index a3db98b9e891..56bbf41e602b 100644 --- a/introspection-engine/connectors/sql-introspection-connector/src/rendering/enums.rs +++ b/introspection-engine/connectors/sql-introspection-connector/src/rendering/enums.rs @@ -1,26 +1,23 @@ //! Rendering of enumerators. use crate::{ - datamodel_calculator::{InputContext, OutputContext}, - introspection_helpers as helpers, - pair::EnumPair, + datamodel_calculator::DatamodelCalculatorContext, introspection_helpers as helpers, pair::EnumPair, sanitize_datamodel_names, - warnings::{self, Warnings}, }; use datamodel_renderer::datamodel as renderer; use psl::parser_database::ast; /// Render all enums. -pub(super) fn render<'a>(input: InputContext<'a>, output: &mut OutputContext<'a>) { +pub(super) fn render<'a>(ctx: &'a DatamodelCalculatorContext<'a>, rendered: &mut renderer::Datamodel<'a>) { let mut all_enums: Vec<(Option<ast::EnumId>, renderer::Enum<'a>)> = Vec::new(); - for pair in input.enum_pairs() { - all_enums.push((pair.previous_position(), render_enum(pair, &mut output.warnings))) + for pair in ctx.enum_pairs() { + all_enums.push((pair.previous_position(), render_enum(pair))) } all_enums.sort_by(|(id_a, _), (id_b, _)| helpers::compare_options_none_last(id_a.as_ref(), id_b.as_ref())); - if input.sql_family.is_mysql() { + if ctx.sql_family.is_mysql() { // MySQL can have multiple database enums matching one Prisma enum. all_enums.dedup_by(|(id_a, _), (id_b, _)| match (id_a, id_b) { (Some(id_a), Some(id_b)) => id_a == id_b, @@ -29,13 +26,12 @@ pub(super) fn render<'a> } for (_, enm) in all_enums { - output.rendered_schema.push_enum(enm); + rendered.push_enum(enm); } } /// Render a single enum.
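///
/// As in `render` above, each rendered enum is paired with its previous
/// PSL position before sorting:
///
/// ```ignore
/// all_enums.push((pair.previous_position(), render_enum(pair)));
/// ```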
-fn render_enum<'a>(r#enum: EnumPair<'a>, warnings: &mut Warnings) -> renderer::Enum<'a> { - let mut remapped_values = Vec::new(); +fn render_enum(r#enum: EnumPair<'_>) -> renderer::Enum<'_> { let mut rendered_enum = renderer::Enum::new(r#enum.name()); if let Some(schema) = r#enum.namespace() { @@ -46,37 +42,11 @@ fn render_enum<'a>(r#enum: EnumPair<'a>, warnings: &mut Warnings) -> renderer::E rendered_enum.map(mapped_name); } - if r#enum.name_from_psl() { - let warning = warnings::warning_enriched_with_map_on_enum(&[warnings::Enum::new(&r#enum.name())]); - warnings.push(warning); - } - if let Some(docs) = r#enum.documentation() { rendered_enum.documentation(docs); } - if r#enum.uses_duplicate_name() { - warnings.duplicate_names.push(warnings::TopLevelItem { - r#type: warnings::TopLevelType::Enum, - name: r#enum.name().to_string(), - }) - } - for variant in r#enum.variants() { - if variant.name().is_empty() { - let value = variant - .mapped_name() - .map(String::from) - .unwrap_or_else(|| variant.name().to_string()); - - let warning = warnings::EnumAndValue { - enm: r#enum.name().to_string(), - value, - }; - - warnings.enum_values_with_empty_names.push(warning); - } - let mut rendered_variant = renderer::EnumVariant::new(variant.name()); if let Some(docs) = variant.documentation() { @@ -88,26 +58,11 @@ fn render_enum<'a>(r#enum: EnumPair<'a>, warnings: &mut Warnings) -> renderer::E } if variant.name().is_empty() || sanitize_datamodel_names::needs_sanitation(&variant.name()) { - let warning = warnings::EnumAndValue { - enm: r#enum.name().to_string(), - value: variant.name().to_string(), - }; - - warnings.enum_values_with_empty_names.push(warning); rendered_variant.comment_out(); - } else if variant.name_from_psl() { - remapped_values.push(warnings::EnumAndValue { - value: variant.name().to_string(), - enm: r#enum.name().to_string(), - }); } rendered_enum.push_variant(rendered_variant); } - if !remapped_values.is_empty() { - warnings.push(warnings::warning_enriched_with_map_on_enum_value(&remapped_values)) - } - rendered_enum } diff --git a/introspection-engine/connectors/sql-introspection-connector/src/rendering/id.rs b/introspection-engine/connectors/sql-introspection-connector/src/rendering/id.rs index a368b2f5760f..485e7cae90e7 100644 --- a/introspection-engine/connectors/sql-introspection-connector/src/rendering/id.rs +++ b/introspection-engine/connectors/sql-introspection-connector/src/rendering/id.rs @@ -1,11 +1,8 @@ -use crate::{ - pair::IdPair, - warnings::{self, Warnings}, -}; +use crate::pair::IdPair; use datamodel_renderer::datamodel as renderer; /// Render a model/view level `@@id` definition. 
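///
/// The call site in this patch's `rendering/models.rs` looks like:
///
/// ```ignore
/// if let Some(id) = model.id() {
///     rendered.id(id::render(id));
/// }
/// ```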
-pub(super) fn render<'a>(id: IdPair<'a>, warnings: &mut Warnings) -> renderer::IdDefinition<'a> { +pub(super) fn render(id: IdPair<'_>) -> renderer::IdDefinition<'_> { let fields = id.fields().map(|field| { let mut rendered = renderer::IndexFieldInput::new(field.name()); @@ -24,20 +21,6 @@ pub(super) fn render<'a>(id: IdPair<'a>, warnings: &mut Warnings) -> renderer::I if let Some(name) = id.name() { definition.name(name); - - match id.model() { - Some(model) if model.ast_model().is_view() => { - warnings.reintrospected_id_names_in_view.push(warnings::View { - view: model.name().to_string(), - }); - } - Some(model) => { - warnings.reintrospected_id_names_in_model.push(warnings::Model { - model: model.name().to_string(), - }); - } - None => (), - } } if let Some(map) = id.mapped_name() { diff --git a/introspection-engine/connectors/sql-introspection-connector/src/rendering/models.rs b/introspection-engine/connectors/sql-introspection-connector/src/rendering/models.rs index 7638a8b33d15..e8954f745351 100644 --- a/introspection-engine/connectors/sql-introspection-connector/src/rendering/models.rs +++ b/introspection-engine/connectors/sql-introspection-connector/src/rendering/models.rs @@ -1,36 +1,31 @@ //! Rendering of model blocks. -use super::{id, relation_field, scalar_field}; +use super::{id, indexes, relation_field, scalar_field}; use crate::{ - datamodel_calculator::{InputContext, OutputContext}, + datamodel_calculator::DatamodelCalculatorContext, introspection_helpers::{self as helpers, compare_options_none_last}, pair::ModelPair, - warnings::{self, Warnings}, }; use datamodel_renderer::datamodel as renderer; - -use super::indexes; +use quaint::prelude::SqlFamily; /// Render all model blocks to the PSL. -pub(super) fn render<'a>(input: InputContext<'a>, output: &mut OutputContext<'a>) { - let mut models_with_idx: Vec<(Option<_>, renderer::Model<'a>)> = Vec::with_capacity(input.schema.tables_count()); - - for model in input.model_pairs() { - models_with_idx.push(( - model.previous_position(), - render_model(model, input, &mut output.warnings), - )); +pub(super) fn render<'a>(ctx: &'a DatamodelCalculatorContext<'a>, rendered: &mut renderer::Datamodel<'a>) { + let mut models_with_idx: Vec<(Option<_>, renderer::Model<'a>)> = Vec::with_capacity(ctx.sql_schema.tables_count()); + + for model in ctx.model_pairs() { + models_with_idx.push((model.previous_position(), render_model(model, ctx.sql_family))); } models_with_idx.sort_by(|(a, _), (b, _)| helpers::compare_options_none_last(*a, *b)); for (_, render) in models_with_idx.into_iter() { - output.rendered_schema.push_model(render); + rendered.push_model(render); } } /// Render a single model. -fn render_model<'a>(model: ModelPair<'a>, input: InputContext<'a>, warnings: &mut Warnings) -> renderer::Model<'a> { +fn render_model(model: ModelPair<'_>, sql_family: SqlFamily) -> renderer::Model<'_> { let mut rendered = renderer::Model::new(model.name()); if let Some(docs) = model.documentation() { @@ -60,45 +55,31 @@ fn render_model<'a>(model: ModelPair<'a>, input: InputContext<'a>, warnings: &mu } if let Some(id) = model.id() { - rendered.id(id::render(id, warnings)); + rendered.id(id::render(id)); } if model.scalar_fields().len() == 0 { - rendered.documentation(empty_table_comment(input)); - rendered.comment_out(); + // On postgres this is allowed, on the other dbs, this could be a symptom of missing privileges. + let docs = if sql_family.is_postgres() { + "We could not retrieve columns for the underlying table. 
Either it has none or you are missing rights to see them. Please check your privileges." + } else { + "We could not retrieve columns for the underlying table. You probably have no rights to see them. Please check your privileges." + }; - warnings.models_without_columns.push(warnings::Model { - model: model.name().to_string(), - }); - } else if !model.has_usable_identifier() { + rendered.documentation(docs); + rendered.comment_out(); + } else if !model.has_usable_identifier() && !model.ignored_in_psl() { let docs = "The underlying table does not contain a valid unique identifier and can therefore currently not be handled by the Prisma Client."; rendered.documentation(docs); - - warnings.models_without_identifiers.push(warnings::Model { - model: model.name().to_string(), - }); - } - - if model.uses_duplicate_name() { - warnings.duplicate_names.push(warnings::TopLevelItem { - r#type: warnings::TopLevelType::Model, - name: model.name().to_string(), - }) - } - - if model.remapped_name() { - warnings.remapped_models.push(warnings::Model { - model: model.name().to_string(), - }); } for field in model.scalar_fields() { - rendered.push_field(scalar_field::render(field, warnings)); + rendered.push_field(scalar_field::render(field)); } for field in model.relation_fields() { - rendered.push_field(relation_field::render(field, warnings)); + rendered.push_field(relation_field::render(field)); } let mut ordered_indexes: Vec<_> = model @@ -114,12 +95,3 @@ fn render_model<'a>(model: ModelPair<'a>, input: InputContext<'a>, warnings: &mu rendered } - -fn empty_table_comment(input: InputContext<'_>) -> &'static str { - // On postgres this is allowed, on the other dbs, this could be a symptom of missing privileges. - if input.sql_family.is_postgres() { - "We could not retrieve columns for the underlying table. Either it has none or you are missing rights to see them. Please check your privileges." - } else { - "We could not retrieve columns for the underlying table. You probably have no rights to see them. Please check your privileges." - } -} diff --git a/introspection-engine/connectors/sql-introspection-connector/src/rendering/relation_field.rs b/introspection-engine/connectors/sql-introspection-connector/src/rendering/relation_field.rs index 086c50e8f056..0b66e4d18cb7 100644 --- a/introspection-engine/connectors/sql-introspection-connector/src/rendering/relation_field.rs +++ b/introspection-engine/connectors/sql-introspection-connector/src/rendering/relation_field.rs @@ -1,10 +1,10 @@ //! Rendering of relation fields. -use crate::{pair::RelationFieldPair, warnings::Warnings}; +use crate::pair::RelationFieldPair; use datamodel_renderer::datamodel as renderer; /// Render a relation field to be added in a model. 
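///
/// Called once per relation field when rendering a model or view, as in
/// this patch's `rendering/models.rs`:
///
/// ```ignore
/// for field in model.relation_fields() {
///     rendered.push_field(relation_field::render(field));
/// }
/// ```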
-pub(super) fn render<'a>(field: RelationFieldPair<'a>, warnings: &mut Warnings) -> renderer::Field<'a> { +pub(super) fn render(field: RelationFieldPair<'_>) -> renderer::Field<'_> { let mut rendered = renderer::Field::new(field.field_name(), field.prisma_type()); if field.is_optional() { @@ -47,11 +47,5 @@ pub(super) fn render<'a>(field: RelationFieldPair<'a>, warnings: &mut Warnings) rendered.relation(relation); } - if field.reintrospected_relation() { - warnings.reintrospected_relations.push(crate::warnings::Model { - model: field.prisma_type().into_owned(), - }); - } - rendered } diff --git a/introspection-engine/connectors/sql-introspection-connector/src/rendering/scalar_field.rs b/introspection-engine/connectors/sql-introspection-connector/src/rendering/scalar_field.rs index da94e0931037..5daec5b93aa9 100644 --- a/introspection-engine/connectors/sql-introspection-connector/src/rendering/scalar_field.rs +++ b/introspection-engine/connectors/sql-introspection-connector/src/rendering/scalar_field.rs @@ -3,14 +3,12 @@ use crate::{ pair::{IdPair, IndexPair, ScalarFieldPair}, rendering::defaults, - warnings::{self, Warnings}, }; use datamodel_renderer::datamodel as renderer; -use either::Either; use sql_schema_describer::ColumnArity; /// Render a scalar field to be added in a model. -pub(crate) fn render<'a>(field: ScalarFieldPair<'a>, warnings: &mut Warnings) -> renderer::Field<'a> { +pub(crate) fn render(field: ScalarFieldPair<'_>) -> renderer::Field<'_> { let mut rendered = renderer::Field::new(field.name(), field.prisma_type()); match field.arity() { @@ -35,7 +33,7 @@ pub(crate) fn render<'a>(field: ScalarFieldPair<'a>, warnings: &mut Warnings) -> rendered.documentation(docs); } - if let Some(default) = defaults::render(field, warnings) { + if let Some(default) = defaults::render(field.default()) { rendered.default(default); } @@ -55,73 +53,11 @@ pub(crate) fn render<'a>(field: ScalarFieldPair<'a>, warnings: &mut Warnings) -> rendered.unique(render_unique(unique)); } - if field.remapped_name_from_psl() { - match field.container() { - Either::Left(model) => { - let mf = crate::warnings::ModelAndField { - model: model.name().to_string(), - field: field.name().to_string(), - }; - - warnings.remapped_fields_in_model.push(mf); - } - Either::Right(view) => { - let mf = crate::warnings::ViewAndField { - view: view.name().to_string(), - field: field.name().to_string(), - }; - - warnings.remapped_fields_in_view.push(mf); - } - } - } - - if field.is_unsupported() { - match field.container() { - Either::Left(model) => { - let mf = warnings::ModelAndFieldAndType { - model: model.name().to_string(), - field: field.name().to_string(), - tpe: field.prisma_type().to_string(), - }; - - warnings.unsupported_types_in_model.push(mf) - } - Either::Right(view) => { - let mf = warnings::ViewAndFieldAndType { - view: view.name().to_string(), - field: field.name().to_string(), - tpe: field.prisma_type().to_string(), - }; - - warnings.unsupported_types_in_view.push(mf) - } - } - } - if field.remapped_name_empty() { let docs = "This field was commented out because of an invalid name. 
Please provide a valid one that matches [a-zA-Z][a-zA-Z0-9_]*"; + rendered.documentation(docs); rendered.commented_out(); - - match field.container() { - Either::Left(model) => { - let mf = crate::warnings::ModelAndField { - model: model.name().to_string(), - field: field.name().to_string(), - }; - - warnings.fields_with_empty_names_in_model.push(mf); - } - Either::Right(view) => { - let mf = crate::warnings::ViewAndField { - view: view.name().to_string(), - field: field.name().to_string(), - }; - - warnings.fields_with_empty_names_in_view.push(mf); - } - } } rendered diff --git a/introspection-engine/connectors/sql-introspection-connector/src/rendering/views.rs b/introspection-engine/connectors/sql-introspection-connector/src/rendering/views.rs index 8baa7459b1f3..f1dd7ba086c9 100644 --- a/introspection-engine/connectors/sql-introspection-connector/src/rendering/views.rs +++ b/introspection-engine/connectors/sql-introspection-connector/src/rendering/views.rs @@ -1,30 +1,25 @@ -use crate::{ - datamodel_calculator::{InputContext, OutputContext}, - introspection_helpers as helpers, - pair::ViewPair, - warnings::{self, Warnings}, -}; +use crate::{datamodel_calculator::DatamodelCalculatorContext, introspection_helpers as helpers, pair::ViewPair}; use datamodel_renderer::datamodel as renderer; use super::{id, indexes, relation_field, scalar_field}; /// Render all view blocks to the PSL. -pub(super) fn render<'a>(input: InputContext<'a>, output: &mut OutputContext<'a>) { - let mut views_with_idx: Vec<(Option<_>, renderer::View<'a>)> = Vec::with_capacity(input.schema.views_count()); +pub(super) fn render<'a>(ctx: &'a DatamodelCalculatorContext<'a>, rendered: &mut renderer::Datamodel<'a>) { + let mut views_with_idx: Vec<(Option<_>, renderer::View<'a>)> = Vec::with_capacity(ctx.sql_schema.views_count()); - for view in input.view_pairs() { - views_with_idx.push((view.previous_position(), render_view(view, &mut output.warnings))); + for view in ctx.view_pairs() { + views_with_idx.push((view.previous_position(), render_view(view))); } views_with_idx.sort_by(|(a, _), (b, _)| helpers::compare_options_none_last(*a, *b)); for (_, render) in views_with_idx.into_iter() { - output.rendered_schema.push_view(render); + rendered.push_view(render); } } /// Render a single view. 
-fn render_view<'a>(view: ViewPair<'a>, warnings: &mut Warnings) -> renderer::View<'a> { +fn render_view(view: ViewPair<'_>) -> renderer::View<'_> { let mut rendered = renderer::View::new(view.name()); if let Some(docs) = view.documentation() { @@ -54,37 +49,20 @@ fn render_view<'a>(view: ViewPair<'a>, warnings: &mut Warnings) -> renderer::Vie } if let Some(id) = view.id() { - rendered.id(id::render(id, warnings)); + rendered.id(id::render(id)); } - if !view.has_usable_identifier() { + if !view.has_usable_identifier() && !view.ignored_in_psl() { let docs = "The underlying view does not contain a valid unique identifier and can therefore currently not be handled by the Prisma Client."; rendered.documentation(docs); - - warnings.views_without_identifiers.push(warnings::View { - view: view.name().to_string(), - }); - } - - if view.uses_duplicate_name() { - warnings.duplicate_names.push(warnings::TopLevelItem { - r#type: warnings::TopLevelType::View, - name: view.name().to_string(), - }) - } - - if view.remapped_name() { - warnings.remapped_views.push(warnings::View { - view: view.name().to_string(), - }); } for field in view.scalar_fields() { - rendered.push_field(scalar_field::render(field, warnings)); + rendered.push_field(scalar_field::render(field)); } for field in view.relation_fields() { - rendered.push_field(relation_field::render(field, warnings)); + rendered.push_field(relation_field::render(field)); } for definition in view.indexes().map(indexes::render) { diff --git a/introspection-engine/connectors/sql-introspection-connector/src/sanitize_datamodel_names.rs b/introspection-engine/connectors/sql-introspection-connector/src/sanitize_datamodel_names.rs index 1432f9129f66..d854d86b2ecd 100644 --- a/introspection-engine/connectors/sql-introspection-connector/src/sanitize_datamodel_names.rs +++ b/introspection-engine/connectors/sql-introspection-connector/src/sanitize_datamodel_names.rs @@ -5,7 +5,7 @@ use once_cell::sync::Lazy; use regex::Regex; use std::borrow::Cow; -use crate::datamodel_calculator::InputContext; +use crate::datamodel_calculator::DatamodelCalculatorContext; /// Regex to determine if an identifier starts with a character that /// is not supported. @@ -57,7 +57,11 @@ pub(crate) enum ModelName<'a> { impl<'a> ModelName<'a> { /// Create a name from an SQL identifier. - pub(crate) fn new_from_sql(name: &'a str, namespace: Option<&'a str>, context: InputContext<'a>) -> Self { + pub(crate) fn new_from_sql( + name: &'a str, + namespace: Option<&'a str>, + context: &DatamodelCalculatorContext<'a>, + ) -> Self { match (name, namespace) { (mapped_name, Some(namespace)) if !context.name_is_unique(mapped_name) => { ModelName::RenamedDuplicate { mapped_name, namespace } diff --git a/introspection-engine/connectors/sql-introspection-connector/src/version_checker.rs b/introspection-engine/connectors/sql-introspection-connector/src/version_checker.rs index 339fc466ec41..3579e42785e5 100644 --- a/introspection-engine/connectors/sql-introspection-connector/src/version_checker.rs +++ b/introspection-engine/connectors/sql-introspection-connector/src/version_checker.rs @@ -1,6 +1,6 @@ //! Prisma version information lookup. 
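//!
//! Illustrative flow (the `ctx` value is assumed): the check yields a
//! `Version` variant such as `Version::Prisma2` or `Version::NonPrisma`.
//!
//! ```ignore
//! let version = check_prisma_version(&ctx);
//! ```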
-use crate::datamodel_calculator::InputContext; +use crate::datamodel_calculator::DatamodelCalculatorContext; use crate::introspection_helpers::{ has_created_at_and_updated_at, is_new_migration_table, is_old_migration_table, is_prisma_1_or_11_list_table, is_prisma_1_point_0_join_table, is_prisma_1_point_1_or_2_join_table, is_relay_table, @@ -61,14 +61,17 @@ const MYSQL_TYPES: &[MySqlType] = &[ /// Find out if the database is created with a specific version of /// Prisma. -pub(crate) fn check_prisma_version(input: &InputContext<'_>) -> Version { +pub(crate) fn check_prisma_version(input: &DatamodelCalculatorContext<'_>) -> Version { let mut version_checker = VersionChecker { sql_family: input.sql_family(), is_cockroachdb: input.is_cockroach(), - has_migration_table: input.schema.table_walkers().any(is_old_migration_table), - has_relay_table: input.schema.table_walkers().any(is_relay_table), - has_prisma_1_join_table: input.schema.table_walkers().any(is_prisma_1_point_0_join_table), - has_prisma_1_1_or_2_join_table: input.schema.table_walkers().any(is_prisma_1_point_1_or_2_join_table), + has_migration_table: input.sql_schema.table_walkers().any(is_old_migration_table), + has_relay_table: input.sql_schema.table_walkers().any(is_relay_table), + has_prisma_1_join_table: input.sql_schema.table_walkers().any(is_prisma_1_point_0_join_table), + has_prisma_1_1_or_2_join_table: input + .sql_schema + .table_walkers() + .any(is_prisma_1_point_1_or_2_join_table), uses_on_delete: false, uses_default_values: false, always_has_created_at_updated_at: true, @@ -78,7 +81,7 @@ pub(crate) fn check_prisma_version(input: &DatamodelCalculatorContext<'_>) -> Version { for table in input - .schema + .sql_schema .table_walkers() .filter(|table| !is_old_migration_table(*table)) .filter(|table| !is_new_migration_table(*table)) @@ -105,7 +108,7 @@ pub(crate) fn check_prisma_version(input: &DatamodelCalculatorContext<'_>) -> Version { } match version_checker.sql_family { - _ if input.schema.is_empty() => Version::NonPrisma, + _ if input.sql_schema.is_empty() => Version::NonPrisma, SqlFamily::Sqlite if version_checker.is_prisma_2() => Version::Prisma2, SqlFamily::Sqlite => Version::NonPrisma, SqlFamily::Mysql if version_checker.is_prisma_2() => Version::Prisma2, diff --git a/introspection-engine/connectors/sql-introspection-connector/src/warnings.rs b/introspection-engine/connectors/sql-introspection-connector/src/warnings.rs index 8d998273f511..1cffd9e1c72f 100644 --- a/introspection-engine/connectors/sql-introspection-connector/src/warnings.rs +++ b/introspection-engine/connectors/sql-introspection-connector/src/warnings.rs @@ -1,448 +1,34 @@ //! Definition of warnings, which are displayed to the user during `db //! pull`. +mod r#enum; +mod generators; +mod model; +mod view; + +use crate::datamodel_calculator::DatamodelCalculatorContext; +use generators::Warnings; use introspection_connector::Warning; -use serde::Serialize; +use psl::PreviewFeature; -/// Collections used for warning generation. These should be preferred -/// over directly creating warnings from the code, to prevent spamming -/// the user. -#[derive(Debug, Default)] -pub(crate) struct Warnings { - /// Store final warnings to this vector. - pub(crate) warnings: Vec<Warning>, - /// Fields that are using Prisma 1 UUID defaults. - pub(crate) prisma_1_uuid_defaults: Vec<ModelAndField>, - /// Fields that are using Prisma 1 CUID defaults. - pub(crate) prisma_1_cuid_defaults: Vec<ModelAndField>, - /// Fields having an empty name. - pub(crate) fields_with_empty_names_in_model: Vec<ModelAndField>, - /// Fields having an empty name.
- pub(crate) fields_with_empty_names_in_view: Vec<ViewAndField>, - /// Field names in models we remapped during introspection. - pub(crate) remapped_fields_in_model: Vec<ModelAndField>, - /// Field names in views we remapped during introspection. - pub(crate) remapped_fields_in_view: Vec<ViewAndField>, - /// Enum values that are empty strings. - pub(crate) enum_values_with_empty_names: Vec<EnumAndValue>, - /// Models that have no fields. - pub(crate) models_without_columns: Vec<Model>, - /// Models missing a id or unique constraint. - pub(crate) models_without_identifiers: Vec<Model>, - /// Views missing a id or unique constraint. - pub(crate) views_without_identifiers: Vec<View>, - /// If the id attribute has a name taken from a previous model. - pub(crate) reintrospected_id_names_in_model: Vec<Model>, - /// If the id attribute has a name taken from a previous view. - pub(crate) reintrospected_id_names_in_view: Vec<View>, - /// The field in model has a type we do not currently support in Prisma. - pub(crate) unsupported_types_in_model: Vec<ModelAndFieldAndType>, - /// The field in view has a type we do not currently support in Prisma. - pub(crate) unsupported_types_in_view: Vec<ViewAndFieldAndType>, - /// The name of the model is taken from a previous data model. - pub(crate) remapped_models: Vec<Model>, - /// The name of the model is taken from a previous data model. - pub(crate) remapped_views: Vec<View>, - /// The relation is copied from a previous data model, only if - /// `relationMode` is `prisma`. - pub(crate) reintrospected_relations: Vec<Model>, - /// The name of these models or enums was a dupe in the PSL. - pub(crate) duplicate_names: Vec<TopLevelItem>, -} +/// Analyzes the described database schema, triggering +/// warnings to the user if necessary. +pub(crate) fn generate(ctx: &DatamodelCalculatorContext<'_>) -> Vec<Warning> { + let mut warnings = Warnings::new(); -impl Warnings { - pub(crate) fn new() -> Self { - Self { - warnings: Vec::new(), - ..Default::default() - } + for r#enum in ctx.enum_pairs() { + r#enum::generate_warnings(r#enum, &mut warnings); } - pub(crate) fn push(&mut self, warning: Warning) { - self.warnings.push(warning); + for model in ctx.model_pairs() { + model::generate_warnings(model, &mut warnings); } - /// Generate warnings from all indicators. Must be called after - /// introspection.
- pub(crate) fn finalize(&mut self) -> Vec<Warning> { - fn maybe_warn<T>(elems: &[T], warning: impl Fn(&[T]) -> Warning, warnings: &mut Vec<Warning>) { - if !elems.is_empty() { - warnings.push(warning(elems)) - } + if ctx.config.preview_features().contains(PreviewFeature::Views) { + for view in ctx.view_pairs() { + view::generate_warnings(view, &mut warnings); } - - maybe_warn( - &self.models_without_identifiers, - warning_models_without_identifier, - &mut self.warnings, - ); - - maybe_warn( - &self.views_without_identifiers, - warning_views_without_identifier, - &mut self.warnings, - ); - - maybe_warn( - &self.unsupported_types_in_model, - warning_unsupported_types_in_models, - &mut self.warnings, - ); - - maybe_warn( - &self.unsupported_types_in_view, - warning_unsupported_types_in_views, - &mut self.warnings, - ); - - maybe_warn( - &self.remapped_models, - warning_enriched_with_map_on_model, - &mut self.warnings, - ); - - maybe_warn( - &self.remapped_views, - warning_enriched_with_map_on_view, - &mut self.warnings, - ); - - maybe_warn( - &self.remapped_fields_in_model, - warning_enriched_with_map_on_field_in_models, - &mut self.warnings, - ); - - maybe_warn( - &self.remapped_fields_in_view, - warning_enriched_with_map_on_field_in_views, - &mut self.warnings, - ); - - maybe_warn( - &self.models_without_columns, - warning_models_without_columns, - &mut self.warnings, - ); - - maybe_warn( - &self.reintrospected_id_names_in_model, - warning_enriched_with_custom_primary_key_names_in_models, - &mut self.warnings, - ); - - maybe_warn( - &self.reintrospected_id_names_in_view, - warning_enriched_with_custom_primary_key_names_in_views, - &mut self.warnings, - ); - - maybe_warn( - &self.prisma_1_uuid_defaults, - warning_default_uuid_warning, - &mut self.warnings, - ); - - maybe_warn( - &self.prisma_1_cuid_defaults, - warning_default_cuid_warning, - &mut self.warnings, - ); - - maybe_warn( - &self.enum_values_with_empty_names, - warning_enum_values_with_empty_names, - &mut self.warnings, - ); - - maybe_warn( - &self.fields_with_empty_names_in_model, - warning_fields_with_empty_names_in_models, - &mut self.warnings, - ); - - maybe_warn( - &self.fields_with_empty_names_in_view, - warning_fields_with_empty_names_in_views, - &mut self.warnings, - ); - - maybe_warn( - &self.reintrospected_relations, - warning_relations_added_from_the_previous_data_model, - &mut self.warnings, - ); - - maybe_warn( - &self.duplicate_names, - warning_top_level_item_name_is_a_dupe, - &mut self.warnings, - ); - - std::mem::take(&mut self.warnings) - } -} - -#[derive(Serialize, Debug, Clone)] -pub(crate) struct Model { - pub(crate) model: String, -} - -#[derive(Serialize, Debug, Clone)] -pub(crate) struct View { - pub(crate) view: String, -} - -#[derive(Serialize, Debug, Clone)] -pub(crate) struct Enum { - pub(crate) enm: String, -} - -impl Enum { - pub(crate) fn new(name: &str) -> Self { - Enum { enm: name.to_owned() } - } -} - -#[derive(Serialize, Debug, Clone)] -pub(crate) struct ModelAndField { - pub(crate) model: String, - pub(crate) field: String, -} - -#[derive(Serialize, Debug, Clone)] -pub(crate) struct ViewAndField { - pub(crate) view: String, - pub(crate) field: String, -} - -#[derive(Serialize, Debug, Clone)] -pub(crate) struct ModelAndIndex { - pub(crate) model: String, - pub(crate) index_db_name: String, -} - -#[derive(Serialize, Debug)] -pub(crate) struct ModelAndFieldAndType { - pub(crate) model: String, - pub(crate) field: String, - pub(crate) tpe: String, -} - -#[derive(Serialize, Debug)] -pub(crate) struct ViewAndFieldAndType {
- pub(crate) view: String, - pub(crate) field: String, - pub(crate) tpe: String, -} - -#[derive(Serialize, Debug, Clone)] -pub(crate) struct EnumAndValue { - pub(crate) enm: String, - pub(crate) value: String, -} - -#[derive(Serialize, Debug, Clone, Copy)] -pub(crate) enum TopLevelType { - Model, - Enum, - View, -} - -#[derive(Serialize, Debug, Clone)] -pub(crate) struct TopLevelItem { - pub(crate) r#type: TopLevelType, - pub(crate) name: String, -} - -pub(crate) fn warning_models_without_identifier(affected: &[Model]) -> Warning { - Warning { - code: 1, - message: "The following models were commented out as they do not have a valid unique identifier or id. This is currently not supported by the Prisma Client.".into(), - affected: serde_json::to_value(affected).unwrap(), - } -} - -pub(crate) fn warning_fields_with_empty_names_in_models(affected: &[ModelAndField]) -> Warning { - Warning { - code: 2, - message: "These fields were commented out because their names are currently not supported by Prisma. Please provide valid ones that match [a-zA-Z][a-zA-Z0-9_]* using the `@map` attribute." - .into(), - affected: serde_json::to_value(affected).unwrap(), - } -} - -pub(crate) fn warning_unsupported_types_in_models(affected: &[ModelAndFieldAndType]) -> Warning { - Warning { - code: 3, - message: "These fields are not supported by the Prisma Client, because Prisma currently does not support their types.".into(), - affected: serde_json::to_value(affected).unwrap(), - } -} - -pub(crate) fn warning_enum_values_with_empty_names(affected: &[EnumAndValue]) -> Warning { - Warning { - code: 4, - message: "These enum values were commented out because their names are currently not supported by Prisma. Please provide valid ones that match [a-zA-Z][a-zA-Z0-9_]* using the `@map` attribute." - .into(), - affected: serde_json::to_value(affected).unwrap(), - } -} - -pub(crate) fn warning_default_cuid_warning(affected: &[ModelAndField]) -> Warning { - Warning { - code: 5, - message: - "These id fields had a `@default(cuid())` added because we believe the schema was created by Prisma 1." - .into(), - affected: serde_json::to_value(affected).unwrap(), - } -} - -pub(crate) fn warning_default_uuid_warning(affected: &[ModelAndField]) -> Warning { - Warning { - code: 6, - message: - "These id fields had a `@default(uuid())` added because we believe the schema was created by Prisma 1." - .into(), - affected: serde_json::to_value(affected).unwrap(), - } -} - -pub(crate) fn warning_enriched_with_map_on_model(affected: &[Model]) -> Warning { - Warning { - code: 7, - message: "These models were enriched with `@@map` information taken from the previous Prisma schema.".into(), - affected: serde_json::to_value(affected).unwrap(), - } -} - -pub(crate) fn warning_enriched_with_map_on_field_in_models(affected: &[ModelAndField]) -> Warning { - Warning { - code: 8, - message: "These fields were enriched with `@map` information taken from the previous Prisma schema.".into(), - affected: serde_json::to_value(affected).unwrap(), - } -} - -pub(crate) fn warning_enriched_with_map_on_enum(affected: &[Enum]) -> Warning { - Warning { - code: 9, - message: "These enums were enriched with `@@map` information taken from the previous Prisma schema.".into(), - affected: serde_json::to_value(affected).unwrap(), - } -} - -pub(crate) fn warning_enriched_with_map_on_enum_value(affected: &[EnumAndValue]) -> Warning { - Warning { - code: 10, - message: "These enum values were enriched with `@map` information taken from the previous Prisma schema." 
- .into(), - affected: serde_json::to_value(affected).unwrap(), - } -} - -//todo maybe we can get rid of this alltogether due to @@ignore -//but maybe we should have warnings for ignored fields and models -pub(crate) fn warning_models_without_columns(affected: &[Model]) -> Warning { - Warning { - code: 14, - message: "The following models were commented out as we could not retrieve columns for them. Please check your privileges.".into(), - affected: serde_json::to_value(affected).unwrap(), - } -} - -pub(crate) fn warning_enriched_with_custom_primary_key_names_in_models(affected: &[Model]) -> Warning { - Warning { - code: 18, - message: "These models were enriched with custom compound id names taken from the previous Prisma schema." - .into(), - affected: serde_json::to_value(affected).unwrap(), - } -} - -pub(crate) fn warning_relations_added_from_the_previous_data_model(affected: &[Model]) -> Warning { - Warning { - code: 19, - message: "Relations were copied from the previous data model due to not using foreign keys in the database. If any of the relation columns changed in the database, the relations might not be correct anymore.".into(), - affected: serde_json::to_value(affected).unwrap(), - } -} - -pub(crate) fn warning_unsupported_types_in_views(affected: &[ViewAndFieldAndType]) -> Warning { - Warning { - code: 20, - message: "These fields are not supported by the Prisma Client, because Prisma currently does not support their types.".into(), - affected: serde_json::to_value(affected).unwrap(), } -} -pub(crate) fn warning_enriched_with_map_on_field_in_views(affected: &[ViewAndField]) -> Warning { - Warning { - code: 21, - message: "These fields were enriched with `@map` information taken from the previous Prisma schema.".into(), - affected: serde_json::to_value(affected).unwrap(), - } -} - -pub(crate) fn warning_enriched_with_map_on_view(affected: &[View]) -> Warning { - Warning { - code: 22, - message: "These views were enriched with `@@map` information taken from the previous Prisma schema.".into(), - affected: serde_json::to_value(affected).unwrap(), - } -} - -pub(crate) fn warning_views_without_identifier(affected: &[View]) -> Warning { - Warning { - code: 23, - message: "The following views were commented out as they do not have a valid unique identifier or id. This is currently not supported by the Prisma Client.".into(), - affected: serde_json::to_value(affected).unwrap(), - } -} - -pub(crate) fn warning_enriched_with_custom_primary_key_names_in_views(affected: &[View]) -> Warning { - Warning { - code: 24, - message: "These views were enriched with custom compound id names taken from the previous Prisma schema." - .into(), - affected: serde_json::to_value(affected).unwrap(), - } -} - -pub(crate) fn warning_fields_with_empty_names_in_views(affected: &[ViewAndField]) -> Warning { - Warning { - code: 25, - message: "These fields were commented out because their names are currently not supported by Prisma. Please provide valid ones that match [a-zA-Z][a-zA-Z0-9_]* using the `@map` attribute." 
- .into(), - affected: serde_json::to_value(affected).unwrap(), - } -} - -pub(crate) fn warning_top_level_item_name_is_a_dupe(affected: &[TopLevelItem]) -> Warning { - let has_enums = affected.iter().any(|i| matches!(i.r#type, TopLevelType::Enum)); - let has_models = affected.iter().any(|i| matches!(i.r#type, TopLevelType::Model)); - let has_views = affected.iter().any(|i| matches!(i.r#type, TopLevelType::View)); - - let message = if has_models && has_enums && has_views { - "These models, views and enums were renamed due to their names being duplicates in the Prisma Schema Language." - } else if has_models && has_enums { - "These models and enums were renamed due to their names being duplicates in the Prisma Schema Language." - } else if has_models && has_views { - "These models and views were renamed due to their names being duplicates in the Prisma Schema Language." - } else if has_enums && has_views { - "These enums and views were renamed due to their names being duplicates in the Prisma Schema Language." - } else if has_models { - "These models were renamed due to their names being duplicates in the Prisma Schema Language." - } else if has_views { - "These views were renamed due to their names being duplicates in the Prisma Schema Language." - } else { - "These enums were renamed due to their names being duplicates in the Prisma Schema Language." - }; - - Warning { - code: 20, - message: message.into(), - affected: serde_json::to_value(affected).unwrap(), - } + warnings.finalize() } diff --git a/introspection-engine/connectors/sql-introspection-connector/src/warnings/enum.rs b/introspection-engine/connectors/sql-introspection-connector/src/warnings/enum.rs new file mode 100644 index 000000000000..9e75be6bcf0e --- /dev/null +++ b/introspection-engine/connectors/sql-introspection-connector/src/warnings/enum.rs @@ -0,0 +1,48 @@ +use crate::{pair::EnumPair, sanitize_datamodel_names}; + +use super::generators::{self, Warnings}; + +/// Analyze and generate warnings from an enum.
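+///
+/// Driven by `warnings::generate` in this patch:
+///
+/// ```ignore
+/// for r#enum in ctx.enum_pairs() {
+///     r#enum::generate_warnings(r#enum, &mut warnings);
+/// }
+/// ```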
+pub(super) fn generate_warnings(r#enum: EnumPair<'_>, warnings: &mut Warnings) { + if r#enum.name_from_psl() { + let warning = generators::warning_enriched_with_map_on_enum(&[generators::Enum::new(&r#enum.name())]); + warnings.push(warning); + } + + if r#enum.uses_duplicate_name() { + warnings.duplicate_names.push(generators::TopLevelItem { + r#type: generators::TopLevelType::Enum, + name: r#enum.name().to_string(), + }) + } + + for variant in r#enum.variants() { + if variant.name().is_empty() { + let value = variant + .mapped_name() + .map(String::from) + .unwrap_or_else(|| variant.name().to_string()); + + let warning = generators::EnumAndValue { + enm: r#enum.name().to_string(), + value, + }; + + warnings.enum_values_with_empty_names.push(warning); + } + + if variant.name().is_empty() || sanitize_datamodel_names::needs_sanitation(&variant.name()) { + let warning = generators::EnumAndValue { + enm: r#enum.name().to_string(), + value: variant.name().to_string(), + }; + + warnings.enum_values_with_empty_names.push(warning); + } else if variant.name_from_psl() { + warnings.remapped_values.push(generators::EnumAndValue { + value: variant.name().to_string(), + enm: r#enum.name().to_string(), + }); + } + } +} diff --git a/introspection-engine/connectors/sql-introspection-connector/src/warnings/generators.rs b/introspection-engine/connectors/sql-introspection-connector/src/warnings/generators.rs new file mode 100644 index 000000000000..5f012d430df1 --- /dev/null +++ b/introspection-engine/connectors/sql-introspection-connector/src/warnings/generators.rs @@ -0,0 +1,453 @@ +use introspection_connector::Warning; +use serde::Serialize; + +/// Collections used for warning generation. These should be preferred +/// over directly creating warnings from the code, to prevent spamming +/// the user. +#[derive(Debug, Default)] +pub(super) struct Warnings { + /// Store final warnings to this vector. + warnings: Vec<Warning>, + /// Fields that are using Prisma 1 UUID defaults. + pub(super) prisma_1_uuid_defaults: Vec<ModelAndField>, + /// Fields that are using Prisma 1 CUID defaults. + pub(super) prisma_1_cuid_defaults: Vec<ModelAndField>, + /// Fields having an empty name. + pub(super) fields_with_empty_names_in_model: Vec<ModelAndField>, + /// Fields having an empty name. + pub(super) fields_with_empty_names_in_view: Vec<ViewAndField>, + /// Field names in models we remapped during introspection. + pub(super) remapped_fields_in_model: Vec<ModelAndField>, + /// Field names in views we remapped during introspection. + pub(super) remapped_fields_in_view: Vec<ViewAndField>, + /// Enum values that are empty strings. + pub(super) enum_values_with_empty_names: Vec<EnumAndValue>, + /// Models that have no fields. + pub(super) models_without_columns: Vec<Model>, + /// Models missing an id or unique constraint. + pub(super) models_without_identifiers: Vec<Model>, + /// Views missing an id or unique constraint. + pub(super) views_without_identifiers: Vec<View>, + /// If the id attribute has a name taken from a previous model. + pub(super) reintrospected_id_names_in_model: Vec<Model>, + /// If the id attribute has a name taken from a previous view. + pub(super) reintrospected_id_names_in_view: Vec<View>, + /// The field in model has a type we do not currently support in Prisma. + pub(super) unsupported_types_in_model: Vec<ModelAndFieldAndType>, + /// The field in view has a type we do not currently support in Prisma. + pub(super) unsupported_types_in_view: Vec<ViewAndFieldAndType>, + /// The name of the model is taken from a previous data model. + pub(super) remapped_models: Vec<Model>, + /// The name of the view is taken from a previous data model.
+ pub(super) remapped_views: Vec<View>, + /// The name of the enum variant is taken from a previous data model. + pub(super) remapped_values: Vec<EnumAndValue>, + /// The relation is copied from a previous data model, only if + /// `relationMode` is `prisma`. + pub(super) reintrospected_relations: Vec<Model>, + /// The name of these models or enums was a dupe in the PSL. + pub(super) duplicate_names: Vec<TopLevelItem>, +} + +impl Warnings { + pub(super) fn new() -> Self { + Self { + warnings: Vec::new(), + ..Default::default() + } + } + + pub(super) fn push(&mut self, warning: Warning) { + self.warnings.push(warning); + } + + /// Generate warnings from all indicators. Must be called after + /// introspection. + pub(super) fn finalize(mut self) -> Vec<Warning> { + fn maybe_warn<T>(elems: &[T], warning: impl Fn(&[T]) -> Warning, warnings: &mut Vec<Warning>) { + if !elems.is_empty() { + warnings.push(warning(elems)) + } + } + + maybe_warn( + &self.models_without_identifiers, + warning_models_without_identifier, + &mut self.warnings, + ); + + maybe_warn( + &self.views_without_identifiers, + warning_views_without_identifier, + &mut self.warnings, + ); + + maybe_warn( + &self.unsupported_types_in_model, + warning_unsupported_types_in_models, + &mut self.warnings, + ); + + maybe_warn( + &self.unsupported_types_in_view, + warning_unsupported_types_in_views, + &mut self.warnings, + ); + + maybe_warn( + &self.remapped_models, + warning_enriched_with_map_on_model, + &mut self.warnings, + ); + + maybe_warn( + &self.remapped_values, + warning_enriched_with_map_on_enum_value, + &mut self.warnings, + ); + + maybe_warn( + &self.remapped_views, + warning_enriched_with_map_on_view, + &mut self.warnings, + ); + + maybe_warn( + &self.remapped_fields_in_model, + warning_enriched_with_map_on_field_in_models, + &mut self.warnings, + ); + + maybe_warn( + &self.remapped_fields_in_view, + warning_enriched_with_map_on_field_in_views, + &mut self.warnings, + ); + + maybe_warn( + &self.models_without_columns, + warning_models_without_columns, + &mut self.warnings, + ); + + maybe_warn( + &self.reintrospected_id_names_in_model, + warning_enriched_with_custom_primary_key_names_in_models, + &mut self.warnings, + ); + + maybe_warn( + &self.reintrospected_id_names_in_view, + warning_enriched_with_custom_primary_key_names_in_views, + &mut self.warnings, + ); + + maybe_warn( + &self.prisma_1_uuid_defaults, + warning_default_uuid_warning, + &mut self.warnings, + ); + + maybe_warn( + &self.prisma_1_cuid_defaults, + warning_default_cuid_warning, + &mut self.warnings, + ); + + maybe_warn( + &self.enum_values_with_empty_names, + warning_enum_values_with_empty_names, + &mut self.warnings, + ); + + maybe_warn( + &self.fields_with_empty_names_in_model, + warning_fields_with_empty_names_in_models, + &mut self.warnings, + ); + + maybe_warn( + &self.fields_with_empty_names_in_view, + warning_fields_with_empty_names_in_views, + &mut self.warnings, + ); + + maybe_warn( + &self.reintrospected_relations, + warning_relations_added_from_the_previous_data_model, + &mut self.warnings, + ); + + maybe_warn( + &self.duplicate_names, + warning_top_level_item_name_is_a_dupe, + &mut self.warnings, + ); + + self.warnings + } +} + +#[derive(Serialize, Debug, Clone)] +pub(super) struct Model { + pub(super) model: String, +} + +#[derive(Serialize, Debug, Clone)] +pub(super) struct View { + pub(super) view: String, +} + +#[derive(Serialize, Debug, Clone)] +pub(super) struct Enum { + pub(super) enm: String, +} + +impl Enum { + pub(super) fn new(name: &str) -> Self { + Enum { enm: name.to_owned() } + } +} +
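+// A minimal sketch of the accumulator flow, using only items defined in this
+// file (the model name below is invented for the example): entries collected
+// into the vectors above are turned into user-facing warnings by `finalize`.
+#[cfg(test)]
+mod accumulator_sketch {
+    use super::*;
+
+    #[test]
+    fn one_model_without_identifier_finalizes_into_one_warning() {
+        let mut warnings = Warnings::new();
+
+        warnings.models_without_identifiers.push(Model {
+            model: "Cat".to_string(),
+        });
+
+        // `warning_models_without_identifier` renders this group as code 1.
+        let finalized = warnings.finalize();
+
+        assert_eq!(1, finalized.len());
+        assert_eq!(1, finalized[0].code);
+    }
+}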
+#[derive(Serialize, Debug, Clone)] +pub(super) struct ModelAndField { + pub(super) model: String, + pub(super) field: String, +} + +#[derive(Serialize, Debug, Clone)] +pub(super) struct ViewAndField { + pub(super) view: String, + pub(super) field: String, +} + +#[derive(Serialize, Debug, Clone)] +pub(super) struct ModelAndIndex { + pub(super) model: String, + pub(super) index_db_name: String, +} + +#[derive(Serialize, Debug)] +pub(super) struct ModelAndFieldAndType { + pub(super) model: String, + pub(super) field: String, + pub(super) tpe: String, +} + +#[derive(Serialize, Debug)] +pub(super) struct ViewAndFieldAndType { + pub(super) view: String, + pub(super) field: String, + pub(super) tpe: String, +} + +#[derive(Serialize, Debug, Clone)] +pub(super) struct EnumAndValue { + pub(super) enm: String, + pub(super) value: String, +} + +#[derive(Serialize, Debug, Clone, Copy)] +pub(super) enum TopLevelType { + Model, + Enum, + View, +} + +#[derive(Serialize, Debug, Clone)] +pub(super) struct TopLevelItem { + pub(super) r#type: TopLevelType, + pub(super) name: String, +} + +pub(super) fn warning_models_without_identifier(affected: &[Model]) -> Warning { + Warning { + code: 1, + message: "The following models were ignored as they do not have a valid unique identifier or id. This is currently not supported by the Prisma Client.".into(), + affected: serde_json::to_value(affected).unwrap(), + } +} + +pub(super) fn warning_fields_with_empty_names_in_models(affected: &[ModelAndField]) -> Warning { + Warning { + code: 2, + message: "These fields were commented out because their names are currently not supported by Prisma. Please provide valid ones that match [a-zA-Z][a-zA-Z0-9_]* using the `@map` attribute." + .into(), + affected: serde_json::to_value(affected).unwrap(), + } +} + +pub(super) fn warning_unsupported_types_in_models(affected: &[ModelAndFieldAndType]) -> Warning { + Warning { + code: 3, + message: "These fields are not supported by the Prisma Client, because Prisma currently does not support their types.".into(), + affected: serde_json::to_value(affected).unwrap(), + } +} + +pub(super) fn warning_enum_values_with_empty_names(affected: &[EnumAndValue]) -> Warning { + Warning { + code: 4, + message: "These enum values were commented out because their names are currently not supported by Prisma. Please provide valid ones that match [a-zA-Z][a-zA-Z0-9_]* using the `@map` attribute." + .into(), + affected: serde_json::to_value(affected).unwrap(), + } +} + +pub(super) fn warning_default_cuid_warning(affected: &[ModelAndField]) -> Warning { + Warning { + code: 5, + message: + "These id fields had a `@default(cuid())` added because we believe the schema was created by Prisma 1." + .into(), + affected: serde_json::to_value(affected).unwrap(), + } +} + +pub(super) fn warning_default_uuid_warning(affected: &[ModelAndField]) -> Warning { + Warning { + code: 6, + message: + "These id fields had a `@default(uuid())` added because we believe the schema was created by Prisma 1." 
+ .into(), + affected: serde_json::to_value(affected).unwrap(), + } +} + +pub(super) fn warning_enriched_with_map_on_model(affected: &[Model]) -> Warning { + Warning { + code: 7, + message: "These models were enriched with `@@map` information taken from the previous Prisma schema.".into(), + affected: serde_json::to_value(affected).unwrap(), + } +} + +pub(super) fn warning_enriched_with_map_on_field_in_models(affected: &[ModelAndField]) -> Warning { + Warning { + code: 8, + message: "These fields were enriched with `@map` information taken from the previous Prisma schema.".into(), + affected: serde_json::to_value(affected).unwrap(), + } +} + +pub(super) fn warning_enriched_with_map_on_enum(affected: &[Enum]) -> Warning { + Warning { + code: 9, + message: "These enums were enriched with `@@map` information taken from the previous Prisma schema.".into(), + affected: serde_json::to_value(affected).unwrap(), + } +} + +pub(super) fn warning_enriched_with_map_on_enum_value(affected: &[EnumAndValue]) -> Warning { + Warning { + code: 10, + message: "These enum values were enriched with `@map` information taken from the previous Prisma schema." + .into(), + affected: serde_json::to_value(affected).unwrap(), + } +} + +// TODO: maybe we can get rid of this altogether due to @@ignore, +// but maybe we should have warnings for ignored fields and models. +pub(super) fn warning_models_without_columns(affected: &[Model]) -> Warning { + Warning { + code: 14, + message: "The following models were commented out as we could not retrieve columns for them. Please check your privileges.".into(), + affected: serde_json::to_value(affected).unwrap(), + } +} + +pub(super) fn warning_enriched_with_custom_primary_key_names_in_models(affected: &[Model]) -> Warning { + Warning { + code: 18, + message: "These models were enriched with custom compound id names taken from the previous Prisma schema." + .into(), + affected: serde_json::to_value(affected).unwrap(), + } +} + +pub(super) fn warning_relations_added_from_the_previous_data_model(affected: &[Model]) -> Warning { + Warning { + code: 19, + message: "Relations were copied from the previous data model due to not using foreign keys in the database. If any of the relation columns changed in the database, the relations might not be correct anymore.".into(), + affected: serde_json::to_value(affected).unwrap(), + } +} + +pub(super) fn warning_top_level_item_name_is_a_dupe(affected: &[TopLevelItem]) -> Warning { + let has_enums = affected.iter().any(|i| matches!(i.r#type, TopLevelType::Enum)); + let has_models = affected.iter().any(|i| matches!(i.r#type, TopLevelType::Model)); + let has_views = affected.iter().any(|i| matches!(i.r#type, TopLevelType::View)); + + let message = if has_models && has_enums && has_views { + "These models, views and enums were renamed due to their names being duplicates in the Prisma Schema Language." + } else if has_models && has_enums { + "These models and enums were renamed due to their names being duplicates in the Prisma Schema Language." + } else if has_models && has_views { + "These models and views were renamed due to their names being duplicates in the Prisma Schema Language." + } else if has_enums && has_views { + "These enums and views were renamed due to their names being duplicates in the Prisma Schema Language." + } else if has_models { + "These models were renamed due to their names being duplicates in the Prisma Schema Language." 
+ } else if has_views { + "These views were renamed due to their names being duplicates in the Prisma Schema Language." + } else { + "These enums were renamed due to their names being duplicates in the Prisma Schema Language." + }; + + Warning { + code: 20, + message: message.into(), + affected: serde_json::to_value(affected).unwrap(), + } +} + +pub(super) fn warning_unsupported_types_in_views(affected: &[ViewAndFieldAndType]) -> Warning { + Warning { + code: 21, + message: "These fields are not supported by the Prisma Client, because Prisma currently does not support their types.".into(), + affected: serde_json::to_value(affected).unwrap(), + } +} + +pub(super) fn warning_enriched_with_map_on_field_in_views(affected: &[ViewAndField]) -> Warning { + Warning { + code: 22, + message: "These fields were enriched with `@map` information taken from the previous Prisma schema.".into(), + affected: serde_json::to_value(affected).unwrap(), + } +} + +pub(super) fn warning_enriched_with_map_on_view(affected: &[View]) -> Warning { + Warning { + code: 23, + message: "These views were enriched with `@@map` information taken from the previous Prisma schema.".into(), + affected: serde_json::to_value(affected).unwrap(), + } +} + +pub(super) fn warning_views_without_identifier(affected: &[View]) -> Warning { + Warning { + code: 24, + message: "The following views were ignored as they do not have a valid unique identifier or id. This is currently not supported by the Prisma Client.".into(), + affected: serde_json::to_value(affected).unwrap(), + } +} + +pub(super) fn warning_enriched_with_custom_primary_key_names_in_views(affected: &[View]) -> Warning { + Warning { + code: 25, + message: "These views were enriched with custom compound id names taken from the previous Prisma schema." + .into(), + affected: serde_json::to_value(affected).unwrap(), + } +} + +pub(super) fn warning_fields_with_empty_names_in_views(affected: &[ViewAndField]) -> Warning { + Warning { + code: 26, + message: "These fields were commented out because their names are currently not supported by Prisma. Please provide valid ones that match [a-zA-Z][a-zA-Z0-9_]* using the `@map` attribute." + .into(), + affected: serde_json::to_value(affected).unwrap(), + } +} diff --git a/introspection-engine/connectors/sql-introspection-connector/src/warnings/model.rs b/introspection-engine/connectors/sql-introspection-connector/src/warnings/model.rs new file mode 100644 index 000000000000..85bd69cf3b28 --- /dev/null +++ b/introspection-engine/connectors/sql-introspection-connector/src/warnings/model.rs @@ -0,0 +1,91 @@ +use crate::pair::{DefaultKind, ModelPair}; + +use super::generators::{self, Warnings}; + +/// Analyze and generate warnings from a model. 
+pub(super) fn generate_warnings(model: ModelPair<'_>, warnings: &mut Warnings) { + if model.id().and_then(|id| id.name()).is_some() { + warnings.reintrospected_id_names_in_model.push(generators::Model { + model: model.name().to_string(), + }); + } + + if model.scalar_fields().len() == 0 { + warnings.models_without_columns.push(generators::Model { + model: model.name().to_string(), + }); + } else if !model.has_usable_identifier() && !model.ignored_in_psl() { + warnings.models_without_identifiers.push(generators::Model { + model: model.name().to_string(), + }); + } + + if model.uses_duplicate_name() { + warnings.duplicate_names.push(generators::TopLevelItem { + r#type: generators::TopLevelType::Model, + name: model.name().to_string(), + }) + } + + if model.remapped_name() { + warnings.remapped_models.push(generators::Model { + model: model.name().to_string(), + }); + } + + for field in model.scalar_fields() { + if let Some(DefaultKind::Prisma1Uuid) = field.default().kind() { + let warn = generators::ModelAndField { + model: model.name().to_string(), + field: field.name().to_string(), + }; + + warnings.prisma_1_uuid_defaults.push(warn); + } + + if let Some(DefaultKind::Prisma1Cuid) = field.default().kind() { + let warn = generators::ModelAndField { + model: model.name().to_string(), + field: field.name().to_string(), + }; + + warnings.prisma_1_cuid_defaults.push(warn); + } + + if field.remapped_name_from_psl() { + let mf = generators::ModelAndField { + model: model.name().to_string(), + field: field.name().to_string(), + }; + + warnings.remapped_fields_in_model.push(mf); + } + + if field.is_unsupported() { + let mf = generators::ModelAndFieldAndType { + model: model.name().to_string(), + field: field.name().to_string(), + tpe: field.prisma_type().to_string(), + }; + + warnings.unsupported_types_in_model.push(mf) + } + + if field.remapped_name_empty() { + let mf = generators::ModelAndField { + model: model.name().to_string(), + field: field.name().to_string(), + }; + + warnings.fields_with_empty_names_in_model.push(mf); + } + } + + for field in model.relation_fields() { + if field.reintrospected_relation() { + warnings.reintrospected_relations.push(generators::Model { + model: field.prisma_type().into_owned(), + }); + } + } +} diff --git a/introspection-engine/connectors/sql-introspection-connector/src/warnings/view.rs b/introspection-engine/connectors/sql-introspection-connector/src/warnings/view.rs new file mode 100644 index 000000000000..8c36dc85395d --- /dev/null +++ b/introspection-engine/connectors/sql-introspection-connector/src/warnings/view.rs @@ -0,0 +1,69 @@ +use crate::pair::ViewPair; + +use super::generators::{self, Warnings}; + +/// Analyze and generate warnings from a view. 
+pub(super) fn generate_warnings(view: ViewPair<'_>, warnings: &mut Warnings) { + if view.id().and_then(|id| id.name()).is_some() { + warnings.reintrospected_id_names_in_view.push(generators::View { + view: view.name().to_string(), + }); + } + + if !view.has_usable_identifier() && !view.ignored_in_psl() { + warnings.views_without_identifiers.push(generators::View { + view: view.name().to_string(), + }); + } + + if view.uses_duplicate_name() { + warnings.duplicate_names.push(generators::TopLevelItem { + r#type: generators::TopLevelType::View, + name: view.name().to_string(), + }) + } + + if view.remapped_name() { + warnings.remapped_views.push(generators::View { + view: view.name().to_string(), + }); + } + + for field in view.scalar_fields() { + if field.remapped_name_from_psl() { + let mf = generators::ViewAndField { + view: view.name().to_string(), + field: field.name().to_string(), + }; + + warnings.remapped_fields_in_view.push(mf); + } + + if field.is_unsupported() { + let mf = generators::ViewAndFieldAndType { + view: view.name().to_string(), + field: field.name().to_string(), + tpe: field.prisma_type().to_string(), + }; + + warnings.unsupported_types_in_view.push(mf) + } + + if field.remapped_name_empty() { + let mf = generators::ViewAndField { + view: view.name().to_string(), + field: field.name().to_string(), + }; + + warnings.fields_with_empty_names_in_view.push(mf); + } + } + + for field in view.relation_fields() { + if field.reintrospected_relation() { + warnings.reintrospected_relations.push(generators::Model { + model: field.prisma_type().into_owned(), + }); + } + } +} diff --git a/introspection-engine/core/build.rs b/introspection-engine/core/build.rs index d9a74f6b8c89..2e8fe20c0503 100644 --- a/introspection-engine/core/build.rs +++ b/introspection-engine/core/build.rs @@ -3,7 +3,7 @@ use std::process::Command; fn store_git_commit_hash() { let output = Command::new("git").args(["rev-parse", "HEAD"]).output().unwrap(); let git_hash = String::from_utf8(output.stdout).unwrap(); - println!("cargo:rustc-env=GIT_HASH={}", git_hash); + println!("cargo:rustc-env=GIT_HASH={git_hash}"); } fn main() { diff --git a/introspection-engine/core/src/error.rs b/introspection-engine/core/src/error.rs index b71c83b6524d..815e61163b90 100644 --- a/introspection-engine/core/src/error.rs +++ b/introspection-engine/core/src/error.rs @@ -19,8 +19,8 @@ pub enum Error { impl Display for Error { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { - Error::ConnectorError(err) => write!(f, "Error in connector: {}", err), - Error::DatamodelError(err) => write!(f, "Error in datamodel:\n{}", err), + Error::ConnectorError(err) => write!(f, "Error in connector: {err}"), + Error::DatamodelError(err) => write!(f, "Error in datamodel:\n{err}"), Error::InvalidDatabaseUrl(err) => f.write_str(err), Error::IntrospectionResultEmpty => f.write_str("The introspected database was empty"), Error::Generic(err) => f.write_str(err), diff --git a/introspection-engine/core/src/error_rendering.rs b/introspection-engine/core/src/error_rendering.rs index 98131f50802a..a4f7c8df592e 100644 --- a/introspection-engine/core/src/error_rendering.rs +++ b/introspection-engine/core/src/error_rendering.rs @@ -43,6 +43,6 @@ fn fallback_jsonrpc_error(err: impl std::error::Error) -> JsonRpcError { JsonRpcError { code: jsonrpc_core::types::error::ErrorCode::ServerError(4466), message: "The migration engine encountered an error and failed to render it.".to_string(), - data: Some(serde_json::json!({ "backtrace": 
null, "message": format!("{}", err) })), + data: Some(serde_json::json!({ "backtrace": null, "message": format!("{err}") })), } } diff --git a/introspection-engine/datamodel-renderer/src/configuration/datasource.rs b/introspection-engine/datamodel-renderer/src/configuration/datasource.rs index 09f334235563..d711703d4e28 100644 --- a/introspection-engine/datamodel-renderer/src/configuration/datasource.rs +++ b/introspection-engine/datamodel-renderer/src/configuration/datasource.rs @@ -147,15 +147,15 @@ impl<'a> fmt::Display for Datasource<'a> { writeln!(f, "provider = {}", self.provider)?; writeln!(f, "url = {}", self.url)?; if let Some(direct_url) = self.direct_url { - writeln!(f, "directUrl = {}", direct_url)?; + writeln!(f, "directUrl = {direct_url}")?; } if let Some(url) = self.shadow_database_url { - writeln!(f, "shadowDatabaseUrl = {}", url)?; + writeln!(f, "shadowDatabaseUrl = {url}")?; } if let Some(relation_mode) = self.relation_mode { - writeln!(f, "relationMode = \"{}\"", relation_mode)?; + writeln!(f, "relationMode = \"{relation_mode}\"")?; } for (key, value) in self.custom_properties.iter() { diff --git a/introspection-engine/datamodel-renderer/src/configuration/generator.rs b/introspection-engine/datamodel-renderer/src/configuration/generator.rs index 3a1e80e4433c..eeadc3160ef7 100644 --- a/introspection-engine/datamodel-renderer/src/configuration/generator.rs +++ b/introspection-engine/datamodel-renderer/src/configuration/generator.rs @@ -137,11 +137,11 @@ impl<'a> fmt::Display for Generator<'a> { writeln!(f, "provider = {}", self.provider)?; if let Some(output) = self.output { - writeln!(f, "output = {}", output)?; + writeln!(f, "output = {output}")?; } if let Some(ref features) = self.preview_features { - writeln!(f, "previewFeatures = {}", features)?; + writeln!(f, "previewFeatures = {features}")?; } if !self.binary_targets.is_empty() { diff --git a/introspection-engine/introspection-engine-tests/build.rs b/introspection-engine/introspection-engine-tests/build.rs index ce17cbc76b7d..832922c0ab1a 100644 --- a/introspection-engine/introspection-engine-tests/build.rs +++ b/introspection-engine/introspection-engine-tests/build.rs @@ -30,7 +30,7 @@ fn find_all_sql_files(prefix: &str, all_sql_files: &mut Vec<String>) { let entry = entry.unwrap(); let file_name = entry.file_name(); let file_name = file_name.to_str().unwrap(); - let entry_path = format!("{}/{}", prefix, file_name); + let entry_path = format!("{prefix}/{file_name}"); let file_type = entry.file_type().unwrap(); if file_name == "." || file_name == ".." 
{ diff --git a/introspection-engine/introspection-engine-tests/src/test_api.rs b/introspection-engine/introspection-engine-tests/src/test_api.rs index ce76ddec5ce7..3e910ce4c007 100644 --- a/introspection-engine/introspection-engine-tests/src/test_api.rs +++ b/introspection-engine/introspection-engine-tests/src/test_api.rs @@ -144,7 +144,6 @@ impl TestApi { &self.namespaces } - #[track_caller] async fn test_introspect_internal( &self, previous_schema: psl::ValidatedSchema, @@ -160,7 +159,6 @@ impl TestApi { } #[tracing::instrument(skip(self, data_model_string))] - #[track_caller] pub async fn re_introspect(&self, data_model_string: &str) -> Result<String> { let schema = format!("{}{}", self.pure_config(), data_model_string); let schema = parse_datamodel(&schema); @@ -170,7 +168,6 @@ impl TestApi { } #[tracing::instrument(skip(self, data_model_string))] - #[track_caller] pub async fn re_introspect_dml(&self, data_model_string: &str) -> Result<String> { let data_model = parse_datamodel(&format!("{}{}", self.pure_config(), data_model_string)); let introspection_result = self.test_introspect_internal(data_model, false).await?; @@ -179,7 +176,6 @@ impl TestApi { } #[tracing::instrument(skip(self, data_model_string))] - #[track_caller] pub async fn re_introspect_config(&self, data_model_string: &str) -> Result<String> { let data_model = parse_datamodel(data_model_string); let introspection_result = self.test_introspect_internal(data_model, true).await?; @@ -261,7 +257,7 @@ impl TestApi { "" }; - let namespaces: Vec<String> = self.namespaces().iter().map(|ns| format!(r#""{}""#, ns)).collect(); + let namespaces: Vec<String> = self.namespaces().iter().map(|ns| format!(r#""{ns}""#)).collect(); let namespaces = if namespaces.is_empty() { "".to_string() @@ -300,13 +296,11 @@ impl TestApi { psl::parse_configuration(&self.pure_config()).unwrap() } - #[track_caller] pub async fn expect_datamodel(&self, expectation: &expect_test::Expect) { let found = self.introspect().await.unwrap(); expectation.assert_eq(&found); } - #[track_caller] pub async fn expect_warnings(&self, expectation: &expect_test::Expect) { let previous_schema = psl::validate(self.pure_config().into()); let introspection_result = self.test_introspect_internal(previous_schema, true).await.unwrap(); @@ -314,7 +308,6 @@ impl TestApi { expectation.assert_eq(&serde_json::to_string_pretty(&introspection_result.warnings).unwrap()); } - #[track_caller] pub async fn expect_no_warnings(&self) { let previous_schema = psl::validate(self.pure_config().into()); let introspection_result = self.test_introspect_internal(previous_schema, true).await.unwrap(); @@ -323,7 +316,6 @@ impl TestApi { assert!(introspection_result.warnings.is_empty()) } - #[track_caller] pub async fn expect_re_introspected_datamodel(&self, schema: &str, expectation: expect_test::Expect) { let data_model = parse_datamodel(&format!("{}{}", self.pure_config(), schema)); let reintrospected = self.test_introspect_internal(data_model, false).await.unwrap(); @@ -331,7 +323,6 @@ impl TestApi { expectation.assert_eq(&reintrospected.data_model); } - #[track_caller] pub async fn expect_re_introspect_warnings(&self, schema: &str, expectation: expect_test::Expect) { let data_model = parse_datamodel(&format!("{}{}", self.pure_config(), schema)); let introspection_result = self.test_introspect_internal(data_model, false).await.unwrap(); @@ -339,7 +330,6 @@ impl TestApi { expectation.assert_eq(&serde_json::to_string_pretty(&introspection_result.warnings).unwrap()); } - #[track_caller] pub fn assert_eq_datamodels(&self, 
expected_without_header: &str, result_with_header: &str) { let expected_with_source = self.dm_with_sources(expected_without_header); let expected_with_generator = self.dm_with_generator_and_preview_flags(&expected_with_source); @@ -365,11 +355,7 @@ impl TestApi { } fn generator_block(&self) -> String { - let preview_features: Vec<String> = self - .preview_features() - .iter() - .map(|pf| format!(r#""{}""#, pf)) - .collect(); + let preview_features: Vec<String> = self.preview_features().iter().map(|pf| format!(r#""{pf}""#)).collect(); let preview_feature_string = if preview_features.is_empty() { "".to_string() @@ -379,14 +365,12 @@ impl TestApi { let generator_block = format!( r#"generator client {{ - provider = "prisma-client-js"{} - }}"#, - preview_feature_string + provider = "prisma-client-js"{preview_feature_string} + }}"# ); generator_block } - #[track_caller] pub async fn raw_cmd(&self, query: &str) { self.database.raw_cmd(query).await.unwrap() } diff --git a/introspection-engine/introspection-engine-tests/tests/commenting_out/postgres.rs b/introspection-engine/introspection-engine-tests/tests/commenting_out/postgres.rs index 5cab9d4bbf1e..51b8ff30fd72 100644 --- a/introspection-engine/introspection-engine-tests/tests/commenting_out/postgres.rs +++ b/introspection-engine/introspection-engine-tests/tests/commenting_out/postgres.rs @@ -155,7 +155,7 @@ async fn a_table_with_only_an_unsupported_id(api: &TestApi) -> TestResult { let expected = json!([ { "code": 1, - "message": "The following models were commented out as they do not have a valid unique identifier or id. This is currently not supported by the Prisma Client.", + "message": "The following models were ignored as they do not have a valid unique identifier or id. This is currently not supported by the Prisma Client.", "affected": [{ "model": "Test" }] diff --git a/introspection-engine/introspection-engine-tests/tests/enums/cockroachdb.rs b/introspection-engine/introspection-engine-tests/tests/enums/cockroachdb.rs index cb7986cd9643..70e3d594c26e 100644 --- a/introspection-engine/introspection-engine-tests/tests/enums/cockroachdb.rs +++ b/introspection-engine/introspection-engine-tests/tests/enums/cockroachdb.rs @@ -244,16 +244,15 @@ async fn a_table_with_enum_default_values_that_look_like_booleans(api: &TestApi) r#" model News {{ id BigInt @id @default(autoincrement()) - confirmed {0} @default(true) + confirmed {enum_name} @default(true) }} - enum {0} {{ + enum {enum_name} {{ true false rumor }} "#, - enum_name, ); api.assert_eq_datamodels(&dm, &api.introspect().await?); diff --git a/introspection-engine/introspection-engine-tests/tests/enums/mod.rs b/introspection-engine/introspection-engine-tests/tests/enums/mod.rs index 6ce480d2cfdb..83e91e004a32 100644 --- a/introspection-engine/introspection-engine-tests/tests/enums/mod.rs +++ b/introspection-engine/introspection-engine-tests/tests/enums/mod.rs @@ -43,21 +43,20 @@ async fn a_table_with_enums(api: &TestApi) -> TestResult { r#" model Book {{ id Int @id @default(autoincrement()) - color {0} - color2 {1} + color {color} + color2 {color2} }} - enum {0} {{ + enum {color} {{ black white }} - enum {1} {{ + enum {color2} {{ black2 white2 }} "#, - color, color2, ); for _ in 0..4 { @@ -105,21 +104,20 @@ async fn a_table_enums_should_return_alphabetically_even_when_in_different_order r#" model Book {{ id Int @id @default(autoincrement()) - color {1} - color2 {0} + color {color} + color2 {color2} }} - enum {1} {{ + enum {color} {{ black white }} - enum {0} {{ + enum {color2} {{ black2 white2 }} "#, - 
color2, color, ); for _ in 0..4 { @@ -161,15 +159,14 @@ async fn a_table_with_enum_default_values(api: &TestApi) -> TestResult { r#" model Book {{ id Int @id @default(autoincrement()) - color {0} @default(black) + color {enum_name} @default(black) }} - enum {0} {{ + enum {enum_name} {{ black white }} "#, - enum_name, ); api.assert_eq_datamodels(&dm, &api.introspect().await?); diff --git a/introspection-engine/introspection-engine-tests/tests/multi_schema/cockroach.rs b/introspection-engine/introspection-engine-tests/tests/multi_schema/cockroach.rs index 7627e88e4b05..81f300cd8cae 100644 --- a/introspection-engine/introspection-engine-tests/tests/multi_schema/cockroach.rs +++ b/introspection-engine/introspection-engine-tests/tests/multi_schema/cockroach.rs @@ -296,7 +296,7 @@ async fn same_table_name_with_relation_in_two_schemas(api: &TestApi) -> TestResu CREATE TABLE "second_schema"."tbl" ( id SERIAL PRIMARY KEY, fst INT REFERENCES "first"."tbl"("id") ); "#; - api.raw_cmd(&sql).await; + api.raw_cmd(sql).await; let expected = expect![[r#" generator client { diff --git a/introspection-engine/introspection-engine-tests/tests/multi_schema/postgres.rs b/introspection-engine/introspection-engine-tests/tests/multi_schema/postgres.rs index 30d6ddb81a1d..b5923341db48 100644 --- a/introspection-engine/introspection-engine-tests/tests/multi_schema/postgres.rs +++ b/introspection-engine/introspection-engine-tests/tests/multi_schema/postgres.rs @@ -830,7 +830,7 @@ async fn same_table_name_with_relation_in_two_schemas(api: &TestApi) -> TestResu CREATE TABLE "second_schema"."tbl" ( id SERIAL PRIMARY KEY, fst INT REFERENCES "first"."tbl"("id") ); "#; - api.raw_cmd(&sql).await; + api.raw_cmd(sql).await; let expected = expect![[r#" generator client { diff --git a/introspection-engine/introspection-engine-tests/tests/multi_schema/sql_server.rs b/introspection-engine/introspection-engine-tests/tests/multi_schema/sql_server.rs index 6f31032a307e..ac300621fd62 100644 --- a/introspection-engine/introspection-engine-tests/tests/multi_schema/sql_server.rs +++ b/introspection-engine/introspection-engine-tests/tests/multi_schema/sql_server.rs @@ -53,7 +53,7 @@ async fn multiple_schemas_w_tables_are_introspected(api: &TestApi) -> TestResult let setup = format!("CREATE SCHEMA {other_name}"); api.database().raw_cmd(&setup).await?; - let setup = format!("CREATE SCHEMA third"); + let setup = "CREATE SCHEMA third".to_string(); api.database().raw_cmd(&setup).await?; let setup = formatdoc!( @@ -132,7 +132,7 @@ async fn multiple_schemas_w_tables_are_reintrospected(api: &TestApi) -> TestResu let setup = format!("CREATE SCHEMA {other_name}"); api.database().raw_cmd(&setup).await?; - let setup = format!("CREATE SCHEMA third"); + let setup = "CREATE SCHEMA third".to_string(); api.database().raw_cmd(&setup).await?; let setup = formatdoc!( diff --git a/introspection-engine/introspection-engine-tests/tests/native_types/mssql.rs b/introspection-engine/introspection-engine-tests/tests/native_types/mssql.rs index 01aabf7fa9cd..e6fce3d62fec 100644 --- a/introspection-engine/introspection-engine-tests/tests/native_types/mssql.rs +++ b/introspection-engine/introspection-engine-tests/tests/native_types/mssql.rs @@ -40,7 +40,7 @@ const TYPES: &[(&str, &str)] = &[ async fn native_type_columns_feature_on(api: &TestApi) -> TestResult { let columns: Vec<String> = TYPES .iter() - .map(|(name, db_type)| format!("[{}] {} NOT NULL", name, db_type)) + .map(|(name, db_type)| format!("[{name}] {db_type} NOT NULL")) .collect(); api.barrel() @@ -93,8 +93,8 @@ 
async fn native_type_columns_feature_on(api: &TestApi) -> TestResult { let result = api.introspect().await?; - println!("EXPECTATION: \n {:#}", types); - println!("RESULT: \n {:#}", result); + println!("EXPECTATION: \n {types:#}"); + println!("RESULT: \n {result:#}"); api.assert_eq_datamodels(types, &result); diff --git a/introspection-engine/introspection-engine-tests/tests/native_types/mysql.rs b/introspection-engine/introspection-engine-tests/tests/native_types/mysql.rs index 03c12ee9ba3a..586d7b9b899d 100644 --- a/introspection-engine/introspection-engine-tests/tests/native_types/mysql.rs +++ b/introspection-engine/introspection-engine-tests/tests/native_types/mysql.rs @@ -50,7 +50,7 @@ const TYPES: &[(&str, &str)] = &[ async fn native_type_columns_feature_on(api: &TestApi) -> TestResult { let columns: Vec<String> = TYPES .iter() - .map(|(name, db_type)| format!("`{}` {} Not Null", name, db_type)) + .map(|(name, db_type)| format!("`{name}` {db_type} Not Null")) .collect(); api.barrel() @@ -115,8 +115,8 @@ async fn native_type_columns_feature_on(api: &TestApi) -> TestResult { let result = api.introspect().await?; - println!("EXPECTATION: \n {:#}", types); - println!("RESULT: \n {:#}", result); + println!("EXPECTATION: \n {types:#}"); + println!("RESULT: \n {result:#}"); api.assert_eq_datamodels(&types, &result); diff --git a/introspection-engine/introspection-engine-tests/tests/native_types/postgres.rs b/introspection-engine/introspection-engine-tests/tests/native_types/postgres.rs index 2ec8c0ba659f..9747a4216ce9 100644 --- a/introspection-engine/introspection-engine-tests/tests/native_types/postgres.rs +++ b/introspection-engine/introspection-engine-tests/tests/native_types/postgres.rs @@ -40,7 +40,7 @@ const TYPES: &[(&str, &str)] = &[ async fn native_type_columns_feature_on(api: &TestApi) -> TestResult { let columns: Vec<String> = TYPES .iter() - .map(|(name, db_type)| format!("\"{}\" {} Not Null", name, db_type)) + .map(|(name, db_type)| format!("\"{name}\" {db_type} Not Null")) .collect(); api.barrel() @@ -92,8 +92,8 @@ async fn native_type_columns_feature_on(api: &TestApi) -> TestResult { let result = api.introspect().await?; - println!("EXPECTATION: \n {:#}", types); - println!("RESULT: \n {:#}", result); + println!("EXPECTATION: \n {types:#}"); + println!("RESULT: \n {result:#}"); api.assert_eq_datamodels(types, &result); @@ -147,8 +147,8 @@ async fn native_type_array_columns_feature_on(api: &TestApi) -> TestResult { let result = api.introspect().await?; - println!("EXPECTATION: \n {:#}", types); - println!("RESULT: \n {:#}", result); + println!("EXPECTATION: \n {types:#}"); + println!("RESULT: \n {result:#}"); api.assert_eq_datamodels(&types, &result); diff --git a/introspection-engine/introspection-engine-tests/tests/re_introspection/postgresql.rs b/introspection-engine/introspection-engine-tests/tests/re_introspection/postgresql.rs index 3f54214ffad7..acce9c1d0bd6 100644 --- a/introspection-engine/introspection-engine-tests/tests/re_introspection/postgresql.rs +++ b/introspection-engine/introspection-engine-tests/tests/re_introspection/postgresql.rs @@ -193,3 +193,87 @@ async fn mapped_enum_value_name(api: &TestApi) -> TestResult { Ok(()) } + +#[test_connector(tags(Postgres), exclude(CockroachDb))] +async fn ignore_docs_only_added_once(api: &TestApi) -> TestResult { + let setup = indoc! {r#" + CREATE TABLE "A" ( + id INT NULL + ); + "#}; + + api.raw_cmd(setup).await; + + let input_dm = indoc! 
{r#" + /// The underlying table does not contain a valid unique identifier and can therefore currently not be handled by the Prisma Client. + model A { + id Int? + + @@ignore + } + "#}; + + let expectation = expect![[r#" + /// The underlying table does not contain a valid unique identifier and can therefore currently not be handled by the Prisma Client. + model A { + id Int? + + @@ignore + } + "#]]; + + api.expect_re_introspected_datamodel(input_dm, expectation).await; + + let expectation = expect!["[]"]; + api.expect_re_introspect_warnings(input_dm, expectation).await; + + Ok(()) +} + +#[test_connector(tags(Postgres), exclude(CockroachDb))] +async fn reserved_name_docs_are_only_added_once(api: &TestApi) -> TestResult { + let setup = indoc! {r#" + CREATE TABLE "if" ( + id INT PRIMARY KEY + ); + "#}; + + api.raw_cmd(setup).await; + + let input_dm = indoc! {r#" + /// This model has been renamed to Renamedif during introspection, because the original name if is reserved. + model Renamedif { + id Int @id + + @@map("if") + } + "#}; + + let expectation = expect![[r#" + /// This model has been renamed to Renamedif during introspection, because the original name if is reserved. + model Renamedif { + id Int @id + + @@map("if") + } + "#]]; + + api.expect_re_introspected_datamodel(input_dm, expectation).await; + + let expectation = expect![[r#" + [ + { + "code": 7, + "message": "These models were enriched with `@@map` information taken from the previous Prisma schema.", + "affected": [ + { + "model": "Renamedif" + } + ] + } + ]"#]]; + + api.expect_re_introspect_warnings(input_dm, expectation).await; + + Ok(()) +} diff --git a/introspection-engine/introspection-engine-tests/tests/referential_actions/mysql.rs b/introspection-engine/introspection-engine-tests/tests/referential_actions/mysql.rs index afde905f94a7..c4d2e147eb47 100644 --- a/introspection-engine/introspection-engine-tests/tests/referential_actions/mysql.rs +++ b/introspection-engine/introspection-engine-tests/tests/referential_actions/mysql.rs @@ -55,7 +55,7 @@ async fn introspect_set_default_should_warn(api: &TestApi) -> TestResult { let warning_messages = schema .diagnostics - .warnings_to_pretty_string("schema.prisma", &schema.db.source()); + .warnings_to_pretty_string("schema.prisma", schema.db.source()); let expected_validation = expect![[r#" warning: MySQL does not actually support the `SetDefault` referential action, so using it may result in unexpected errors. Read more at https://pris.ly/d/mysql-set-default  diff --git a/introspection-engine/introspection-engine-tests/tests/remapping_database_names/mod.rs b/introspection-engine/introspection-engine-tests/tests/remapping_database_names/mod.rs index 3c8e2a1bdc88..f303533ed58b 100644 --- a/introspection-engine/introspection-engine-tests/tests/remapping_database_names/mod.rs +++ b/introspection-engine/introspection-engine-tests/tests/remapping_database_names/mod.rs @@ -292,15 +292,14 @@ async fn remapping_enum_values(api: &TestApi) -> TestResult { r#" model Book {{ id Int @id @default(autoincrement()) - color {0}? + color {enum_name}? 
}} - enum {0} {{ + enum {enum_name} {{ b_lack @map("b lack") w_hite @map("w hite") }} - "#, - enum_name + "# ); api.assert_eq_datamodels(&dm, &api.introspect().await?); @@ -340,15 +339,14 @@ async fn remapping_enum_default_values(api: &TestApi) -> TestResult { r#" model Book {{ id Int @id @default(autoincrement()) - color {0} @default(b_lack) + color {enum_name} @default(b_lack) }} - enum {0} {{ + enum {enum_name} {{ b_lack @map("b lack") white }} - "#, - enum_name + "# ); api.assert_eq_datamodels(&dm, &api.introspect().await?); @@ -440,9 +438,9 @@ async fn not_automatically_remapping_invalid_compound_primary_key_names(api: &Te first Int last Int - @@id([first, last]{}) + @@id([first, last]{pk_name}) }} - "#, pk_name}; + "#}; api.assert_eq_datamodels(&dm, &api.introspect().await?); Ok(()) diff --git a/introspection-engine/introspection-engine-tests/tests/simple.rs b/introspection-engine/introspection-engine-tests/tests/simple.rs index 64b0b33455c7..4d220824f6da 100644 --- a/introspection-engine/introspection-engine-tests/tests/simple.rs +++ b/introspection-engine/introspection-engine-tests/tests/simple.rs @@ -14,8 +14,7 @@ fn run_simple_test(test_file_path: &str, test_function_name: &'static str) { let expected_tags_prefix = "-- tags="; assert!( first_line.starts_with(expected_tags_prefix), - "The first line of a simple test must start with \"{}\"", - expected_tags_prefix + "The first line of a simple test must start with \"{expected_tags_prefix}\"" ); let tags = first_line.trim_start_matches(expected_tags_prefix); test_setup::tags_from_comma_separated_list(tags) diff --git a/introspection-engine/introspection-engine-tests/tests/views/postgresql.rs b/introspection-engine/introspection-engine-tests/tests/views/postgresql.rs index f24f62cb7dbd..2d306fc5018e 100644 --- a/introspection-engine/introspection-engine-tests/tests/views/postgresql.rs +++ b/introspection-engine/introspection-engine-tests/tests/views/postgresql.rs @@ -85,8 +85,8 @@ async fn simple_view_from_one_table(api: &TestApi) -> TestResult { let expected = expect![[r#" [ { - "code": 23, - "message": "The following views were commented out as they do not have a valid unique identifier or id. This is currently not supported by the Prisma Client.", + "code": 24, + "message": "The following views were ignored as they do not have a valid unique identifier or id. This is currently not supported by the Prisma Client.", "affected": [ { "view": "Schwuser" @@ -707,8 +707,8 @@ async fn unsupported_types_trigger_a_warning(api: &TestApi) -> TestResult { let expected = expect![[r#" [ { - "code": 23, - "message": "The following views were commented out as they do not have a valid unique identifier or id. This is currently not supported by the Prisma Client.", + "code": 24, + "message": "The following views were ignored as they do not have a valid unique identifier or id. 
This is currently not supported by the Prisma Client.", "affected": [ { "view": "A" @@ -716,7 +716,7 @@ async fn unsupported_types_trigger_a_warning(api: &TestApi) -> TestResult { ] }, { - "code": 20, + "code": 21, "message": "These fields are not supported by the Prisma Client, because Prisma currently does not support their types.", "affected": [ { @@ -762,7 +762,7 @@ async fn re_intro_keeps_the_map(api: &TestApi) -> TestResult { let expected = expect![[r#" [ { - "code": 22, + "code": 23, "message": "These views were enriched with `@@map` information taken from the previous Prisma schema.", "affected": [ { @@ -802,7 +802,7 @@ async fn re_intro_keeps_the_field_map(api: &TestApi) -> TestResult { let expected = expect![[r#" [ { - "code": 21, + "code": 22, "message": "These fields were enriched with `@map` information taken from the previous Prisma schema.", "affected": [ { @@ -921,7 +921,7 @@ async fn id_names_are_reintrospected(api: &TestApi) -> TestResult { let expected = expect![[r#" [ { - "code": 24, + "code": 25, "message": "These views were enriched with custom compound id names taken from the previous Prisma schema.", "affected": [ { @@ -970,8 +970,8 @@ async fn invalid_field_names_trigger_warnings(api: &TestApi) -> TestResult { let expected = expect![[r#" [ { - "code": 23, - "message": "The following views were commented out as they do not have a valid unique identifier or id. This is currently not supported by the Prisma Client.", + "code": 24, + "message": "The following views were ignored as they do not have a valid unique identifier or id. This is currently not supported by the Prisma Client.", "affected": [ { "view": "A" @@ -979,7 +979,7 @@ async fn invalid_field_names_trigger_warnings(api: &TestApi) -> TestResult { ] }, { - "code": 25, + "code": 26, "message": "These fields were commented out because their names are currently not supported by Prisma. Please provide valid ones that match [a-zA-Z][a-zA-Z0-9_]* using the `@map` attribute.", "affected": [ { @@ -1044,8 +1044,8 @@ async fn dupes_are_renamed(api: &TestApi) -> TestResult { let expected = expect![[r#" [ { - "code": 23, - "message": "The following views were commented out as they do not have a valid unique identifier or id. This is currently not supported by the Prisma Client.", + "code": 24, + "message": "The following views were ignored as they do not have a valid unique identifier or id. This is currently not supported by the Prisma Client.", "affected": [ { "view": "public_A" @@ -1114,3 +1114,85 @@ async fn dupe_views_are_not_considered_without_preview_feature(api: &TestApi) -> Ok(()) } + +#[test_connector(tags(Postgres), exclude(CockroachDb), preview_features("views"))] +async fn ignore_docs_only_added_once(api: &TestApi) -> TestResult { + let setup = indoc! {r#" + CREATE VIEW "A" AS SELECT 1 AS id; + "#}; + + api.raw_cmd(setup).await; + + let input_dm = indoc! {r#" + /// The underlying view does not contain a valid unique identifier and can therefore currently not be handled by the Prisma Client. + view A { + id Int? + + @@ignore + } + "#}; + + let expectation = expect![[r#" + /// The underlying view does not contain a valid unique identifier and can therefore currently not be handled by the Prisma Client. + view A { + id Int? 
+ + @@ignore + } + "#]]; + + api.expect_re_introspected_datamodel(input_dm, expectation).await; + + let expectation = expect!["[]"]; + api.expect_re_introspect_warnings(input_dm, expectation).await; + + Ok(()) +} + +#[test_connector(tags(Postgres), exclude(CockroachDb), preview_features("views"))] +async fn reserved_name_docs_are_only_added_once(api: &TestApi) -> TestResult { + let setup = indoc! {r#" + CREATE VIEW "if" AS SELECT 1 AS id; + "#}; + + api.raw_cmd(setup).await; + + let input_dm = indoc! {r#" + /// This view has been renamed to Renamedif during introspection, because the original name if is reserved. + view Renamedif { + id Int? + + @@map("if") + @@ignore + } + "#}; + + let expectation = expect![[r#" + /// This view has been renamed to Renamedif during introspection, because the original name if is reserved. + view Renamedif { + id Int? + + @@map("if") + @@ignore + } + "#]]; + + api.expect_re_introspected_datamodel(input_dm, expectation).await; + + let expectation = expect![[r#" + [ + { + "code": 23, + "message": "These views were enriched with `@@map` information taken from the previous Prisma schema.", + "affected": [ + { + "view": "Renamedif" + } + ] + } + ]"#]]; + + api.expect_re_introspect_warnings(input_dm, expectation).await; + + Ok(()) +} diff --git a/libs/mongodb-client/src/lib.rs b/libs/mongodb-client/src/lib.rs index a1f1caf62a54..584568e178f8 100644 --- a/libs/mongodb-client/src/lib.rs +++ b/libs/mongodb-client/src/lib.rs @@ -143,8 +143,7 @@ impl FromStr for MongoConnectionString { Some(part) => { if part.is_empty() { return Err(ErrorKind::invalid_argument(format!( - "invalid server address: \"{}\"; hostname cannot be empty", - address + "invalid server address: \"{address}\"; hostname cannot be empty" )) .into()); } } None => { return Err( - ErrorKind::invalid_argument(format!("invalid server address: \"{}\"", address)).into(), + ErrorKind::invalid_argument(format!("invalid server address: \"{address}\"")).into(), ); } }; @@ -161,22 +160,19 @@ Some(part) => { let port = u16::from_str(part).map_err(|_| { ErrorKind::invalid_argument(format!( - "port must be valid 16-bit unsigned integer, instead got: {}", - part + "port must be valid 16-bit unsigned integer, instead got: {part}" )) })?; if port == 0 { return Err(ErrorKind::invalid_argument(format!( - "invalid server address: \"{}\"; port must be non-zero", - address + "invalid server address: \"{address}\"; port must be non-zero" )) .into()); } if parts.next().is_some() { return Err(ErrorKind::invalid_argument(format!( - "address \"{}\" contains more than one unescaped ':'", - address + "address \"{address}\" contains more than one unescaped ':'" )) .into()); } diff --git a/libs/prisma-value/src/lib.rs b/libs/prisma-value/src/lib.rs index 6b4941cfe1f3..f0239bf451a8 100644 --- a/libs/prisma-value/src/lib.rs +++ b/libs/prisma-value/src/lib.rs @@ -307,18 +307,18 @@ impl fmt::Display for PrismaValue { PrismaValue::Xml(x) => x.fmt(f), PrismaValue::BigInt(x) => x.fmt(f), PrismaValue::List(x) => { - let as_string = format!("{:?}", x); + let as_string = format!("{x:?}"); as_string.fmt(f) } PrismaValue::Bytes(b) => encode_bytes(b).fmt(f), PrismaValue::Object(pairs) => { let joined = pairs .iter() - .map(|(key, value)| format!(r#""{}": {}"#, key, value)) + .map(|(key, value)| format!(r#""{key}": {value}"#)) .collect::<Vec<_>>() .join(", "); - write!(f, "{{ {} }}", joined) + write!(f, "{{ {joined} }}") } } } diff --git 
a/libs/sql-ddl/src/mysql.rs b/libs/sql-ddl/src/mysql.rs index c1f64f31aba2..4e745045103c 100644 --- a/libs/sql-ddl/src/mysql.rs +++ b/libs/sql-ddl/src/mysql.rs @@ -58,15 +58,15 @@ pub enum AlterTableClause<'a> { impl Display for AlterTableClause<'_> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { - AlterTableClause::RenameTo { next_name } => write!(f, "RENAME TO {}", next_name), + AlterTableClause::RenameTo { next_name } => write!(f, "RENAME TO {next_name}"), AlterTableClause::RenameIndex { previous_name, next_name, - } => write!(f, "RENAME INDEX `{}` TO `{}`", previous_name, next_name), - AlterTableClause::DropColumn { column_name } => write!(f, "DROP COLUMN `{}`", column_name), - AlterTableClause::DropForeignKey { constraint_name } => write!(f, "DROP FOREIGN KEY `{}`", constraint_name), + } => write!(f, "RENAME INDEX `{previous_name}` TO `{next_name}`"), + AlterTableClause::DropColumn { column_name } => write!(f, "DROP COLUMN `{column_name}`"), + AlterTableClause::DropForeignKey { constraint_name } => write!(f, "DROP FOREIGN KEY `{constraint_name}`"), AlterTableClause::DropPrimaryKey => f.write_str("DROP PRIMARY KEY"), - AlterTableClause::AddForeignKey(fk) => write!(f, "ADD {}", fk), + AlterTableClause::AddForeignKey(fk) => write!(f, "ADD {fk}"), } } } @@ -84,7 +84,7 @@ pub struct ForeignKey<'a> { impl<'a> Display for ForeignKey<'a> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { if let Some(constraint_name) = &self.constraint_name { - write!(f, "CONSTRAINT `{constraint_name}` ", constraint_name = constraint_name,)?; + write!(f, "CONSTRAINT `{constraint_name}` ")?; } f.write_str("FOREIGN KEY (")?; @@ -209,7 +209,7 @@ impl Display for CreateIndex<'_> { let mut rendered = Ident(&s.name).to_string(); if let Some(length) = s.length { - write!(rendered, "({})", length).unwrap(); + write!(rendered, "({length})").unwrap(); } if let Some(sort_order) = s.sort_order { @@ -261,7 +261,7 @@ impl Display for CreateTable<'_> { let mut rendered = Ident(&col.name).to_string(); if let Some(length) = col.length { - write!(rendered, "({})", length).unwrap(); + write!(rendered, "({length})").unwrap(); } if let Some(sort_order) = col.sort_order { @@ -356,7 +356,7 @@ impl Display for IndexClause<'_> { let mut rendered = format!("{}", Ident(col.name.as_ref())); if let Some(length) = col.length { - write!(rendered, "({})", length).unwrap(); + write!(rendered, "({length})").unwrap(); }; if let Some(sort_order) = col.sort_order { diff --git a/libs/sql-ddl/src/postgres.rs b/libs/sql-ddl/src/postgres.rs index f8747ec3e499..4e8190d5e0c0 100644 --- a/libs/sql-ddl/src/postgres.rs +++ b/libs/sql-ddl/src/postgres.rs @@ -195,11 +195,7 @@ pub struct ForeignKey<'a> { impl Display for ForeignKey<'_> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { if let Some(constraint_name) = &self.constraint_name { - write!( - f, - "CONSTRAINT \"{constraint_name}\" ", - constraint_name = constraint_name, - )?; + write!(f, "CONSTRAINT \"{constraint_name}\" ",)?; } f.write_str("FOREIGN KEY (")?; diff --git a/libs/sql-ddl/src/sqlite.rs b/libs/sql-ddl/src/sqlite.rs index 9d081f2b2dbe..69d0ea00d7d9 100644 --- a/libs/sql-ddl/src/sqlite.rs +++ b/libs/sql-ddl/src/sqlite.rs @@ -31,12 +31,7 @@ impl Display for CreateTable<'_> { } for foreign_key in &self.foreign_keys { - write!( - f, - ",\n{indentation}{fk}", - indentation = SQL_INDENTATION, - fk = foreign_key - )?; + write!(f, ",\n{SQL_INDENTATION}{foreign_key}")?; } write!(f, "\n)") @@ -90,7 +85,7 @@ impl Display for 
ForeignKeyAction { impl Display for ForeignKey<'_> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { if let Some(constraint_name) = &self.constraint_name { - write!(f, "CONSTRAINT \"{}\" ", constraint_name)?; + write!(f, "CONSTRAINT \"{constraint_name}\" ")?; } f.write_str("FOREIGN KEY (")?; diff --git a/libs/sql-schema-describer/src/lib.rs b/libs/sql-schema-describer/src/lib.rs index 49b582f7a39b..fbe4301ca03d 100644 --- a/libs/sql-schema-describer/src/lib.rs +++ b/libs/sql-schema-describer/src/lib.rs @@ -790,7 +790,7 @@ impl DefaultValue { } #[cfg(test)] - pub(crate) fn as_sequence<'a>(&'a self) -> Option<&'a str> { + pub(crate) fn as_sequence(&self) -> Option<&str> { match self.kind { DefaultKind::Sequence(ref name) => Some(name), _ => None, diff --git a/libs/sql-schema-describer/src/mssql.rs b/libs/sql-schema-describer/src/mssql.rs index 092a37f65643..584436e618c7 100644 --- a/libs/sql-schema-describer/src/mssql.rs +++ b/libs/sql-schema-describer/src/mssql.rs @@ -337,7 +337,7 @@ impl<'a> SqlSchemaDescriber<'a> { .or_else(|| DEFAULT_STRING.captures_iter(&default_string).next()) .or_else(|| DEFAULT_DB_GEN.captures_iter(&default_string).next()) .map(|cap| cap[1].to_string()) - .ok_or_else(|| format!("Couldn't parse default value: `{}`", default_string)) + .ok_or_else(|| format!("Couldn't parse default value: `{default_string}`")) .unwrap(); let mut default = match tpe.family { @@ -612,9 +612,9 @@ impl<'a> SqlSchemaDescriber<'a> { let definition = row .get_string("system_type_name") .map(|name| match (max_length, precision, scale) { - (Some(len), _, _) if len == -1 => format!("{}(max)", name), - (Some(len), _, _) => format!("{}({})", name, len), - (_, Some(p), Some(s)) => format!("{}({},{})", name, p, s), + (Some(len), _, _) if len == -1 => format!("{name}(max)"), + (Some(len), _, _) => format!("{name}({len})"), + (_, Some(p), Some(s)) => format!("{name}({p},{s})"), _ => name, }); @@ -729,7 +729,7 @@ impl<'a> SqlSchemaDescriber<'a> { 1 => ForeignKeyAction::Cascade, 2 => ForeignKeyAction::SetNull, 3 => ForeignKeyAction::SetDefault, - s => panic!("Unrecognized on delete action '{}'", s), + s => panic!("Unrecognized on delete action '{s}'"), }; let on_update_action = match row.get_expect_i64("update_referential_action") { @@ -737,7 +737,7 @@ impl<'a> SqlSchemaDescriber<'a> { 1 => ForeignKeyAction::Cascade, 2 => ForeignKeyAction::SetNull, 3 => ForeignKeyAction::SetDefault, - s => panic!("Unrecognized on delete action '{}'", s), + s => panic!("Unrecognized on delete action '{s}'"), }; match &current_fk { @@ -774,28 +774,28 @@ impl<'a> SqlSchemaDescriber<'a> { // TODO: can we achieve this more elegantly? 
let params = match data_type { "numeric" | "decimal" => match (numeric_precision, numeric_scale) { - (Some(p), Some(s)) => Cow::from(format!("({},{})", p, s)), + (Some(p), Some(s)) => Cow::from(format!("({p},{s})")), (None, None) => Cow::from(""), _ => unreachable!("Unexpected params for a decimal field."), }, "float" => match numeric_precision { - Some(p) => Cow::from(format!("({})", p)), + Some(p) => Cow::from(format!("({p})")), None => Cow::from(""), }, "varchar" | "nvarchar" | "varbinary" => match character_maximum_length { Some(-1) => Cow::from("(max)"), - Some(length) => Cow::from(format!("({})", length)), + Some(length) => Cow::from(format!("({length})")), None => Cow::from(""), }, "char" | "nchar" | "binary" => match character_maximum_length { Some(-1) => unreachable!("Cannot have a `max` variant for type `{}`", data_type), - Some(length) => Cow::from(format!("({})", length)), + Some(length) => Cow::from(format!("({length})")), None => Cow::from(""), }, _ => Cow::from(""), }; - let full_data_type = format!("{}{}", data_type, params); + let full_data_type = format!("{data_type}{params}"); let casted_character_maximum_length = character_maximum_length.map(|x| x as u32); let type_parameter = parse_type_parameter(character_maximum_length); diff --git a/libs/sql-schema-describer/src/mysql.rs b/libs/sql-schema-describer/src/mysql.rs index 2e91e9d681a3..d225ba55bf71 100644 --- a/libs/sql-schema-describer/src/mysql.rs +++ b/libs/sql-schema-describer/src/mysql.rs @@ -131,7 +131,7 @@ async fn push_indexes( let sort_order = row.get_string("column_order").map(|v| match v.as_ref() { "A" => SQLSortOrder::Asc, "D" => SQLSortOrder::Desc, - misc => panic!("Unexpected sort order `{}`, collation should be A, D or Null", misc), + misc => panic!("Unexpected sort order `{misc}`, collation should be A, D or Null"), }); let column_name = if let Some(name) = row.get_string("column_name") { name @@ -205,7 +205,7 @@ impl<'a> SqlSchemaDescriber<'a> { .map(|row| row.get_expect_string("schema_name")) .collect(); - trace!("Found schema names: {:?}", names); + trace!("Found schema names: {names:?}"); Ok(names) } @@ -285,7 +285,7 @@ impl<'a> SqlSchemaDescriber<'a> { map.insert(cloned_name, id); } - trace!("Found table names: {:?}", map); + trace!("Found table names: {map:?}"); Ok(map) } @@ -309,7 +309,7 @@ impl<'a> SqlSchemaDescriber<'a> { }) .unwrap_or(0); - trace!("Found db size: {:?}", size); + trace!("Found db size: {size:?}"); Ok(size) } @@ -346,7 +346,7 @@ impl<'a> SqlSchemaDescriber<'a> { let rows = conn.query_raw(sql, &[schema_name.into()]).await?; for col in rows { - trace!("Got column: {:?}", col); + trace!("Got column: {col:?}"); let table_name = col.get_expect_string("table_name"); let table_id = if let Some(id) = table_ids.get(table_name.as_str()) { *id @@ -361,7 +361,7 @@ impl<'a> SqlSchemaDescriber<'a> { let is_required = match is_nullable.as_ref() { "no" => true, "yes" => false, - x => panic!("unrecognized is_nullable variant '{}'", x), + x => panic!("unrecognized is_nullable variant '{x}'"), }; let arity = if is_required { @@ -589,7 +589,7 @@ impl<'a> SqlSchemaDescriber<'a> { "mediumtext" => (ColumnTypeFamily::String, Some(MySqlType::MediumText)), "longtext" => (ColumnTypeFamily::String, Some(MySqlType::LongText)), "enum" => { - let enum_name = format!("{}_{}", table, column_name); + let enum_name = format!("{table}_{column_name}"); let enum_id = sql_schema.push_enum(Default::default(), enum_name); push_enum_variants(full_data_type, enum_id, sql_schema); (ColumnTypeFamily::Enum(enum_id), None) @@ 
-747,7 +747,7 @@ async fn push_foreign_keys( let mut current_fk: Option<(TableId, String, ForeignKeyId)> = None; for row in result_set.into_iter() { - trace!("Got description FK row {:#?}", row); + trace!("Got description FK row {row:#?}"); let (table_id, column_id, referenced_table_id, referenced_column_id) = if let Some(ids) = get_ids(&row, table_ids, sql_schema) { ids @@ -761,7 +761,7 @@ async fn push_foreign_keys( "set default" => ForeignKeyAction::SetDefault, "restrict" => ForeignKeyAction::Restrict, "no action" => ForeignKeyAction::NoAction, - s => panic!("Unrecognized on delete action '{}'", s), + s => panic!("Unrecognized on delete action '{s}'"), }; let on_update_action = match row.get_expect_string("update_rule").to_lowercase().as_str() { "cascade" => ForeignKeyAction::Cascade, @@ -769,7 +769,7 @@ async fn push_foreign_keys( "set default" => ForeignKeyAction::SetDefault, "restrict" => ForeignKeyAction::Restrict, "no action" => ForeignKeyAction::NoAction, - s => panic!("Unrecognized on update action '{}'", s), + s => panic!("Unrecognized on update action '{s}'"), }; match &current_fk { diff --git a/libs/sql-schema-describer/src/postgres.rs b/libs/sql-schema-describer/src/postgres.rs index 6534ca8c7aba..ed7c2861f8a5 100644 --- a/libs/sql-schema-describer/src/postgres.rs +++ b/libs/sql-schema-describer/src/postgres.rs @@ -730,10 +730,9 @@ impl<'a> SqlSchemaDescriber<'a> { AND relname = info.table_name AND namespace = info.table_schema LEFT OUTER JOIN pg_attrdef attdef ON attdef.adrelid = att.attrelid AND attdef.adnum = att.attnum AND table_schema = namespace - WHERE table_schema = ANY ( $1 ) {} + WHERE table_schema = ANY ( $1 ) {is_visible_clause} ORDER BY namespace, table_name, ordinal_position; - "#, - is_visible_clause, + "# ); let rows = self @@ -762,7 +761,7 @@ impl<'a> SqlSchemaDescriber<'a> { let is_identity = match col.get_string("is_identity") { Some(is_id) if is_id.eq_ignore_ascii_case("yes") => true, Some(is_id) if is_id.eq_ignore_ascii_case("no") => false, - Some(is_identity_str) => panic!("unrecognized is_identity variant '{}'", is_identity_str), + Some(is_identity_str) => panic!("unrecognized is_identity variant '{is_identity_str}'"), None => false, }; @@ -994,8 +993,8 @@ impl<'a> SqlSchemaDescriber<'a> { let referenced_schema_name = row.get_expect_string("referenced_schema_name"); if !sql_schema.namespaces.contains(&referenced_schema_name) { return Err(DescriberError::from(DescriberErrorKind::CrossSchemaReference { - from: format!("{}.{}", sql_schema.namespaces[0], table_name), - to: format!("{}.{}", referenced_schema_name, referenced_table), + from: format!("{}.{table_name}", sql_schema.namespaces[0]), + to: format!("{referenced_schema_name}.{referenced_table}"), constraint: constraint_name, missing_namespace: referenced_schema_name, })); @@ -1029,7 +1028,7 @@ impl<'a> SqlSchemaDescriber<'a> { 'c' => ForeignKeyAction::Cascade, 'n' => ForeignKeyAction::SetNull, 'd' => ForeignKeyAction::SetDefault, - _ => panic!("unrecognized foreign key action (on delete) '{}'", confdeltype), + _ => panic!("unrecognized foreign key action (on delete) '{confdeltype}'"), }; let on_update_action = match confupdtype { 'a' => ForeignKeyAction::NoAction, @@ -1037,7 +1036,7 @@ impl<'a> SqlSchemaDescriber<'a> { 'c' => ForeignKeyAction::Cascade, 'n' => ForeignKeyAction::SetNull, 'd' => ForeignKeyAction::SetDefault, - _ => panic!("unrecognized foreign key action (on update) '{}'", confupdtype), + _ => panic!("unrecognized foreign key action (on update) '{confupdtype}'"), }; match current_fk { @@ 
-1101,10 +1100,7 @@ impl<'a> SqlSchemaDescriber<'a> { let sort_order = row.get_string("column_order").map(|v| match v.as_ref() { "ASC" => SQLSortOrder::Asc, "DESC" => SQLSortOrder::Desc, - misc => panic!( - "Unexpected sort order `{}`, collation should be ASC, DESC or Null", - misc - ), + misc => panic!("Unexpected sort order `{misc}`, collation should be ASC, DESC or Null"), }); let algorithm = if self.is_cockroach() { @@ -1277,7 +1273,7 @@ fn get_column_type_postgresql(row: &ResultRow, schema: &SqlSchema) -> ColumnType let is_required = match row.get_expect_string("is_nullable").to_lowercase().as_ref() { "no" => true, "yes" => false, - x => panic!("unrecognized is_nullable variant '{}'", x), + x => panic!("unrecognized is_nullable variant '{x}'"), }; let arity = match matches!(data_type.as_str(), "ARRAY") { @@ -1371,7 +1367,7 @@ fn get_column_type_cockroachdb(row: &ResultRow, schema: &SqlSchema) -> ColumnTyp let is_required = match row.get_expect_string("is_nullable").to_lowercase().as_ref() { "no" => true, "yes" => false, - x => panic!("unrecognized is_nullable variant '{}'", x), + x => panic!("unrecognized is_nullable variant '{x}'"), }; let arity = match matches!(data_type.as_str(), "ARRAY") { diff --git a/libs/sql-schema-describer/src/sqlite.rs b/libs/sql-schema-describer/src/sqlite.rs index 72d991dd45e5..047030169135 100644 --- a/libs/sql-schema-describer/src/sqlite.rs +++ b/libs/sql-schema-describer/src/sqlite.rs @@ -198,7 +198,7 @@ impl<'a> SqlSchemaDescriber<'a> { table_ids: &IndexMap<String, TableId>, schema: &mut SqlSchema, ) -> DescriberResult<()> { - let sql = format!(r#"PRAGMA foreign_key_list("{}");"#, table_name); + let sql = format!(r#"PRAGMA foreign_key_list("{table_name}");"#); let result_set = self.conn.query_raw(&sql, &[]).await?; let mut current_foreign_key: Option<(i64, ForeignKeyId)> = None; let mut current_foreign_key_columns: Vec<(i64, TableColumnId, Option<String>)> = Vec::new(); @@ -226,7 +226,7 @@ impl<'a> SqlSchemaDescriber<'a> { "set null" => ForeignKeyAction::SetNull, "set default" => ForeignKeyAction::SetDefault, "cascade" => ForeignKeyAction::Cascade, - s => panic!("Unrecognized on delete action '{}'", s), + s => panic!("Unrecognized on delete action '{s}'"), }; let on_update_action = match row.get_expect_string("on_update").to_lowercase().as_str() { "no action" => ForeignKeyAction::NoAction, @@ -234,7 +234,7 @@ impl<'a> SqlSchemaDescriber<'a> { "set null" => ForeignKeyAction::SetNull, "set default" => ForeignKeyAction::SetDefault, "cascade" => ForeignKeyAction::Cascade, - s => panic!("Unrecognized on update action '{}'", s), + s => panic!("Unrecognized on update action '{s}'"), }; [on_delete_action, on_update_action] } @@ -322,11 +322,11 @@ async fn push_columns( schema: &mut SqlSchema, conn: &(dyn Connection + Send + Sync), ) -> DescriberResult<()> { - let sql = format!(r#"PRAGMA table_info ("{}")"#, table_name); + let sql = format!(r#"PRAGMA table_info ("{table_name}")"#); let result_set = conn.query_raw(&sql, &[]).await?; let mut pk_cols: BTreeMap<i64, TableColumnId> = BTreeMap::new(); for row in result_set { - trace!("Got column row {:?}", row); + trace!("Got column row {row:?}"); let is_required = row.get("notnull").and_then(|x| x.as_bool()).expect("notnull"); let arity = if is_required { @@ -446,7 +446,7 @@ async fn push_indexes( schema: &mut SqlSchema, conn: &(dyn Connection + Send + Sync), ) -> DescriberResult<()> { - let sql = format!(r#"PRAGMA index_list("{}");"#, table); + let sql = format!(r#"PRAGMA index_list("{table}");"#); let result_set = conn.query_raw(&sql, &[]).await?; let mut 
indexes = Vec::new(); // (index_name, is_unique, columns) @@ -464,9 +464,9 @@ async fn push_indexes( let index_name = row.get_expect_string("name"); let mut columns = Vec::new(); - let sql = format!(r#"PRAGMA index_info("{}");"#, index_name); + let sql = format!(r#"PRAGMA index_info("{index_name}");"#); let result_set = conn.query_raw(&sql, &[]).await?; - trace!("Got index description results: {:?}", result_set); + trace!("Got index description results: {result_set:?}"); for row in result_set.into_iter() { // if the index is on a rowid or expression, the name of the column will be null, @@ -482,9 +482,9 @@ async fn push_indexes( } } - let sql = format!(r#"PRAGMA index_xinfo("{}");"#, index_name); + let sql = format!(r#"PRAGMA index_xinfo("{index_name}");"#); let result_set = conn.query_raw(&sql, &[]).await?; - trace!("Got index description results: {:?}", result_set); + trace!("Got index description results: {result_set:?}"); for row in result_set.into_iter() { //if the index is on a rowid or expression, the name of the column will be null, we ignore these for now diff --git a/libs/sql-schema-describer/tests/describers/postgres_describer_tests.rs b/libs/sql-schema-describer/tests/describers/postgres_describer_tests.rs index 19e1a1cedeed..b1840cb699dd 100644 --- a/libs/sql-schema-describer/tests/describers/postgres_describer_tests.rs +++ b/libs/sql-schema-describer/tests/describers/postgres_describer_tests.rs @@ -1046,15 +1046,14 @@ fn cross_schema_references_are_not_allowed(api: TestApi) { let schema2 = format!("{}_2", api.schema_name()); let sql = format!( - "DROP SCHEMA IF EXISTS \"{0}\" CASCADE; - CREATE SCHEMA \"{0}\"; - CREATE TABLE \"{0}\".\"City\" (id INT PRIMARY KEY); + "DROP SCHEMA IF EXISTS \"{schema2}\" CASCADE; + CREATE SCHEMA \"{schema2}\"; + CREATE TABLE \"{schema2}\".\"City\" (id INT PRIMARY KEY); CREATE TABLE \"User\" ( id INT PRIMARY KEY, - city INT REFERENCES \"{0}\".\"City\" (id) ON DELETE NO ACTION + city INT REFERENCES \"{schema2}\".\"City\" (id) ON DELETE NO ACTION ); ", - schema2, ); api.raw_cmd(&sql); diff --git a/libs/sql-schema-describer/tests/test_api/mod.rs b/libs/sql-schema-describer/tests/test_api/mod.rs index 7b4fdc860faf..d02814d7acb8 100644 --- a/libs/sql-schema-describer/tests/test_api/mod.rs +++ b/libs/sql-schema-describer/tests/test_api/mod.rs @@ -187,7 +187,7 @@ impl TableAssertion<'_> { column: self .table .column(column_name) - .ok_or_else(|| format!("Could not find the {} column", column_name)) + .ok_or_else(|| format!("Could not find the {column_name} column")) .unwrap(), }; @@ -282,7 +282,7 @@ impl ColumnAssertion<'_> { pub fn assert_type_is_int_or_bigint(&self) -> &Self { let fam = self.column.column_type_family(); - assert!(fam.is_int() || fam.is_bigint(), "Expected int or bigint, got {:?}", fam); + assert!(fam.is_int() || fam.is_bigint(), "Expected int or bigint, got {fam:?}"); self } diff --git a/libs/test-cli/build.rs b/libs/test-cli/build.rs index 78982f9fb99a..9bd10ecb9c58 100644 --- a/libs/test-cli/build.rs +++ b/libs/test-cli/build.rs @@ -3,5 +3,5 @@ use std::process::Command; fn main() { let output = Command::new("git").args(["rev-parse", "HEAD"]).output().unwrap(); let git_hash = String::from_utf8(output.stdout).unwrap(); - println!("cargo:rustc-env=GIT_HASH={}", git_hash); + println!("cargo:rustc-env=GIT_HASH={git_hash}"); } diff --git a/libs/test-cli/src/diagnose_migration_history.rs b/libs/test-cli/src/diagnose_migration_history.rs index fbf7781cabe8..9093d62b572d 100644 --- a/libs/test-cli/src/diagnose_migration_history.rs +++ 
b/libs/test-cli/src/diagnose_migration_history.rs @@ -14,7 +14,7 @@ impl DiagnoseMigrationHistory { let output = engine.diagnose_migration_history(input).await?; - eprintln!("{:#?}", output); + eprintln!("{output:#?}"); Ok(()) } diff --git a/libs/test-cli/src/main.rs b/libs/test-cli/src/main.rs index 0d748809ea35..c266d61f3893 100644 --- a/libs/test-cli/src/main.rs +++ b/libs/test-cli/src/main.rs @@ -93,7 +93,7 @@ impl FromStr for DiffOutputType { "ddl" => Ok(Self::Ddl), _ => { let kind = std::io::ErrorKind::InvalidInput; - Err(std::io::Error::new(kind, format!("Invalid output type: `{}`", s))) + Err(std::io::Error::new(kind, format!("Invalid output type: `{s}`"))) } } } @@ -302,11 +302,10 @@ fn minimal_schema_from_url(url: &str) -> anyhow::Result { let schema = format!( r#" datasource db {{ - provider = "{}" - url = "{}" + provider = "{provider}" + url = "{url}" }} - "#, - provider, url + "# ); Ok(schema) @@ -413,7 +412,7 @@ struct DiffHost; impl migration_connector::ConnectorHost for DiffHost { fn print(&self, s: &str) -> BoxFuture<'_, migration_core::CoreResult<()>> { - print!("{}", s); + print!("{s}"); Box::pin(std::future::ready(Ok(()))) } } @@ -463,6 +462,6 @@ fn init_logger() { .with(migration_core::TimingsLayer::default()); tracing::subscriber::set_global_default(subscriber) - .map_err(|err| eprintln!("Error initializing the global logger: {}", err)) + .map_err(|err| eprintln!("Error initializing the global logger: {err}")) .ok(); } diff --git a/libs/test-macros/src/lib.rs b/libs/test-macros/src/lib.rs index 2c59f28c8704..379f0a9cff6c 100644 --- a/libs/test-macros/src/lib.rs +++ b/libs/test-macros/src/lib.rs @@ -208,7 +208,7 @@ fn extract_api_arg(sig: &Signature) -> Result<(&syn::Ident, &syn::Ident), syn::E } (_, n) => Err(syn::Error::new_spanned( &sig.inputs, - format!("Test functions should take one argument, not {}", n), + format!("Test functions should take one argument, not {n}"), )), } } diff --git a/libs/test-setup/src/diff.rs b/libs/test-setup/src/diff.rs index df86d3288d2c..6433c26276a6 100644 --- a/libs/test-setup/src/diff.rs +++ b/libs/test-setup/src/diff.rs @@ -21,8 +21,8 @@ fn format_chunks(chunks: Vec>) -> String { for chunk in chunks { let formatted = match chunk { dissimilar::Chunk::Equal(text) => text.into(), - dissimilar::Chunk::Delete(text) => format!("\x1b[41m{}\x1b[0m", text), - dissimilar::Chunk::Insert(text) => format!("\x1b[42m{}\x1b[0m", text), + dissimilar::Chunk::Delete(text) => format!("\x1b[41m{text}\x1b[0m"), + dissimilar::Chunk::Insert(text) => format!("\x1b[42m{text}\x1b[0m"), }; buf.push_str(&formatted); } diff --git a/libs/test-setup/src/logging.rs b/libs/test-setup/src/logging.rs index 9ffbf4071b0a..2eab8f9f2df5 100644 --- a/libs/test-setup/src/logging.rs +++ b/libs/test-setup/src/logging.rs @@ -12,7 +12,7 @@ use tracing_subscriber::{ pub(crate) fn init_logger() { tracing::subscriber::set_global_default(test_tracing_subscriber()) .map_err(|err| { - eprintln!("Error initializing the global logger: {}", err); + eprintln!("Error initializing the global logger: {err}"); std::process::exit(1); }) .ok(); diff --git a/libs/test-setup/src/mssql.rs b/libs/test-setup/src/mssql.rs index d746c77d1b2a..ddd3b5aee3f3 100644 --- a/libs/test-setup/src/mssql.rs +++ b/libs/test-setup/src/mssql.rs @@ -30,7 +30,7 @@ pub async fn init_mssql_database(original_url: &str, db_name: &str) -> Result<(Q let conn = Quaint::new(original_url).await?; reset_schema(&conn, db_name).await?; - let mut url: JdbcString = format!("jdbc:{}", original_url).parse().unwrap(); + let mut url: 
JdbcString = format!("jdbc:{original_url}").parse().unwrap(); url.properties_mut().insert("database".into(), db_name.into()); let url = url.to_string().trim_start_matches("jdbc:").to_owned(); diff --git a/libs/test-setup/src/mysql.rs b/libs/test-setup/src/mysql.rs index de52f40a034c..1b9a58f071a9 100644 --- a/libs/test-setup/src/mysql.rs +++ b/libs/test-setup/src/mysql.rs @@ -41,7 +41,7 @@ pub(crate) fn get_mysql_tags(database_url: &str) -> Result, Strin match first_row.get("version").and_then(|version| version.to_string()) { None => Ok(tags), Some(version) => { - eprintln!("Version: {:?}", version); + eprintln!("Version: {version:?}"); // order matters... @@ -65,7 +65,7 @@ pub(crate) fn get_mysql_tags(database_url: &str) -> Result, Strin } } - eprintln!("Inferred tags: {:?}", tags); + eprintln!("Inferred tags: {tags:?}"); Ok(tags) } @@ -99,14 +99,12 @@ pub async fn create_mysql_database<'a>(database_url: &str, db_name: &'a str) -> r#" DROP DATABASE IF EXISTS `{db_name}`; "#, - db_name = db_name, ); let recreate = format!( r#" CREATE DATABASE `{db_name}`; "#, - db_name = db_name, ); // The two commands have to be run separately on mariadb. diff --git a/libs/test-setup/src/postgres.rs b/libs/test-setup/src/postgres.rs index ff86d11c59b4..d33d17b49df9 100644 --- a/libs/test-setup/src/postgres.rs +++ b/libs/test-setup/src/postgres.rs @@ -12,7 +12,7 @@ pub(crate) fn get_postgres_tags(database_url: &str) -> Result, St match version { None => Ok(tags), Some(version) => { - eprintln!("version: {:?}", version); + eprintln!("version: {version:?}"); if version.contains("9.") { tags |= Tags::Postgres9; @@ -34,7 +34,7 @@ pub(crate) fn get_postgres_tags(database_url: &str) -> Result, St tags |= Tags::CockroachDb; } - eprintln!("Inferred tags: {:?}", tags); + eprintln!("Inferred tags: {tags:?}"); Ok(tags) } @@ -55,14 +55,12 @@ pub async fn create_postgres_database(database_url: &str, db_name: &str) -> Resu r#" DROP DATABASE IF EXISTS "{db_name}"; "#, - db_name = db_name, ); let recreate = format!( r#" CREATE DATABASE "{db_name}"; "#, - db_name = db_name, ); let conn = Quaint::new(postgres_db_url.as_str()).await?; diff --git a/libs/test-setup/src/test_api_args.rs b/libs/test-setup/src/test_api_args.rs index 83d7675b3bce..f104d2710f25 100644 --- a/libs/test-setup/src/test_api_args.rs +++ b/libs/test-setup/src/test_api_args.rs @@ -206,7 +206,7 @@ impl<'a> DatasourceBlock<'a> { } } fn generator_block(preview_features: &'static [&'static str]) -> String { - let preview_features: Vec = preview_features.iter().map(|pf| format!(r#""{}""#, pf)).collect(); + let preview_features: Vec = preview_features.iter().map(|pf| format!(r#""{pf}""#)).collect(); let preview_feature_string = if preview_features.is_empty() { "".to_string() @@ -216,9 +216,8 @@ fn generator_block(preview_features: &'static [&'static str]) -> String { format!( r#"generator generated_test_preview_flags {{ - provider = "prisma-client-js"{} - }}"#, - preview_feature_string + provider = "prisma-client-js"{preview_feature_string} + }}"# ) } diff --git a/libs/user-facing-errors/src/common.rs b/libs/user-facing-errors/src/common.rs index 4a3db5d24df1..8976889e6114 100644 --- a/libs/user-facing-errors/src/common.rs +++ b/libs/user-facing-errors/src/common.rs @@ -94,9 +94,7 @@ impl UserFacingError for DatabaseDoesNotExist { database_file_name, database_file_path, } => format!( - "Database {database_file_name} does not exist at {database_file_path}", - database_file_name = database_file_name, - database_file_path = database_file_path + "Database 
{database_file_name} does not exist at {database_file_path}" ), DatabaseDoesNotExist::Postgres { database_name, @@ -104,9 +102,6 @@ impl UserFacingError for DatabaseDoesNotExist { database_port, } => format!( "Database `{database_name}` does not exist on the database server at `{database_host}:{database_port}`.", - database_name = database_name, - database_host = database_host, - database_port = database_port, ), DatabaseDoesNotExist::Mysql { database_name, @@ -114,9 +109,6 @@ impl UserFacingError for DatabaseDoesNotExist { database_port, } => format!( "Database `{database_name}` does not exist on the database server at `{database_host}:{database_port}`.", - database_name = database_name, - database_host = database_host, - database_port = database_port, ), DatabaseDoesNotExist::Mssql { database_name, @@ -124,9 +116,6 @@ impl UserFacingError for DatabaseDoesNotExist { database_port, } => format!( "Database `{database_name}` does not exist on the database server at `{database_host}:{database_port}`.", - database_name = database_name, - database_host = database_host, - database_port = database_port, ), } } diff --git a/libs/user-facing-errors/src/lib.rs b/libs/user-facing-errors/src/lib.rs index 41cdb5f56722..5cc1d183466d 100644 --- a/libs/user-facing-errors/src/lib.rs +++ b/libs/user-facing-errors/src/lib.rs @@ -135,12 +135,12 @@ impl Error { let backtrace = Some(format!("{:?}", backtrace::Backtrace::new())); let location = panic_info .location() - .map(|loc| format!("{}", loc)) + .map(|loc| format!("{loc}")) .unwrap_or_else(|| "".to_owned()); Error { inner: ErrorType::Unknown(UnknownError { - message: format!("[{}] {}", location, message), + message: format!("[{location}] {message}"), backtrace, }), is_panic: true, @@ -181,7 +181,7 @@ impl Error { pub fn unwrap_known(self) -> KnownError { match self.inner { ErrorType::Known(err) => err, - err @ ErrorType::Unknown(_) => panic!("Expected known error, got {:?}", err), + err @ ErrorType::Unknown(_) => panic!("Expected known error, got {err:?}"), } } diff --git a/libs/user-facing-errors/src/migration_engine.rs b/libs/user-facing-errors/src/migration_engine.rs index de4381b430bd..e446b15cc131 100644 --- a/libs/user-facing-errors/src/migration_engine.rs +++ b/libs/user-facing-errors/src/migration_engine.rs @@ -88,7 +88,7 @@ impl crate::UserFacingError for PreviewFeaturesBlocked { const ERROR_CODE: &'static str = "P3007"; fn message(&self) -> String { - let blocked: Vec<_> = self.features.iter().map(|s| format!("`{}`", s)).collect(); + let blocked: Vec<_> = self.features.iter().map(|s| format!("`{s}`")).collect(); format!( "Some of the requested preview features are not yet allowed in migration engine. Please remove them from your data model before using migrations. 
(blocked: {list_of_blocked_features})", diff --git a/libs/user-facing-errors/src/quaint.rs b/libs/user-facing-errors/src/quaint.rs index 040735c2692b..aab6598d81bc 100644 --- a/libs/user-facing-errors/src/quaint.rs +++ b/libs/user-facing-errors/src/quaint.rs @@ -81,7 +81,7 @@ pub fn render_quaint_error(kind: &ErrorKind, connection_info: &ConnectionInfo) - (ErrorKind::DatabaseAlreadyExists { db_name }, ConnectionInfo::Postgres(url)) => { Some(KnownError::new(common::DatabaseAlreadyExists { - database_name: format!("{}", db_name), + database_name: format!("{db_name}"), database_host: url.host().to_owned(), database_port: url.port(), })) @@ -89,7 +89,7 @@ pub fn render_quaint_error(kind: &ErrorKind, connection_info: &ConnectionInfo) - (ErrorKind::DatabaseAlreadyExists { db_name }, ConnectionInfo::Mysql(url)) => { Some(KnownError::new(common::DatabaseAlreadyExists { - database_name: format!("{}", db_name), + database_name: format!("{db_name}"), database_host: url.host().to_owned(), database_port: url.port(), })) @@ -97,14 +97,14 @@ pub fn render_quaint_error(kind: &ErrorKind, connection_info: &ConnectionInfo) - (ErrorKind::AuthenticationFailed { user }, ConnectionInfo::Postgres(url)) => { Some(KnownError::new(common::IncorrectDatabaseCredentials { - database_user: format!("{}", user), + database_user: format!("{user}"), database_host: url.host().to_owned(), })) } (ErrorKind::AuthenticationFailed { user }, ConnectionInfo::Mysql(url)) => { Some(KnownError::new(common::IncorrectDatabaseCredentials { - database_user: format!("{}", user), + database_user: format!("{user}"), database_host: url.host().to_owned(), })) } @@ -206,7 +206,7 @@ pub fn render_quaint_error(kind: &ErrorKind, connection_info: &ConnectionInfo) - (ErrorKind::LengthMismatch { column }, _connection_info) => { Some(KnownError::new(query_engine::InputValueTooLong { - column_name: format!("{}", column), + column_name: format!("{column}"), })) } @@ -218,28 +218,28 @@ pub fn render_quaint_error(kind: &ErrorKind, connection_info: &ConnectionInfo) - (ErrorKind::TableDoesNotExist { table: model }, ConnectionInfo::Mysql(_)) => { Some(KnownError::new(common::InvalidModel { - model: format!("{}", model), + model: format!("{model}"), kind: ModelKind::Table, })) } (ErrorKind::TableDoesNotExist { table: model }, ConnectionInfo::Postgres(_)) => { Some(KnownError::new(common::InvalidModel { - model: format!("{}", model), + model: format!("{model}"), kind: ModelKind::Table, })) } (ErrorKind::TableDoesNotExist { table: model }, ConnectionInfo::Sqlite { .. 
}) => { Some(KnownError::new(common::InvalidModel { - model: format!("{}", model), + model: format!("{model}"), kind: ModelKind::Table, })) } (ErrorKind::TableDoesNotExist { table: model }, ConnectionInfo::Mssql(_)) => { Some(KnownError::new(common::InvalidModel { - model: format!("{}", model), + model: format!("{model}"), kind: ModelKind::Table, })) } diff --git a/libs/user-facing-errors/src/query_engine.rs b/libs/user-facing-errors/src/query_engine.rs index 0b825cae3974..b857bc59cb85 100644 --- a/libs/user-facing-errors/src/query_engine.rs +++ b/libs/user-facing-errors/src/query_engine.rs @@ -15,10 +15,10 @@ impl fmt::Display for DatabaseConstraint { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { Self::Fields(fields) => { - let quoted_fields: Vec = fields.iter().map(|f| format!("`{}`", f)).collect(); + let quoted_fields: Vec = fields.iter().map(|f| format!("`{f}`")).collect(); write!(f, "fields: ({})", quoted_fields.join(",")) } - Self::Index(index) => write!(f, "constraint: `{}`", index), + Self::Index(index) => write!(f, "constraint: `{index}`"), Self::ForeignKey => write!(f, "foreign key"), Self::CannotParse => write!(f, "(not available)"), } diff --git a/migration-engine/cli/build.rs b/migration-engine/cli/build.rs index d9a74f6b8c89..2e8fe20c0503 100644 --- a/migration-engine/cli/build.rs +++ b/migration-engine/cli/build.rs @@ -3,7 +3,7 @@ use std::process::Command; fn store_git_commit_hash() { let output = Command::new("git").args(["rev-parse", "HEAD"]).output().unwrap(); let git_hash = String::from_utf8(output.stdout).unwrap(); - println!("cargo:rustc-env=GIT_HASH={}", git_hash); + println!("cargo:rustc-env=GIT_HASH={git_hash}"); } fn main() { diff --git a/migration-engine/cli/src/commands.rs b/migration-engine/cli/src/commands.rs index 5245282bd01e..be7ef31dd8d6 100644 --- a/migration-engine/cli/src/commands.rs +++ b/migration-engine/cli/src/commands.rs @@ -34,7 +34,7 @@ impl Cli { }), }) .await?; - Ok(format!("Database '{}' was successfully created.", database_name)) + Ok(format!("Database '{database_name}' was successfully created.")) } CliCommand::CanConnectToDatabase => { api.ensure_connection_validity(migration_core::json_rpc::types::EnsureConnectionValidityParams { diff --git a/migration-engine/cli/src/logger.rs b/migration-engine/cli/src/logger.rs index 227c8f8df5e0..7225a0ea8012 100644 --- a/migration-engine/cli/src/logger.rs +++ b/migration-engine/cli/src/logger.rs @@ -15,7 +15,7 @@ pub(crate) fn init_logger() { .with(TimingsLayer::default()); tracing::subscriber::set_global_default(subscriber) - .map_err(|err| eprintln!("Error initializing the global logger: {}", err)) + .map_err(|err| eprintln!("Error initializing the global logger: {err}")) .ok(); } diff --git a/migration-engine/cli/tests/cli_tests.rs b/migration-engine/cli/tests/cli_tests.rs index 332f1d12bcdf..7ae403c94e16 100644 --- a/migration-engine/cli/tests/cli_tests.rs +++ b/migration-engine/cli/tests/cli_tests.rs @@ -79,9 +79,9 @@ fn test_connecting_with_a_working_mysql_connection_string(api: TestApi) { let connection_string = api.connection_string(); let output = api.run(&["--datasource", &connection_string, "can-connect-to-database"]); - assert!(output.status.success(), "{:?}", output); + assert!(output.status.success(), "{output:?}"); let stderr = String::from_utf8_lossy(&output.stderr); - assert!(stderr.contains("Connection successful"), "{:?}", stderr); + assert!(stderr.contains("Connection successful"), "{stderr:?}"); } #[test_connector(tags(Mysql))] @@ -106,9 +106,9 @@ fn 
test_connecting_with_a_working_postgres_connection_string(api: TestApi) { let output = api.run(&["--datasource", &conn_string, "can-connect-to-database"]); - assert!(output.status.success(), "{:?}", output); + assert!(output.status.success(), "{output:?}"); let stderr = String::from_utf8_lossy(&output.stderr); - assert!(stderr.contains("Connection successful"), "{:?}", stderr); + assert!(stderr.contains("Connection successful"), "{stderr:?}"); } // Note: not redundant with previous test because of the different URL scheme. @@ -122,9 +122,9 @@ fn test_connecting_with_a_working_postgresql_connection_string(api: TestApi) { let output = api.run(&["--datasource", &conn_string, "can-connect-to-database"]); - assert!(output.status.success(), "{:?}", output); + assert!(output.status.success(), "{output:?}"); let stderr = String::from_utf8_lossy(&output.stderr); - assert!(stderr.contains("Connection successful"), "{:?}", stderr); + assert!(stderr.contains("Connection successful"), "{stderr:?}"); } #[test_connector(tags(Postgres))] @@ -144,16 +144,16 @@ fn test_connecting_with_a_working_mssql_connection_string(api: TestApi) { let output = api.run(&["--datasource", &connection_string, "can-connect-to-database"]); - assert!(output.status.success(), "{:?}", output); + assert!(output.status.success(), "{output:?}"); let stderr = String::from_utf8_lossy(&output.stderr); - assert!(stderr.contains("Connection successful"), "{:?}", stderr); + assert!(stderr.contains("Connection successful"), "{stderr:?}"); } #[test_connector(tags(Postgres, Mysql))] fn test_create_database(api: TestApi) { let connection_string = api.connection_string(); let output = api.run(&["--datasource", &connection_string, "drop-database"]); - assert!(output.status.success(), "{:#?}", output); + assert!(output.status.success(), "{output:#?}"); let output = api.run(&["--datasource", &connection_string, "create-database"]); assert!(output.status.success()); @@ -197,7 +197,7 @@ fn test_create_sqlite_database(api: TestApi) { let url = format!("file:{}", sqlite_path.to_string_lossy()); let output = api.run(&["--datasource", &url, "create-database"]); let stderr = String::from_utf8_lossy(&output.stderr); - assert!(output.status.success(), "{:?}", stderr); + assert!(output.status.success(), "{stderr:?}"); assert!(stderr.contains("success")); assert!(stderr.contains("test_create_sqlite_database.db")); @@ -223,7 +223,7 @@ fn test_drop_sqlite_database(api: TestApi) { fn test_drop_database(api: TestApi) { let connection_string = api.connection_string(); let output = run(&["--datasource", &connection_string, "drop-database"]); - assert!(output.status.success(), "{:#?}", output); + assert!(output.status.success(), "{output:#?}"); let output = run(&["--datasource", &connection_string, "can-connect-to-database"]); assert_eq!(output.status.code(), Some(1)); @@ -280,13 +280,13 @@ fn database_already_exists_must_return_a_proper_error(api: TestApi) { let stderr = String::from_utf8_lossy(&output.stderr); assert!(stderr.contains(r#""error_code":"P1009""#)); - assert!(stderr.contains(&format!("Database `database_already_exists_must_return_a_proper_error` already exists on the database server at `{host}:{port}`", host = host, port = port))); + assert!(stderr.contains(&format!("Database `database_already_exists_must_return_a_proper_error` already exists on the database server at `{host}:{port}`"))); } #[test_connector(tags(Postgres))] fn tls_errors_must_be_mapped_in_the_cli(api: TestApi) { let connection_string = api.connection_string(); - let url = 
format!("{}&sslmode=require&sslaccept=strict", connection_string); + let url = format!("{connection_string}&sslmode=require&sslaccept=strict"); let output = api.run(&["--datasource", &url, "can-connect-to-database"]); assert_eq!(output.status.code(), Some(1)); @@ -434,9 +434,9 @@ fn execute_postgres(api: TestApi) { let connection_string = api.connection_string(); let output = api.run(&["--datasource", &connection_string, "drop-database"]); - assert!(output.status.success(), "{:#?}", output); + assert!(output.status.success(), "{output:#?}"); let output = api.run(&["--datasource", &connection_string, "create-database"]); - assert!(output.status.success(), "{:#?}", output); + assert!(output.status.success(), "{output:#?}"); let tmpdir = tempfile::tempdir().unwrap(); let schema = r#" @@ -491,10 +491,10 @@ fn introspect_postgres(api: TestApi) { let connection_string = api.connection_string(); let output = api.run(&["--datasource", &connection_string, "drop-database"]); - assert!(output.status.success(), "{:#?}", output); + assert!(output.status.success(), "{output:#?}"); let output = api.run(&["--datasource", &connection_string, "create-database"]); - assert!(output.status.success(), "{:#?}", output); + assert!(output.status.success(), "{output:#?}"); let tmpdir = tempfile::tempdir().unwrap(); let schema = indoc! {r#" diff --git a/migration-engine/connectors/migration-connector/src/checksum.rs b/migration-engine/connectors/migration-connector/src/checksum.rs index ab8d88ab66b8..807c15265112 100644 --- a/migration-engine/connectors/migration-connector/src/checksum.rs +++ b/migration-engine/connectors/migration-connector/src/checksum.rs @@ -67,7 +67,7 @@ impl FormatChecksum for [u8; 32] { let mut checksum_string = String::with_capacity(32 * 2); for byte in self { - write!(checksum_string, "{:02x}", byte).unwrap(); + write!(checksum_string, "{byte:02x}").unwrap(); } assert_eq!(checksum_string.len(), CHECKSUM_STR_LEN); @@ -87,7 +87,7 @@ impl FormatChecksum for [u8; 32] { let mut checksum_string = String::with_capacity(32 * 2); for byte in self { - write!(checksum_string, "{:x}", byte).unwrap(); + write!(checksum_string, "{byte:x}").unwrap(); } checksum_string diff --git a/migration-engine/connectors/migration-connector/src/error.rs b/migration-engine/connectors/migration-connector/src/error.rs index 5ef2d6ff9741..c8581b0fb793 100644 --- a/migration-engine/connectors/migration-connector/src/error.rs +++ b/migration-engine/connectors/migration-connector/src/error.rs @@ -245,7 +245,7 @@ impl From for ConnectorError { fn invalid_connection_string_description(error_details: impl Display) -> String { let docs = r#"https://www.prisma.io/docs/reference/database-reference/connection-urls"#; - format! {r#"{} in database URL. Please refer to the documentation in {} for constructing a correct connection string. In some cases, certain characters must be escaped. Please check the string for any illegal characters."#, error_details, docs} + format! {r#"{error_details} in database URL. Please refer to the documentation in {docs} for constructing a correct connection string. In some cases, certain characters must be escaped. 
Please check the string for any illegal characters."#} } #[cfg(test)] diff --git a/migration-engine/connectors/migration-connector/src/migrations_directory.rs b/migration-engine/connectors/migration-connector/src/migrations_directory.rs index 92f5f0b03476..1259296dc387 100644 --- a/migration-engine/connectors/migration-connector/src/migrations_directory.rs +++ b/migration-engine/connectors/migration-connector/src/migrations_directory.rs @@ -28,11 +28,7 @@ pub fn create_migration_directory( migration_name: &str, ) -> io::Result { let timestamp = chrono::Utc::now().format("%Y%m%d%H%M%S"); - let directory_name = format!( - "{timestamp}_{migration_name}", - timestamp = timestamp, - migration_name = migration_name - ); + let directory_name = format!("{timestamp}_{migration_name}"); let directory_path = migrations_directory_path.join(directory_name); if directory_path.exists() { @@ -63,8 +59,7 @@ pub fn write_migration_lock_file(migrations_directory_path: &str, provider: &str let content = format!( r##"# Please do not edit this file manually # It should be added in your version-control system (i.e. Git) -provider = "{}""##, - provider +provider = "{provider}""## ); file.write_all(content.as_bytes())?; diff --git a/migration-engine/connectors/mongodb-migration-connector/tests/migrations/test_api.rs b/migration-engine/connectors/mongodb-migration-connector/tests/migrations/test_api.rs index d4a8fa7bb010..0a44f9cfabbd 100644 --- a/migration-engine/connectors/mongodb-migration-connector/tests/migrations/test_api.rs +++ b/migration-engine/connectors/mongodb-migration-connector/tests/migrations/test_api.rs @@ -50,7 +50,7 @@ fn fresh_db_name() -> String { let id = DATABASE_ID.fetch_add(1, std::sync::atomic::Ordering::Relaxed); let mut out = String::with_capacity(PREFIX.len() + 4); out.push_str(PREFIX); - out.write_fmt(format_args!("{:04}", id)).unwrap(); + out.write_fmt(format_args!("{id:04}")).unwrap(); out } @@ -149,13 +149,13 @@ pub(crate) fn test_scenario(scenario_name: &str) { let mut path = String::with_capacity(SCENARIOS_PATH.len() + 12); let schema = { - write!(path, "{}/{}/schema.prisma", SCENARIOS_PATH, scenario_name).unwrap(); + write!(path, "{SCENARIOS_PATH}/{scenario_name}/schema.prisma").unwrap(); std::fs::read_to_string(&path).unwrap() }; let state: State = { path.clear(); - write!(path, "{}/{}/state.json", SCENARIOS_PATH, scenario_name).unwrap(); + write!(path, "{SCENARIOS_PATH}/{scenario_name}/state.json").unwrap(); let file = std::fs::File::open(&path).unwrap(); let collections: BTreeMap = serde_json::from_reader(&file).unwrap(); State { collections } @@ -163,7 +163,7 @@ pub(crate) fn test_scenario(scenario_name: &str) { let mut expected_result = { path.clear(); - write!(path, "{}/{}/result", SCENARIOS_PATH, scenario_name).unwrap(); + write!(path, "{SCENARIOS_PATH}/{scenario_name}/result").unwrap(); std::fs::read_to_string(&path).unwrap() }; @@ -201,7 +201,7 @@ pub(crate) fn test_scenario(scenario_name: &str) { if *UPDATE_EXPECT { let mut file = std::fs::File::create(&path).unwrap(); // truncate - write!(file, "{}", rendered_migration).unwrap(); + write!(file, "{rendered_migration}").unwrap(); } else if expected_result != rendered_migration { let chunks = dissimilar::diff(&expected_result, &rendered_migration); panic!( @@ -245,8 +245,8 @@ fn format_chunks(chunks: Vec) -> String { for chunk in chunks { let formatted = match chunk { dissimilar::Chunk::Equal(text) => text.into(), - dissimilar::Chunk::Delete(text) => format!("\x1b[41m{}\x1b[0m", text), - dissimilar::Chunk::Insert(text) => 
format!("\x1b[42m{}\x1b[0m", text), + dissimilar::Chunk::Delete(text) => format!("\x1b[41m{text}\x1b[0m"), + dissimilar::Chunk::Insert(text) => format!("\x1b[42m{text}\x1b[0m"), }; buf.push_str(&formatted); } diff --git a/migration-engine/connectors/sql-migration-connector/src/apply_migration.rs b/migration-engine/connectors/sql-migration-connector/src/apply_migration.rs index a7c9aa35d87f..0e8a71bc6c5c 100644 --- a/migration-engine/connectors/sql-migration-connector/src/apply_migration.rs +++ b/migration-engine/connectors/sql-migration-connector/src/apply_migration.rs @@ -110,7 +110,7 @@ pub(crate) async fn apply_script( ) -> ConnectorResult<()> { connector .host - .print(&format!("Applying migration `{}`\n", migration_name)) + .print(&format!("Applying migration `{migration_name}`\n")) .await?; connector.flavour.scan_migration_script(script); connector.flavour.apply_migration_script(migration_name, script).await diff --git a/migration-engine/connectors/sql-migration-connector/src/flavour.rs b/migration-engine/connectors/sql-migration-connector/src/flavour.rs index 8022b021ea47..d703a3ce8ad8 100644 --- a/migration-engine/connectors/sql-migration-connector/src/flavour.rs +++ b/migration-engine/connectors/sql-migration-connector/src/flavour.rs @@ -78,7 +78,6 @@ where } /// Convenience wrapper to transition from WithParams to Connected. - #[track_caller] async fn try_connect( &mut self, f: impl for<'b> FnOnce(&'b P) -> BoxFuture<'b, ConnectorResult>, diff --git a/migration-engine/connectors/sql-migration-connector/src/flavour/mssql.rs b/migration-engine/connectors/sql-migration-connector/src/flavour/mssql.rs index 814a8ab3af82..31852b2c2f14 100644 --- a/migration-engine/connectors/sql-migration-connector/src/flavour/mssql.rs +++ b/migration-engine/connectors/sql-migration-connector/src/flavour/mssql.rs @@ -48,7 +48,7 @@ impl MssqlFlavour { /// Get the url as a JDBC string, extract the database name, and re-encode the string. 
fn master_url(input: &str) -> ConnectorResult<(String, String)> { - let mut conn = JdbcString::from_str(&format!("jdbc:{}", input)) + let mut conn = JdbcString::from_str(&format!("jdbc:{input}")) .map_err(|e| ConnectorError::from_source(e, "JDBC string parse error"))?; let params = conn.properties_mut(); @@ -109,7 +109,7 @@ impl SqlFlavour for MssqlFlavour { let (db_name, master_uri) = Self::master_url(connection_string)?; let mut master_conn = Connection::new(&master_uri).await?; - let query = format!("CREATE DATABASE [{}]", db_name); + let query = format!("CREATE DATABASE [{db_name}]"); master_conn .raw_cmd( &query, @@ -158,7 +158,7 @@ impl SqlFlavour for MssqlFlavour { let params = self.state.get_unwrapped_params(); let connection_string = ¶ms.connector_params.connection_string; { - let conn_str: JdbcString = format!("jdbc:{}", connection_string) + let conn_str: JdbcString = format!("jdbc:{connection_string}") .parse() .map_err(ConnectorError::url_parse_error)?; @@ -174,7 +174,7 @@ impl SqlFlavour for MssqlFlavour { let (db_name, master_uri) = Self::master_url(¶ms.connector_params.connection_string)?; let mut conn = Connection::new(&master_uri.to_string()).await?; - let query = format!("DROP DATABASE IF EXISTS [{}]", db_name); + let query = format!("DROP DATABASE IF EXISTS [{db_name}]"); conn.raw_cmd( &query, &Params { @@ -265,11 +265,10 @@ impl SqlFlavour for MssqlFlavour { SELECT @stmt = ISNULL(@stmt + @n, '') + 'DROP PROCEDURE [' + SCHEMA_NAME(schema_id) + '].[' + OBJECT_NAME(object_id) + ']' FROM sys.objects - WHERE SCHEMA_NAME(schema_id) = '{0}' AND type = 'P' + WHERE SCHEMA_NAME(schema_id) = '{schema_name}' AND type = 'P' EXEC SP_EXECUTESQL @stmt - "#, - schema_name + "# ); let drop_shared_defaults = format!( @@ -282,11 +281,10 @@ impl SqlFlavour for MssqlFlavour { SELECT @stmt = ISNULL(@stmt + @n, '') + 'DROP DEFAULT [' + SCHEMA_NAME(schema_id) + '].[' + OBJECT_NAME(object_id) + ']' FROM sys.objects - WHERE SCHEMA_NAME(schema_id) = '{0}' AND type = 'D' AND parent_object_id = 0 + WHERE SCHEMA_NAME(schema_id) = '{schema_name}' AND type = 'D' AND parent_object_id = 0 EXEC SP_EXECUTESQL @stmt - "#, - schema_name + "# ); let drop_views = format!( @@ -299,11 +297,10 @@ impl SqlFlavour for MssqlFlavour { SELECT @stmt = ISNULL(@stmt + @n, '') + 'DROP VIEW [' + SCHEMA_NAME(schema_id) + '].[' + name + ']' FROM sys.views - WHERE SCHEMA_NAME(schema_id) = '{0}' + WHERE SCHEMA_NAME(schema_id) = '{schema_name}' EXEC SP_EXECUTESQL @stmt - "#, - schema_name + "# ); let drop_fks = format!( @@ -316,11 +313,10 @@ impl SqlFlavour for MssqlFlavour { SELECT @stmt = ISNULL(@stmt + @n, '') + 'ALTER TABLE [' + SCHEMA_NAME(schema_id) + '].[' + OBJECT_NAME(parent_object_id) + '] DROP CONSTRAINT [' + name + ']' FROM sys.foreign_keys - WHERE SCHEMA_NAME(schema_id) = '{0}' + WHERE SCHEMA_NAME(schema_id) = '{schema_name}' EXEC SP_EXECUTESQL @stmt - "#, - schema_name + "# ); let drop_tables = format!( @@ -333,11 +329,10 @@ impl SqlFlavour for MssqlFlavour { SELECT @stmt = ISNULL(@stmt + @n, '') + 'DROP TABLE [' + SCHEMA_NAME(schema_id) + '].[' + name + ']' FROM sys.tables - WHERE SCHEMA_NAME(schema_id) = '{0}' + WHERE SCHEMA_NAME(schema_id) = '{schema_name}' EXEC SP_EXECUTESQL @stmt - "#, - schema_name + "# ); let drop_types = format!( @@ -350,12 +345,11 @@ impl SqlFlavour for MssqlFlavour { SELECT @stmt = ISNULL(@stmt + @n, '') + 'DROP TYPE [' + SCHEMA_NAME(schema_id) + '].[' + name + ']' FROM sys.types - WHERE SCHEMA_NAME(schema_id) = '{0}' + WHERE SCHEMA_NAME(schema_id) = '{schema_name}' AND is_user_defined = 
1 EXEC SP_EXECUTESQL @stmt - "#, - schema_name + "# ); connection.raw_cmd(&drop_procedures, params).await?; @@ -368,7 +362,7 @@ // We need to drop namespaces after we've dropped everything else. for schema_name in ns_vec { - let drop_namespace = format!("DROP SCHEMA IF EXISTS [{0}]", schema_name); + let drop_namespace = format!("DROP SCHEMA IF EXISTS [{schema_name}]"); connection.raw_cmd(&drop_namespace, params).await?; } @@ -461,7 +455,7 @@ impl SqlFlavour for MssqlFlavour { )); } - let create_database = format!("CREATE DATABASE [{}]", shadow_database_name); + let create_database = format!("CREATE DATABASE [{shadow_database_name}]"); main_connection .raw_cmd(&create_database, params) @@ -549,7 +543,7 @@ mod tests { let mut flavour = MssqlFlavour::default(); flavour.set_params(params).unwrap(); - let debugged = format!("{:?}", flavour); + let debugged = format!("{flavour:?}"); let words = &["myname", "mypassword", "myserver", "8765", "mydbname"]; diff --git a/migration-engine/connectors/sql-migration-connector/src/flavour/mysql.rs b/migration-engine/connectors/sql-migration-connector/src/flavour/mysql.rs index fc581f9bfd3e..4f7fa6541a27 100644 --- a/migration-engine/connectors/sql-migration-connector/src/flavour/mysql.rs +++ b/migration-engine/connectors/sql-migration-connector/src/flavour/mysql.rs @@ -208,10 +208,7 @@ impl SqlFlavour for MysqlFlavour { let mut conn = Connection::new(url).await?; let db_name = params.url.dbname(); - let query = format!( - "CREATE DATABASE `{}` CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci;", - db_name - ); + let query = format!("CREATE DATABASE `{db_name}` CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci;"); conn.raw_cmd(&query, &mysql_url).await?; @@ -243,7 +240,7 @@ impl SqlFlavour for MysqlFlavour { let db_name = params.url.dbname(); connection - .raw_cmd(&format!("DROP DATABASE `{}`", db_name), &params.url) + .raw_cmd(&format!("DROP DATABASE `{db_name}`"), &params.url) .await?; Ok(()) @@ -293,12 +290,12 @@ impl SqlFlavour for MysqlFlavour { let db_name = params.url.dbname(); connection - .raw_cmd(&format!("DROP DATABASE `{}`", db_name), &params.url) + .raw_cmd(&format!("DROP DATABASE `{db_name}`"), &params.url) .await?; connection - .raw_cmd(&format!("CREATE DATABASE `{}`", db_name), &params.url) + .raw_cmd(&format!("CREATE DATABASE `{db_name}`"), &params.url) .await?; - connection.raw_cmd(&format!("USE `{}`", db_name), &params.url).await?; + connection.raw_cmd(&format!("USE `{db_name}`"), &params.url).await?; Ok(()) }) @@ -369,13 +366,13 @@ impl SqlFlavour for MysqlFlavour { with_connection(&mut self.state, move |params, _circumstances, conn| async move { let shadow_database_name = crate::new_shadow_database_name(); - let create_database = format!("CREATE DATABASE `{}`", shadow_database_name); + let create_database = format!("CREATE DATABASE `{shadow_database_name}`"); conn.raw_cmd(&create_database, &params.url) .await .map_err(|err| err.into_shadow_db_creation_error())?; let mut shadow_database_url = params.url.url().clone(); - shadow_database_url.set_path(&format!("/{}", shadow_database_name)); + shadow_database_url.set_path(&format!("/{shadow_database_name}")); let shadow_db_params = ConnectorParams { connection_string: shadow_database_url.to_string(), preview_features: params.connector_params.preview_features, @@ -391,7 +388,7 @@ impl SqlFlavour for MysqlFlavour { // leaving shadow databases behind in case of e.g. faulty migrations. 
let ret = shadow_db::sql_schema_from_migrations_history(migrations, shadow_database).await; - let drop_database = format!("DROP DATABASE IF EXISTS `{}`", shadow_database_name); + let drop_database = format!("DROP DATABASE IF EXISTS `{shadow_database_name}`"); conn.raw_cmd(&drop_database, &params.url).await?; ret @@ -465,7 +462,7 @@ mod tests { shadow_database_connection_string: None, }; flavour.set_params(params).unwrap(); - let debugged = format!("{:?}", flavour); + let debugged = format!("{flavour:?}"); let words = &["myname", "mypassword", "myserver", "8765", "mydbname"]; diff --git a/migration-engine/connectors/sql-migration-connector/src/flavour/postgres.rs b/migration-engine/connectors/sql-migration-connector/src/flavour/postgres.rs index cae12d482aa3..44c84ba50428 100644 --- a/migration-engine/connectors/sql-migration-connector/src/flavour/postgres.rs +++ b/migration-engine/connectors/sql-migration-connector/src/flavour/postgres.rs @@ -226,7 +226,7 @@ impl SqlFlavour for PostgresFlavour { let (mut conn, admin_url) = create_postgres_admin_conn(url.clone()).await?; - let query = format!("CREATE DATABASE \"{}\"", db_name); + let query = format!("CREATE DATABASE \"{db_name}\""); let mut database_already_exists_error = None; @@ -244,7 +244,7 @@ impl SqlFlavour for PostgresFlavour { // Now create the schema let mut conn = Connection::new(connection_string.parse().unwrap()).await?; - let schema_sql = format!("CREATE SCHEMA IF NOT EXISTS \"{}\";", schema_name); + let schema_sql = format!("CREATE SCHEMA IF NOT EXISTS \"{schema_name}\";"); conn.raw_cmd(&schema_sql, &params.url).await?; @@ -285,7 +285,7 @@ impl SqlFlavour for PostgresFlavour { let (mut admin_conn, admin_url) = create_postgres_admin_conn(url.clone()).await?; admin_conn - .raw_cmd(&format!("DROP DATABASE \"{}\"", db_name), &admin_url) + .raw_cmd(&format!("DROP DATABASE \"{db_name}\""), &admin_url) .await?; Ok(()) @@ -322,9 +322,9 @@ impl SqlFlavour for PostgresFlavour { tracing::info!(?schemas_to_reset, "Resetting schema(s)"); for schema_name in schemas_to_reset { - conn.raw_cmd(&format!("DROP SCHEMA \"{}\" CASCADE", schema_name), &params.url) + conn.raw_cmd(&format!("DROP SCHEMA \"{schema_name}\" CASCADE"), &params.url) .await?; - conn.raw_cmd(&format!("CREATE SCHEMA \"{}\"", schema_name), &params.url) + conn.raw_cmd(&format!("CREATE SCHEMA \"{schema_name}\""), &params.url) .await?; } @@ -414,7 +414,7 @@ impl SqlFlavour for PostgresFlavour { let shadow_database_name = crate::new_shadow_database_name(); { - let create_database = format!("CREATE DATABASE \"{}\"", shadow_database_name); + let create_database = format!("CREATE DATABASE \"{shadow_database_name}\""); main_connection .raw_cmd(&create_database, &params.url) .await @@ -426,7 +426,7 @@ .connection_string .parse() .map_err(ConnectorError::url_parse_error)?; - shadow_database_url.set_path(&format!("/{}", shadow_database_name)); + shadow_database_url.set_path(&format!("/{shadow_database_name}")); let shadow_db_params = ConnectorParams { connection_string: shadow_database_url.to_string(), preview_features: params.connector_params.preview_features, @@ -442,7 +442,7 @@ let ret = shadow_db::sql_schema_from_migrations_history(migrations, shadow_database, namespaces).await; - let drop_database = format!("DROP DATABASE IF EXISTS \"{}\"", shadow_database_name); + let drop_database = format!("DROP DATABASE IF EXISTS \"{shadow_database_name}\""); main_connection.raw_cmd(&drop_database, &params.url).await?; ret @@ -461,7 +461,7 @@ impl 
SqlFlavour for PostgresFlavour { fn strip_schema_param_from_url(url: &mut Url) { let mut params: HashMap<String, String> = url.query_pairs().into_owned().collect(); params.remove("schema"); - let params: Vec<String> = params.into_iter().map(|(k, v)| format!("{}={}", k, v)).collect(); + let params: Vec<String> = params.into_iter().map(|(k, v)| format!("{k}={v}")).collect(); let params: String = params.join("&"); url.set_query(Some(&params)); } @@ -477,7 +477,7 @@ async fn create_postgres_admin_conn(mut url: Url) -> ConnectorResult<(Connection let mut conn = None; for database_name in CANDIDATE_DEFAULT_DATABASES { - url.set_path(&format!("/{}", database_name)); + url.set_path(&format!("/{database_name}")); let postgres_url = PostgresUrl::new(url.clone()).unwrap(); match Connection::new(url.clone()).await { // If the database does not exist, try the next one. @@ -605,7 +605,7 @@ where schema_name = schema_name, ); - connection.raw_cmd(&format!("CREATE SCHEMA \"{}\"", schema_name), &params.url).await?; + connection.raw_cmd(&format!("CREATE SCHEMA \"{schema_name}\""), &params.url).await?; Ok((circumstances, connection)) })).await?; @@ -628,7 +628,7 @@ mod tests { shadow_database_connection_string: None, }; flavour.set_params(params).unwrap(); - let debugged = format!("{:?}", flavour); + let debugged = format!("{flavour:?}"); let words = &["myname", "mypassword", "myserver", "8765", "mydbname"]; diff --git a/migration-engine/connectors/sql-migration-connector/src/flavour/postgres/connection.rs b/migration-engine/connectors/sql-migration-connector/src/flavour/postgres/connection.rs index 0406a110d690..05947ce1f130 100644 --- a/migration-engine/connectors/sql-migration-connector/src/flavour/postgres/connection.rs +++ b/migration-engine/connectors/sql-migration-connector/src/flavour/postgres/connection.rs @@ -137,8 +137,7 @@ impl Connection { .join("\n"); error_position = format!( - "\n\nPosition:\n{}\n\x1b[1m{:>3}\x1b[1;31m {}\x1b[0m", - numbered_lines, line_number, line + "\n\nPosition:\n{numbered_lines}\n\x1b[1m{line_number:>3}\x1b[1;31m {line}\x1b[0m" ); break; } else { @@ -157,7 +156,7 @@ impl Connection { String::new() }; - let database_error = format!("{}{}\n\n{:?}", db_error, position, db_error); + let database_error = format!("{db_error}{position}\n\n{db_error:?}"); (Some(db_error.code().code()), database_error) } else { diff --git a/migration-engine/connectors/sql-migration-connector/src/flavour/sqlite.rs b/migration-engine/connectors/sql-migration-connector/src/flavour/sqlite.rs index 7fed44d1cc51..8fc562c9218a 100644 --- a/migration-engine/connectors/sql-migration-connector/src/flavour/sqlite.rs +++ b/migration-engine/connectors/sql-migration-connector/src/flavour/sqlite.rs @@ -125,7 +125,7 @@ impl SqlFlavour for SqliteFlavour { let params = self.state.get_unwrapped_params(); let file_path = &params.file_path; let ret = std::fs::remove_file(file_path).map_err(|err| { - ConnectorError::from_msg(format!("Failed to delete SQLite database at `{}`.\n{}", file_path, err)) + ConnectorError::from_msg(format!("Failed to delete SQLite database at `{file_path}`.\n{err}")) }); ready(ret) } diff --git a/migration-engine/connectors/sql-migration-connector/src/sql_destructive_change_checker/destructive_change_checker_flavour/postgres.rs b/migration-engine/connectors/sql-migration-connector/src/sql_destructive_change_checker/destructive_change_checker_flavour/postgres.rs index ad83a97ea5fb..9873441cfc76 100644 --- a/migration-engine/connectors/sql-migration-connector/src/sql_destructive_change_checker/destructive_change_checker_flavour/postgres.rs
+++ b/migration-engine/connectors/sql-migration-connector/src/sql_destructive_change_checker/destructive_change_checker_flavour/postgres.rs @@ -131,7 +131,7 @@ impl DestructiveChangeCheckerFlavour for PostgresFlavour { Some(namespace) => format!("\"{}\".\"{}\"", namespace, table.table), None => format!("\"{}\"", table.table), }; - let query = format!("SELECT COUNT(*) FROM {}", from); + let query = format!("SELECT COUNT(*) FROM {from}"); let result_set = self.query_raw(&query, &[]).await?; super::extract_table_rows_count(table, result_set) }) diff --git a/migration-engine/connectors/sql-migration-connector/src/sql_destructive_change_checker/unexecutable_step_check.rs b/migration-engine/connectors/sql-migration-connector/src/sql_destructive_change_checker/unexecutable_step_check.rs index c1c3ed9f1876..89baa5ba6a0b 100644 --- a/migration-engine/connectors/sql-migration-connector/src/sql_destructive_change_checker/unexecutable_step_check.rs +++ b/migration-engine/connectors/sql-migration-connector/src/sql_destructive_change_checker/unexecutable_step_check.rs @@ -48,8 +48,7 @@ impl Check for UnexecutableStepCheck { let message = match database_checks.get_row_count(&Table::from_column(column)) { Some(0) => return None, // Adding a required column is possible if there is no data Some(row_count) => message(format_args!( - "There are {row_count} rows in this table, it is not possible to execute this step.", - row_count = row_count + "There are {row_count} rows in this table, it is not possible to execute this step." )), None => message(format_args!("This is not possible if the table is not empty.")), }; @@ -69,8 +68,7 @@ impl Check for UnexecutableStepCheck { let message = match database_checks.get_row_count(&Table::from_column(column)) { Some(0) => return None, // Adding a required column is possible if there is no data Some(row_count) => message(format_args!( - "There are {row_count} rows in this table, it is not possible to execute this step.", - row_count = row_count + "There are {row_count} rows in this table, it is not possible to execute this step." )), None => message(format_args!("This is not possible if the table is not empty.")), }; @@ -113,8 +111,7 @@ impl Check for UnexecutableStepCheck { (Some(0), _) => None, (_, Some(0)) => None, (_, Some(value_count)) => Some(message(format_args!( - "There are {} existing non-null values in that column, this step cannot be executed.", - value_count + "There are {value_count} existing non-null values in that column, this step cannot be executed." ))), (_, _) => Some(message(format_args!( "If there are non-null values in that column, this step will fail." diff --git a/migration-engine/connectors/sql-migration-connector/src/sql_destructive_change_checker/warning_check.rs b/migration-engine/connectors/sql-migration-connector/src/sql_destructive_change_checker/warning_check.rs index 2fc2427f186c..1deee1d508c8 100644 --- a/migration-engine/connectors/sql-migration-connector/src/sql_destructive_change_checker/warning_check.rs +++ b/migration-engine/connectors/sql-migration-connector/src/sql_destructive_change_checker/warning_check.rs @@ -104,8 +104,8 @@ impl Check for SqlMigrationWarningCheck { column: column.clone()}) { (Some(0), _) => None, (_, Some(0)) => None, - (_, None) => Some(format!("The `{}` column on the `{}` table would be dropped and recreated. This will lead to data loss if there is data in the column.", column, table)), - (_, Some(_row_count)) => Some(format!("The `{}` column on the `{}` table would be dropped and recreated. 
This will lead to data loss.", column, table)), + (_, None) => Some(format!("The `{column}` column on the `{table}` table would be dropped and recreated. This will lead to data loss if there is data in the column.")), + (_, Some(_row_count)) => Some(format!("The `{column}` column on the `{table}` table would be dropped and recreated. This will lead to data loss.")), } }, @@ -114,8 +114,8 @@ impl Check for SqlMigrationWarningCheck { table: table.clone(), namespace: namespace.clone()}) { Some(0) => None, // dropping the table is safe if it's empty - Some(rows_count) => Some(format!("You are about to drop the `{table_name}` table, which is not empty ({rows_count} rows).", table_name = table, rows_count = rows_count)), - None => Some(format!("You are about to drop the `{}` table. If the table is not empty, all the data it contains will be lost.", table)), + Some(rows_count) => Some(format!("You are about to drop the `{table}` table, which is not empty ({rows_count} rows).")), + None => Some(format!("You are about to drop the `{table}` table. If the table is not empty, all the data it contains will be lost.")), }, SqlMigrationWarningCheck::NonEmptyColumnDrop { table, column, namespace } => match database_check_results.get_row_and_non_null_value_count(&Column{ @@ -124,8 +124,8 @@ impl Check for SqlMigrationWarningCheck { column: column.clone()}) { (Some(0), _) => None, // it's safe to drop a column on an empty table (_, Some(0)) => None, // it's safe to drop a column if it only contains null values - (_, Some(value_count)) => Some(format!("You are about to drop the column `{column_name}` on the `{table_name}` table, which still contains {value_count} non-null values.", column_name = column, table_name = table, value_count = value_count)), - (_, _) => Some(format!("You are about to drop the column `{column_name}` on the `{table_name}` table. All the data in the column will be lost.", column_name = column, table_name = table)), + (_, Some(value_count)) => Some(format!("You are about to drop the column `{column}` on the `{table}` table, which still contains {value_count} non-null values.")), + (_, _) => Some(format!("You are about to drop the column `{column}` on the `{table}` table. All the data in the column will be lost.")), }, SqlMigrationWarningCheck::RiskyCast { table, column, previous_type, next_type, namespace } => match database_check_results.get_row_and_non_null_value_count(&Column{ @@ -134,8 +134,8 @@ impl Check for SqlMigrationWarningCheck { column: column.clone()}) { (Some(0), _) => None, // it's safe to alter a column on an empty table (_, Some(0)) => None, // it's safe to alter a column if it only contains null values - (_, Some(value_count)) => Some(format!("You are about to alter the column `{column_name}` on the `{table_name}` table, which contains {value_count} non-null values. The data in that column will be cast from `{old_type}` to `{new_type}`.", column_name = column, table_name = table, value_count = value_count, old_type = previous_type, new_type = next_type)), - (_, _) => Some(format!("You are about to alter the column `{column_name}` on the `{table_name}` table. The data in that column could be lost. The data in that column will be cast from `{old_type}` to `{new_type}`.", column_name = column, table_name = table, old_type = previous_type, new_type = next_type)), + (_, Some(value_count)) => Some(format!("You are about to alter the column `{column}` on the `{table}` table, which contains {value_count} non-null values. 
The data in that column will be cast from `{previous_type}` to `{next_type}`.")), + (_, _) => Some(format!("You are about to alter the column `{column}` on the `{table}` table. The data in that column could be lost. The data in that column will be cast from `{previous_type}` to `{next_type}`.")), }, @@ -147,8 +147,8 @@ impl Check for SqlMigrationWarningCheck { column: column.clone()}) { (Some(0), _) => None, // it's safe to alter a column on an empty table (_, Some(0)) => None, // it's safe to alter a column if it only contains null values - (_, Some(value_count)) => Some(format!("You are about to alter the column `{column_name}` on the `{table_name}` table, which contains {value_count} non-null values. The data in that column will be cast from `{old_type}` to `{new_type}`. This cast may fail. Please make sure the data in the column can be cast.", column_name = column, table_name = table, value_count = value_count, old_type = previous_type, new_type = next_type)), - (_, _) => Some(format!("You are about to alter the column `{column_name}` on the `{table_name}` table. The data in that column will be cast from `{old_type}` to `{new_type}`. This cast may fail. Please make sure the data in the column can be cast.", column_name = column, table_name = table, old_type = previous_type, new_type = next_type)), + (_, Some(value_count)) => Some(format!("You are about to alter the column `{column}` on the `{table}` table, which contains {value_count} non-null values. The data in that column will be cast from `{previous_type}` to `{next_type}`. This cast may fail. Please make sure the data in the column can be cast.")), + (_, _) => Some(format!("You are about to alter the column `{column}` on the `{table}` table. The data in that column will be cast from `{previous_type}` to `{next_type}`. This cast may fail. Please make sure the data in the column can be cast.")), }, SqlMigrationWarningCheck::PrimaryKeyChange { table, namespace } => @@ -156,7 +156,7 @@ impl Check for SqlMigrationWarningCheck { table: table.clone(), namespace: namespace.clone()}) { Some(0) => None, - _ => Some(format!("The primary key for the `{table}` table will be changed. If it partially fails, the table could be left without primary key constraint.", table = table)), + _ => Some(format!("The primary key for the `{table}` table will be changed. If it partially fails, the table could be left without primary key constraint.")), }, SqlMigrationWarningCheck::UniqueConstraintAddition { table, columns } => Some(format!("A unique constraint covering the columns `[{columns}]` on the table `{table}` will be added. If there are existing duplicate values, this will fail.", table = table, columns = columns.join(","))), diff --git a/migration-engine/connectors/sql-migration-connector/src/sql_renderer.rs b/migration-engine/connectors/sql-migration-connector/src/sql_renderer.rs index bb11da4afb3a..a8b88f3f5221 100644 --- a/migration-engine/connectors/sql-migration-connector/src/sql_renderer.rs +++ b/migration-engine/connectors/sql-migration-connector/src/sql_renderer.rs @@ -84,7 +84,7 @@ pub(crate) trait SqlRenderer { Some(namespace) => format!("{}.{}", self.quote(namespace), self.quote(table_name)), None => format!("{}", self.quote(table_name)), }; - vec![format!("DROP TABLE {}", name)] + vec![format!("DROP TABLE {name}")] } /// Render a `RedefineTables` step. 
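A note on the pattern this patch applies throughout: since Rust 1.58, the `format!` family of macros can capture variables from the surrounding scope directly inside the format string, so `format!("{}", s)` and `format!("{name}", name = name)` both shorten to `format!("{s}")` / `format!("{name}")`, and format specs such as `{byte:02x}` or `{line_number:>3}` keep working with captured names. Only plain identifiers can be captured; an expression such as a method call still needs an explicit named or positional argument, which is why `columns = columns.join(",")` is kept in the `UniqueConstraintAddition` message above. A minimal sketch of both cases (the variable names are illustrative, not taken from this codebase):

    // Minimal sketch, not project code: inline format args vs. explicit arguments.
    fn main() {
        let table = "User";
        let row_count = 3;
        let byte = 0xabu8;
        let columns = vec!["email", "tenant_id"];

        // Plain identifiers are captured straight from the enclosing scope,
        // and format specs apply to captured names as usual:
        let warning = format!("You are about to drop the `{table}` table ({row_count} rows).");
        let hex = format!("{byte:02x}"); // "ab"

        // Expressions cannot be captured; they still need an explicit argument:
        let constraint = format!("`[{cols}]` on the table `{table}`", cols = columns.join(","));

        println!("{warning}\n{hex}\n{constraint}");
    }
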
diff --git a/migration-engine/connectors/sql-migration-connector/src/sql_renderer/common.rs b/migration-engine/connectors/sql-migration-connector/src/sql_renderer/common.rs index 9d836452f820..2d045d103e3e 100644 --- a/migration-engine/connectors/sql-migration-connector/src/sql_renderer/common.rs +++ b/migration-engine/connectors/sql-migration-connector/src/sql_renderer/common.rs @@ -81,10 +81,10 @@ where { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { - Quoted::Double(inner) => write!(f, "\"{}\"", inner), - Quoted::Single(inner) => write!(f, "'{}'", inner), - Quoted::Backticks(inner) => write!(f, "`{}`", inner), - Quoted::SquareBrackets(inner) => write!(f, "[{}]", inner), + Quoted::Double(inner) => write!(f, "\"{inner}\""), + Quoted::Single(inner) => write!(f, "'{inner}'"), + Quoted::Backticks(inner) => write!(f, "`{inner}`"), + Quoted::SquareBrackets(inner) => write!(f, "[{inner}]"), } } } @@ -113,7 +113,7 @@ pub(crate) fn format_hex(bytes: &[u8], out: &mut String) { out.reserve(bytes.len() * 2); for byte in bytes { - write!(out, "{:02x}", byte).expect("failed to hex format a byte"); + write!(out, "{byte:02x}").expect("failed to hex format a byte"); } } @@ -131,12 +131,12 @@ where let mut out = String::with_capacity(sep.len() * lower_bound); if let Some(first_item) = self.next() { - write!(out, "{}", first_item).unwrap(); + write!(out, "{first_item}").unwrap(); } for item in self { out.push_str(sep); - write!(out, "{}", item).unwrap(); + write!(out, "{item}").unwrap(); } out @@ -181,7 +181,7 @@ impl StatementRenderer { } pub(super) fn push_display(&mut self, d: &dyn std::fmt::Display) { - std::fmt::Write::write_fmt(&mut self.statement, format_args!("{}", d)).unwrap(); + std::fmt::Write::write_fmt(&mut self.statement, format_args!("{d}")).unwrap(); } } diff --git a/migration-engine/connectors/sql-migration-connector/src/sql_renderer/mssql_renderer.rs b/migration-engine/connectors/sql-migration-connector/src/sql_renderer/mssql_renderer.rs index 6cba2c94ebbd..e09024baf929 100644 --- a/migration-engine/connectors/sql-migration-connector/src/sql_renderer/mssql_renderer.rs +++ b/migration-engine/connectors/sql-migration-connector/src/sql_renderer/mssql_renderer.rs @@ -55,7 +55,7 @@ impl MssqlFlavour { .unwrap_or_default() }; - format!("{} {}{}{}", column_name, r#type, nullability, default) + format!("{column_name} {type}{nullability}{default}") } fn render_references(&self, foreign_key: sql::ForeignKeyWalker<'_>) -> String { @@ -215,7 +215,7 @@ impl SqlRenderer for MssqlFlavour { }) .join(",\n "); - format!(",\n {}", constraints) + format!(",\n {constraints}") } else { String::new() }; @@ -358,7 +358,7 @@ impl SqlRenderer for MssqlFlavour { fn render_rename_table(&self, namespace: Option<&str>, name: &str, new_name: &str) -> String { let ns = namespace.unwrap_or_else(|| self.schema_name()); - let with_schema = format!("{}.{}", ns, name); + let with_schema = format!("{ns}.{name}"); format!( "EXEC SP_RENAME N{}, N{}", @@ -466,13 +466,13 @@ fn render_column_type(column: sql::TableColumnWalker<'_>) -> Cow<'static, str> { fn format_u32_arg(arg: Option) -> String { match arg { None => "".to_string(), - Some(x) => format!("({})", x), + Some(x) => format!("({x})"), } } fn format_type_param(arg: Option) -> String { match arg { None => "".to_string(), - Some(MsSqlTypeParameter::Number(x)) => format!("({})", x), + Some(MsSqlTypeParameter::Number(x)) => format!("({x})"), Some(MsSqlTypeParameter::Max) => "(max)".to_string(), } } @@ -490,7 +490,7 @@ fn 
render_column_type(column: sql::TableColumnWalker<'_>) -> Cow<'static, str> { MsSqlType::SmallInt => "SMALLINT".into(), MsSqlType::Int => "INT".into(), MsSqlType::BigInt => "BIGINT".into(), - MsSqlType::Decimal(Some((p, s))) => format!("DECIMAL({p},{s})", p = p, s = s).into(), + MsSqlType::Decimal(Some((p, s))) => format!("DECIMAL({p},{s})").into(), MsSqlType::Decimal(None) => "DECIMAL".into(), MsSqlType::Money => "MONEY".into(), MsSqlType::SmallMoney => "SMALLMONEY".into(), diff --git a/migration-engine/connectors/sql-migration-connector/src/sql_renderer/mssql_renderer/alter_table.rs b/migration-engine/connectors/sql-migration-connector/src/sql_renderer/mssql_renderer/alter_table.rs index c83d12a29b83..65687b8b51e7 100644 --- a/migration-engine/connectors/sql-migration-connector/src/sql_renderer/mssql_renderer/alter_table.rs +++ b/migration-engine/connectors/sql-migration-connector/src/sql_renderer/mssql_renderer/alter_table.rs @@ -197,7 +197,7 @@ impl<'a> AlterTableConstructor<'a> { fn drop_column(&mut self, column_id: TableColumnId) { let name = self.renderer.quote(self.tables.previous.walk(column_id).name()); - self.drop_columns.push(format!("{}", name)); + self.drop_columns.push(format!("{name}")); } fn drop_and_recreate_column(&mut self, columns: Pair<TableColumnId>) { diff --git a/migration-engine/connectors/sql-migration-connector/src/sql_renderer/mysql_renderer.rs b/migration-engine/connectors/sql-migration-connector/src/sql_renderer/mysql_renderer.rs index feb692cf8b91..075ce4b5249f 100644 --- a/migration-engine/connectors/sql-migration-connector/src/sql_renderer/mysql_renderer.rs +++ b/migration-engine/connectors/sql-migration-connector/src/sql_renderer/mysql_renderer.rs @@ -123,7 +123,7 @@ impl SqlRenderer for MysqlFlavour { let mut rendered = format!("{}", self.quote(c.as_column().name())); if let Some(length) = c.length() { - write!(rendered, "({})", length).unwrap(); + write!(rendered, "({length})").unwrap(); } if let Some(sort_order) = c.sort_order() { @@ -142,7 +142,7 @@ impl SqlRenderer for MysqlFlavour { let column = tables.next.walk(*column_id); let col_sql = self.render_column(column); - lines.push(format!("ADD COLUMN {}", col_sql)); + lines.push(format!("ADD COLUMN {col_sql}")); } TableChange::DropColumn { column_id } => lines.push( sql_ddl::mysql::AlterTableClause::DropColumn { @@ -365,7 +365,7 @@ fn render_mysql_modify( .filter(|default| !default.is_empty_dbgenerated()) .map(|default| render_default(next_column, default)) .filter(|expr| !expr.is_empty()) - .map(|expression| format!(" DEFAULT {}", expression)) + .map(|expression| format!(" DEFAULT {expression}")) .unwrap_or_else(String::new); format!( @@ -390,7 +390,7 @@ fn render_column_type(column: TableColumnWalker<'_>) -> Cow<'static, str> { if let ColumnTypeFamily::Enum(enum_id) = column.column_type_family() { let variants: String = column.walk(*enum_id).values().map(Quoted::mysql_string).join(", "); - return format!("ENUM({})", variants).into(); + return format!("ENUM({variants})").into(); } if let ColumnTypeFamily::Unsupported(description) = &column.column_type().family { @@ -404,14 +404,14 @@ fn render_column_type(column: TableColumnWalker<'_>) -> Cow<'static, str> { fn render(input: Option<u32>) -> String { match input { None => "".to_string(), - Some(arg) => format!("({})", arg), + Some(arg) => format!("({arg})"), } } fn render_decimal(input: Option<(u32, u32)>) -> String { match input { None => "".to_string(), - Some((precision, scale)) =>
format!("({precision}, {scale})"), } } @@ -425,11 +425,11 @@ fn render_column_type(column: TableColumnWalker<'_>) -> Cow<'static, str> { MySqlType::Decimal(precision) => format!("DECIMAL{}", render_decimal(*precision)).into(), MySqlType::Float => "FLOAT".into(), MySqlType::Double => "DOUBLE".into(), - MySqlType::Bit(size) => format!("BIT({size})", size = size).into(), - MySqlType::Char(size) => format!("CHAR({size})", size = size).into(), - MySqlType::VarChar(size) => format!("VARCHAR({size})", size = size).into(), - MySqlType::Binary(size) => format!("BINARY({size})", size = size).into(), - MySqlType::VarBinary(size) => format!("VARBINARY({size})", size = size).into(), + MySqlType::Bit(size) => format!("BIT({size})").into(), + MySqlType::Char(size) => format!("CHAR({size})").into(), + MySqlType::VarChar(size) => format!("VARCHAR({size})").into(), + MySqlType::Binary(size) => format!("BINARY({size})").into(), + MySqlType::VarBinary(size) => format!("VARBINARY({size})").into(), MySqlType::TinyBlob => "TINYBLOB".into(), MySqlType::Blob => "BLOB".into(), MySqlType::MediumBlob => "MEDIUMBLOB".into(), @@ -509,7 +509,7 @@ fn render_default<'a>(column: TableColumnWalker<'a>, default: &'a DefaultValue) .and_then(MySqlType::timestamp_precision) .unwrap_or(3); - format!("CURRENT_TIMESTAMP({})", precision).into() + format!("CURRENT_TIMESTAMP({precision})").into() } DefaultKind::Value(PrismaValue::DateTime(dt)) if column.column_type_family().is_datetime() => { Quoted::mysql_string(dt.to_rfc3339()).to_string().into() diff --git a/migration-engine/connectors/sql-migration-connector/src/sql_renderer/postgres_renderer.rs b/migration-engine/connectors/sql-migration-connector/src/sql_renderer/postgres_renderer.rs index 4ee2090d1c26..deecfe7ba24e 100644 --- a/migration-engine/connectors/sql-migration-connector/src/sql_renderer/postgres_renderer.rs +++ b/migration-engine/connectors/sql-migration-connector/src/sql_renderer/postgres_renderer.rs @@ -29,7 +29,7 @@ impl PostgresFlavour { .default() .map(|d| render_default(d.inner(), &render_column_type(column, self))) .filter(|default| !default.is_empty()) - .map(|default| format!(" DEFAULT {}", default)) + .map(|default| format!(" DEFAULT {default}")) .unwrap_or_else(String::new); let identity_str = render_column_identity_str(column, self); @@ -270,11 +270,11 @@ impl SqlRenderer for PostgresFlavour { let column = schemas.next.walk(*column_id); let col_sql = self.render_column(column); - lines.push(format!("ADD COLUMN {}", col_sql)); + lines.push(format!("ADD COLUMN {col_sql}")); } TableChange::DropColumn { column_id } => { let name = self.quote(schemas.previous.walk(*column_id).name()); - lines.push(format!("DROP COLUMN {}", name)); + lines.push(format!("DROP COLUMN {name}")); } TableChange::AlterColumn(AlterColumn { column_id, @@ -296,10 +296,10 @@ impl SqlRenderer for PostgresFlavour { let columns = schemas.walk(*column_id); let name = self.quote(columns.previous.name()); - lines.push(format!("DROP COLUMN {}", name)); + lines.push(format!("DROP COLUMN {name}")); let col_sql = self.render_column(columns.next); - lines.push(format!("ADD COLUMN {}", col_sql)); + lines.push(format!("ADD COLUMN {col_sql}")); } }; } @@ -640,14 +640,14 @@ fn render_column_type_cockroachdb(col: TableColumnWalker<'_>) -> Cow<'static, st fn render_optional_args(input: Option) -> String { match input { None => "".to_string(), - Some(arg) => format!("({})", arg), + Some(arg) => format!("({arg})"), } } fn render_decimal_args(input: Option<(u32, u32)>) -> String { match input { None => 
"".to_string(), - Some((precision, scale)) => format!("({},{})", precision, scale), + Some((precision, scale)) => format!("({precision},{scale})"), } } @@ -690,7 +690,7 @@ fn render_alter_column( let table_name = QuotedWithPrefix::pg_from_table_walker(columns.previous.table()); let column_name = Quoted::postgres_ident(columns.previous.name()); - let alter_column_prefix = format!("ALTER COLUMN {}", column_name); + let alter_column_prefix = format!("ALTER COLUMN {column_name}"); for step in steps { match step { @@ -734,7 +734,7 @@ fn render_alter_column( ) .to_lowercase(); - before_statements.push(format!("CREATE SEQUENCE {}", sequence_name)); + before_statements.push(format!("CREATE SEQUENCE {sequence_name}")); clauses.push(format!( "{prefix} SET DEFAULT {default}", @@ -744,8 +744,6 @@ fn render_alter_column( after_statements.push(format!( "ALTER SEQUENCE {sequence_name} OWNED BY {table_name}.{column_name}", - table_name = table_name, - column_name = column_name, )); } } diff --git a/migration-engine/connectors/sql-migration-connector/src/sql_renderer/sqlite_renderer.rs b/migration-engine/connectors/sql-migration-connector/src/sql_renderer/sqlite_renderer.rs index 30244bd0b007..51d72194bf11 100644 --- a/migration-engine/connectors/sql-migration-connector/src/sql_renderer/sqlite_renderer.rs +++ b/migration-engine/connectors/sql-migration-connector/src/sql_renderer/sqlite_renderer.rs @@ -217,7 +217,7 @@ impl SqlRenderer for SqliteFlavour { } fn render_rename_table(&self, _namespace: Option<&str>, name: &str, new_name: &str) -> String { - format!(r#"ALTER TABLE "{}" RENAME TO "{}""#, name, new_name) + format!(r#"ALTER TABLE "{name}" RENAME TO "{new_name}""#) } fn render_drop_view(&self, view: ViewWalker<'_>) -> String { diff --git a/migration-engine/core/src/commands/dev_diagnostic.rs b/migration-engine/core/src/commands/dev_diagnostic.rs index 60dfffa0c56e..457d859fbfe6 100644 --- a/migration-engine/core/src/commands/dev_diagnostic.rs +++ b/migration-engine/core/src/commands/dev_diagnostic.rs @@ -50,13 +50,12 @@ fn check_for_reset_conditions(output: &DiagnoseMigrationHistoryOutput) -> Option let mut reset_reasons = Vec::new(); for failed_migration_name in &output.failed_migration_names { - reset_reasons.push(format!("The migration `{}` failed.", failed_migration_name)); + reset_reasons.push(format!("The migration `{failed_migration_name}` failed.")); } for edited_migration_name in &output.edited_migration_names { reset_reasons.push(format!( - "The migration `{}` was modified after it was applied.", - edited_migration_name + "The migration `{edited_migration_name}` was modified after it was applied." )) } @@ -77,7 +76,7 @@ fn check_for_reset_conditions(output: &DiagnoseMigrationHistoryOutput) -> Option format!(" Last common migration: `{}`. 
Migrations applied to the database but absent from the migrations directory are: {}", last_common_migration_name, unpersisted_migration_names.join(", ")) }).unwrap_or_else(String::new); - reset_reasons.push(format!("The migrations recorded in the database diverge from the local migrations directory.{}", details)) + reset_reasons.push(format!("The migrations recorded in the database diverge from the local migrations directory.{details}")) }, Some(HistoryDiagnostic::MigrationsDirectoryIsBehind { unpersisted_migration_names}) => reset_reasons.push( format!("The following migration(s) are applied to the database but missing from the local migrations directory: {}", unpersisted_migration_names.join(", ")), diff --git a/migration-engine/core/src/commands/diagnose_migration_history.rs b/migration-engine/core/src/commands/diagnose_migration_history.rs index 100b99dcf5c6..b1c21e63d767 100644 --- a/migration-engine/core/src/commands/diagnose_migration_history.rs +++ b/migration-engine/core/src/commands/diagnose_migration_history.rs @@ -310,7 +310,7 @@ impl DriftDiagnostic { pub fn unwrap_drift_detected(self) -> String { match self { DriftDiagnostic::DriftDetected { summary } => summary, - other => panic!("unwrap_drift_detected on {:?}", other), + other => panic!("unwrap_drift_detected on {other:?}"), } } } diff --git a/migration-engine/core/src/commands/diff.rs b/migration-engine/core/src/commands/diff.rs index 5877c140c9fe..d9d2ed99f06a 100644 --- a/migration-engine/core/src/commands/diff.rs +++ b/migration-engine/core/src/commands/diff.rs @@ -101,7 +101,7 @@ fn namespaces_and_preview_features_from_diff_targets( let schema_str: String = std::fs::read_to_string(schema).map_err(|err| { ConnectorError::from_source_with_context( err, - format!("Error trying to read Prisma schema file at `{}`.", schema).into_boxed_str(), + format!("Error trying to read Prisma schema file at `{schema}`.").into_boxed_str(), ) })?; @@ -136,7 +136,7 @@ async fn json_rpc_diff_target_to_connector( std::fs::read_to_string(schema_path).map_err(|err| { ConnectorError::from_source_with_context( err, - format!("Error trying to read Prisma schema file at `{}`.", schema_path).into_boxed_str(), + format!("Error trying to read Prisma schema file at `{schema_path}`.").into_boxed_str(), ) }) }; diff --git a/migration-engine/core/src/lib.rs b/migration-engine/core/src/lib.rs index 2696e477bc03..2aff7a7bf9ed 100644 --- a/migration-engine/core/src/lib.rs +++ b/migration-engine/core/src/lib.rs @@ -156,8 +156,7 @@ fn connector_for_provider(provider: &str) -> CoreResult<Box<dyn MigrationConnector>> { Err(CoreError::from_msg(format!( - "`{}` is not a supported connector.", - provider + "`{provider}` is not a supported connector."
))), } } diff --git a/migration-engine/json-rpc-api-build/src/error.rs b/migration-engine/json-rpc-api-build/src/error.rs index 36d14cac988e..ac738da160d7 100644 --- a/migration-engine/json-rpc-api-build/src/error.rs +++ b/migration-engine/json-rpc-api-build/src/error.rs @@ -38,7 +38,7 @@ impl Debug for Error { f.write_str(" ")?; } - f.write_fmt(format_args!("Caused by: {}\n", source))?; + f.write_fmt(format_args!("Caused by: {source}\n"))?; indentation_levels += 1; src = source.source(); diff --git a/migration-engine/json-rpc-api-build/src/lib.rs b/migration-engine/json-rpc-api-build/src/lib.rs index 3ab6525152e8..bbf9c6fb0ca6 100644 --- a/migration-engine/json-rpc-api-build/src/lib.rs +++ b/migration-engine/json-rpc-api-build/src/lib.rs @@ -12,7 +12,7 @@ pub fn generate_rust_modules(out_dir: &Path) -> CrateResult { let api_defs_root = concat!(env!("CARGO_MANIFEST_DIR"), "/methods"); // https://doc.rust-lang.org/cargo/reference/build-scripts.html - println!("cargo:rerun-if-changed={}", api_defs_root); + println!("cargo:rerun-if-changed={api_defs_root}"); let entries = std::fs::read_dir(api_defs_root)?; let mut api = Api::default(); @@ -44,21 +44,18 @@ fn validate(api: &Api) { for (method_name, method) in &api.methods { if !shape_exists(&method.request_shape, api) { - errs.push(format!("Request shape for {} does not exist", method_name)) + errs.push(format!("Request shape for {method_name} does not exist")) } if !shape_exists(&method.response_shape, api) { - errs.push(format!("Response shape for {} does not exist", method_name)) + errs.push(format!("Response shape for {method_name} does not exist")) } } for (record_name, record_shape) in &api.record_shapes { for (field_name, field) in &record_shape.fields { if !shape_exists(&field.shape, api) { - errs.push(format!( - "Field shape for {}.{} does not exist.", - record_name, field_name - )) + errs.push(format!("Field shape for {record_name}.{field_name} does not exist.")) } } } @@ -68,8 +65,7 @@ fn validate(api: &Api) { if let Some(shape) = variant.shape.as_ref() { if !shape_exists(shape, api) { errs.push(format!( - "Enum variant shape for {}.{} does not exist.", - enum_name, variant_name + "Enum variant shape for {enum_name}.{variant_name} does not exist." )) } } @@ -78,7 +74,7 @@ fn validate(api: &Api) { if !errs.is_empty() { for err in errs { - eprintln!("{}", err); + eprintln!("{err}"); } std::process::exit(1); } diff --git a/migration-engine/json-rpc-api-build/src/rust_crate.rs b/migration-engine/json-rpc-api-build/src/rust_crate.rs index d05ca815d06b..f4fbd387d550 100644 --- a/migration-engine/json-rpc-api-build/src/rust_crate.rs +++ b/migration-engine/json-rpc-api-build/src/rust_crate.rs @@ -28,7 +28,7 @@ pub(crate) fn generate_rust_crate(out_dir: &Path, api: &Api) -> CrateResult { if let Some(description) = &method.description { for line in description.lines() { - writeln!(librs, "//! {}", line)?; + writeln!(librs, "//! 
{line}")?; } } } @@ -38,7 +38,7 @@ pub(crate) fn generate_rust_crate(out_dir: &Path, api: &Api) -> CrateResult { )?; for method_name in &method_names { - writeln!(librs, " \"{}\",", method_name)?; + writeln!(librs, " \"{method_name}\",")?; } writeln!(librs, "];")?; @@ -68,7 +68,7 @@ fn generate_types_rs(mut file: impl std::io::Write, api: &Api) -> CrateResult { for (type_name, record_type) in &api.record_shapes { if let Some(description) = &record_type.description { for line in description.lines() { - writeln!(file, "/// {}", line)?; + writeln!(file, "/// {line}")?; } } @@ -89,23 +89,23 @@ fn generate_types_rs(mut file: impl std::io::Write, api: &Api) -> CrateResult { for (field_name, field) in &record_type.fields { if let Some(description) = &field.description { for line in description.lines() { - writeln!(file, " /// {}", line)?; + writeln!(file, " /// {line}")?; } } let type_name = rustify_type_name(&field.shape); let type_name: Cow<'static, str> = match (field.is_list, field.is_nullable) { - (true, true) => format!("Option>", type_name).into(), - (false, true) => format!("Option<{}>", type_name).into(), - (true, false) => format!("Vec<{}>", type_name).into(), + (true, true) => format!("Option>").into(), + (false, true) => format!("Option<{type_name}>").into(), + (true, false) => format!("Vec<{type_name}>").into(), (false, false) => type_name, }; let field_name_sc = field_name.to_snake_case(); if &field_name_sc != field_name { - writeln!(file, " ///\n /// JSON name: {}", field_name)?; - writeln!(file, " #[serde(rename = \"{}\")]", field_name)?; + writeln!(file, " ///\n /// JSON name: {field_name}")?; + writeln!(file, " #[serde(rename = \"{field_name}\")]")?; } - writeln!(file, " pub {}: {},", field_name_sc, type_name)?; + writeln!(file, " pub {field_name_sc}: {type_name},")?; } writeln!(file, "}}\n")?; } @@ -113,7 +113,7 @@ fn generate_types_rs(mut file: impl std::io::Write, api: &Api) -> CrateResult { for (type_name, variants) in &api.enum_shapes { if let Some(description) = &variants.description { for line in description.lines() { - writeln!(file, "/// {}", line)?; + writeln!(file, "/// {line}")?; } } @@ -126,21 +126,21 @@ fn generate_types_rs(mut file: impl std::io::Write, api: &Api) -> CrateResult { for (variant_name, variant) in &variants.variants { if let Some(description) = &variant.description { for line in description.lines() { - writeln!(file, "/// {}", line)?; + writeln!(file, "/// {line}")?; } } let cc_variant_name = variant_name.to_camel_case(); if cc_variant_name.as_str() != variant_name { - writeln!(file, "///\n/// JSON name: {}", variant_name)?; - writeln!(file, "#[serde(rename = \"{}\")]", variant_name)?; + writeln!(file, "///\n/// JSON name: {variant_name}")?; + writeln!(file, "#[serde(rename = \"{variant_name}\")]")?; } if let Some(shape) = &variant.shape { - writeln!(file, " {}({}),", cc_variant_name, rustify_type_name(shape))?; + writeln!(file, " {cc_variant_name}({}),", rustify_type_name(shape))?; } else { - writeln!(file, " {},", cc_variant_name)?; + writeln!(file, " {cc_variant_name},")?; } } diff --git a/migration-engine/migration-engine-tests/build.rs b/migration-engine/migration-engine-tests/build.rs index a37cddf1be53..cb6ebbb6bff0 100644 --- a/migration-engine/migration-engine-tests/build.rs +++ b/migration-engine/migration-engine-tests/build.rs @@ -30,7 +30,7 @@ fn find_all_schemas(prefix: &str, all_schemas: &mut Vec) { let entry = entry.unwrap(); let file_name = entry.file_name(); let file_name = file_name.to_str().unwrap(); - let entry_path = 
format!("{}/{}", prefix, file_name); + let entry_path = format!("{prefix}/{file_name}"); let file_type = entry.file_type().unwrap(); if file_name == "." || file_name == ".." { diff --git a/migration-engine/migration-engine-tests/src/assertions.rs b/migration-engine/migration-engine-tests/src/assertions.rs index 35679ad5aad5..dd1ed8ac6c06 100644 --- a/migration-engine/migration-engine-tests/src/assertions.rs +++ b/migration-engine/migration-engine-tests/src/assertions.rs @@ -248,8 +248,8 @@ impl SchemaAssertion { self.print_context(); println!( "The schema was expected to have {} tables, but {} were found.", - format!("{}", expected_count).green(), - format!("{}", actual_count).red() + format!("{expected_count}").green(), + format!("{actual_count}").red() ); print_tables(&self.schema); @@ -268,8 +268,8 @@ impl SchemaAssertion { self.print_context(); println!( "The schema was expected to have {} views, but {} were found.", - format!("{}", expected_count).green(), - format!("{}", actual_count).red() + format!("{expected_count}").green(), + format!("{actual_count}").red() ); println!("\n {}", "Views in database:".italic()); @@ -363,15 +363,13 @@ impl<'a> TableAssertion<'a> { assert!( columns_count == n, "Assertion failed. Expected {n} columns, found {columns_count}.", - n = n, - columns_count = columns_count, ); self } pub fn assert_foreign_keys_count(self, n: usize) -> Self { let fk_count = self.table.foreign_key_count(); - assert!(fk_count == n, "Expected {} foreign keys, found {}.", n, fk_count); + assert!(fk_count == n, "Expected {n} foreign keys, found {fk_count}."); self } @@ -453,9 +451,7 @@ impl<'a> TableAssertion<'a> { assert!( actual_count == count, - "Assertion failed: expected {} columns, found {}", - count, - actual_count, + "Assertion failed: expected {count} columns, found {actual_count}", ); self @@ -487,7 +483,7 @@ impl<'a> TableAssertion<'a> { #[track_caller] pub fn assert_indexes_count(self, n: usize) -> Self { let idx_count = self.table.indexes().filter(|idx| !idx.is_primary_key()).count(); - assert!(idx_count == n, "Expected {} indexes, found {}.", n, idx_count); + assert!(idx_count == n, "Expected {n} indexes, found {idx_count}."); self } @@ -520,7 +516,7 @@ impl<'a> TableAssertion<'a> { { self } else { - panic!("Could not find index with name {} and correct type", name); + panic!("Could not find index with name {name} and correct type"); } } } @@ -556,9 +552,7 @@ impl<'a> ColumnAssertion<'a> { assert!( self.column.default().map(|d| d.kind()) == expected.as_ref(), - "Assertion failed. Expected default: {:?}, but found {:?}", - expected, - found + "Assertion failed. Expected default: {expected:?}, but found {found:?}" ); self @@ -572,9 +566,7 @@ impl<'a> ColumnAssertion<'a> { assert!( found == expected, - "Assertion failed. Expected default constraint name: {:?}, but found {:?}", - expected, - found + "Assertion failed. Expected default constraint name: {expected:?}, but found {found:?}" ); this @@ -613,10 +605,7 @@ impl<'a> ColumnAssertion<'a> { expected, val ), - other => panic!( - "Assertion failed. Expected default: {:?}, but found {:?}", - expected, other - ), + other => panic!("Assertion failed. Expected default: {expected:?}, but found {other:?}"), } self @@ -633,10 +622,7 @@ impl<'a> ColumnAssertion<'a> { expected, val ), - other => panic!( - "Assertion failed. Expected default: {:?}, but found {:?}", - expected, other - ), + other => panic!("Assertion failed. 
Expected default: {expected:?}, but found {other:?}"), } self @@ -682,8 +668,7 @@ impl<'a> ColumnAssertion<'a> { assert!( found == &sql_schema_describer::ColumnTypeFamily::BigInt, - "Assertion failed. Expected a BigInt column, got {:?}.", - found + "Assertion failed. Expected a BigInt column, got {found:?}." ); self @@ -694,8 +679,7 @@ impl<'a> ColumnAssertion<'a> { assert!( found == &sql_schema_describer::ColumnTypeFamily::Binary, - "Assertion failed. Expected a bytes column, got {:?}.", - found + "Assertion failed. Expected a bytes column, got {found:?}." ); self @@ -706,8 +690,7 @@ impl<'a> ColumnAssertion<'a> { assert!( found == &sql_schema_describer::ColumnTypeFamily::Decimal, - "Assertion failed. Expected a decimal column, got {:?}.", - found + "Assertion failed. Expected a decimal column, got {found:?}." ); self @@ -719,8 +702,7 @@ impl<'a> ColumnAssertion<'a> { assert!( matches!(found, sql_schema_describer::ColumnTypeFamily::Enum(_)), - "Assertion failed. Expected an enum column, got {:?}.", - found + "Assertion failed. Expected an enum column, got {found:?}." ); self @@ -731,8 +713,7 @@ impl<'a> ColumnAssertion<'a> { assert!( found == &sql_schema_describer::ColumnTypeFamily::String, - "Assertion failed. Expected a string column, got {:?}.", - found + "Assertion failed. Expected a string column, got {found:?}." ); self @@ -743,8 +724,7 @@ impl<'a> ColumnAssertion<'a> { assert!( found == &sql_schema_describer::ColumnTypeFamily::Int, - "Assertion failed. Expected an integer column, got {:?}.", - found + "Assertion failed. Expected an integer column, got {found:?}." ); self @@ -838,7 +818,7 @@ impl<'a> PrimaryKeyAssertion<'a> { .pk .columns() .find(|c| c.name() == column_name) - .unwrap_or_else(|| panic!("Could not find column {}", column_name)); + .unwrap_or_else(|| panic!("Could not find column {column_name}")); f(IndexColumnAssertion { length: col.length(), @@ -909,9 +889,7 @@ impl<'a> ForeignKeyAssertion<'a> { assert!( self.is_same_table_name(self.fk.referenced_table().name(), table) && self.fk.referenced_columns().map(|c| c.name()).collect::>() == columns, - r#"Assertion failed. Expected reference to "{}" ({:?})."#, - table, - columns, + r#"Assertion failed. Expected reference to "{table}" ({columns:?})."#, ); self diff --git a/migration-engine/migration-engine-tests/src/assertions/quaint_result_set_ext.rs b/migration-engine/migration-engine-tests/src/assertions/quaint_result_set_ext.rs index 7652047ad5ef..3acb21c9af25 100644 --- a/migration-engine/migration-engine-tests/src/assertions/quaint_result_set_ext.rs +++ b/migration-engine/migration-engine-tests/src/assertions/quaint_result_set_ext.rs @@ -42,10 +42,7 @@ impl<'a> RowAssertion<'a> { assert_eq!( actual_value.map(|v| v.as_ref()), Some(expected_value), - "Value assertion failed for {}. Expected: {:?}, got: {:?}", - column_name, - expected_value, - actual_value, + "Value assertion failed for {column_name}. Expected: {expected_value:?}, got: {actual_value:?}", ); self @@ -57,10 +54,7 @@ impl<'a> RowAssertion<'a> { assert_eq!( actual_value, Some(expected_value), - "Value assertion failed for {}. Expected: {:?}, got: {:?}", - column_name, - expected_value, - actual_value, + "Value assertion failed for {column_name}. Expected: {expected_value:?}, got: {actual_value:?}", ); self @@ -76,10 +70,7 @@ impl<'a> RowAssertion<'a> { assert!( actual_value == expected_value, - "Value assertion failed for {}. Expected: {:?}, got: {:?}", - column_name, - expected_value, - actual_value, + "Value assertion failed for {column_name}. 
Expected: {expected_value:?}, got: {actual_value:?}", ); self @@ -87,7 +78,7 @@ impl<'a> RowAssertion<'a> { pub fn assert_null_value(self, column_name: &str) -> Self { if !self.0.get(column_name).expect("not in result set").is_null() { - panic!("Expected a null value for {}, but got something else.", column_name) + panic!("Expected a null value for {column_name}, but got something else.") } self @@ -104,8 +95,7 @@ impl<'a> RowAssertion<'a> { assert_eq!( value_text, expected_value, - "Value assertion failed for {}. Expected: {:?}, got: {:?}", - column_name, expected_value, value_text, + "Value assertion failed for {column_name}. Expected: {expected_value:?}, got: {value_text:?}", ); self @@ -116,10 +106,7 @@ impl<'a> RowAssertion<'a> { assert!( actual_value == Some(expected_value), - "Value assertion failed for {}. Expected: {:?}, got: {:?}", - column_name, - expected_value, - actual_value, + "Value assertion failed for {column_name}. Expected: {expected_value:?}, got: {actual_value:?}", ); self diff --git a/migration-engine/migration-engine-tests/src/commands/apply_migrations.rs b/migration-engine/migration-engine-tests/src/commands/apply_migrations.rs index 527455db2d70..60e288d224dc 100644 --- a/migration-engine/migration-engine-tests/src/commands/apply_migrations.rs +++ b/migration-engine/migration-engine-tests/src/commands/apply_migrations.rs @@ -78,9 +78,7 @@ impl<'a> ApplyMigrationsAssertion<'a> { assert!( found_names == names, - "Assertion failed. The applied migrations do not match the expectations. ({:?} vs {:?})", - found_names, - names + "Assertion failed. The applied migrations do not match the expectations. ({found_names:?} vs {names:?})" ); self } diff --git a/migration-engine/migration-engine-tests/src/commands/create_migration.rs b/migration-engine/migration-engine-tests/src/commands/create_migration.rs index 061532fdf56e..34a4bafc6d8c 100644 --- a/migration-engine/migration-engine-tests/src/commands/create_migration.rs +++ b/migration-engine/migration-engine-tests/src/commands/create_migration.rs @@ -98,9 +98,7 @@ impl<'a> CreateMigrationAssertion<'a> { assert!( // the lock file is counted as an entry expected_count == count, - "Assertion failed. Expected {expected} migrations in the migrations directory, found {actual}.", - expected = expected_count, - actual = count + "Assertion failed. Expected {expected_count} migrations in the migrations directory, found {count}." ); self @@ -131,10 +129,7 @@ impl<'a> CreateMigrationAssertion<'a> { assertions(assertion); } - None => panic!( - "Assertion error. Could not find migration with name matching `{}`", - name_matcher - ), + None => panic!("Assertion error. 
Could not find migration with name matching `{name_matcher}`"), } self @@ -168,7 +163,7 @@ impl<'a> CreateMigrationAssertion<'a> { }; let mut file = std::fs::File::create(&migration_script_path).unwrap(); - write!(file, "{}", new_contents).unwrap(); + write!(file, "{new_contents}").unwrap(); self } @@ -187,7 +182,7 @@ impl MigrationAssertion<'_> { pub fn expect_contents(self, expected_contents: expect_test::Expect) -> Self { let migration_file_path = self.path.join("migration.sql"); let contents: String = std::fs::read_to_string(&migration_file_path) - .map_err(|_| format!("Trying to read migration file at {:?}", migration_file_path)) + .map_err(|_| format!("Trying to read migration file at {migration_file_path:?}")) .unwrap(); expected_contents.assert_eq(&contents); @@ -198,7 +193,7 @@ impl MigrationAssertion<'_> { pub fn assert_contents(self, expected_contents: &str) -> Self { let migration_file_path = self.path.join("migration.sql"); let contents: String = std::fs::read_to_string(&migration_file_path) - .map_err(|_| format!("Trying to read migration file at {:?}", migration_file_path)) + .map_err(|_| format!("Trying to read migration file at {migration_file_path:?}")) .unwrap(); assert_eq!(expected_contents, contents); diff --git a/migration-engine/migration-engine-tests/src/commands/list_migration_directories.rs b/migration-engine/migration-engine-tests/src/commands/list_migration_directories.rs index d969670af958..ad65d5d0f6b2 100644 --- a/migration-engine/migration-engine-tests/src/commands/list_migration_directories.rs +++ b/migration-engine/migration-engine-tests/src/commands/list_migration_directories.rs @@ -51,9 +51,7 @@ impl<'a> ListMigrationDirectoriesAssertion<'a> { assert!( found_names == names, - "Assertion failed. The listed migrations do not match the expectations. ({:?} vs {:?})", - found_names, - names + "Assertion failed. The listed migrations do not match the expectations. 
({found_names:?} vs {names:?})" ); self diff --git a/migration-engine/migration-engine-tests/src/commands/schema_push.rs b/migration-engine/migration-engine-tests/src/commands/schema_push.rs index 1b9e70513843..8d8ada2a62a1 100644 --- a/migration-engine/migration-engine-tests/src/commands/schema_push.rs +++ b/migration-engine/migration-engine-tests/src/commands/schema_push.rs @@ -163,7 +163,7 @@ impl SchemaPushAssertion { println!("\nWarnings that were {}:", "found and expected".bold()); good.iter().for_each(|good| { - println!("\t - {}", good); + println!("\t - {good}"); }); panic!(); @@ -258,7 +258,7 @@ impl SchemaPushAssertion { println!("\nErrors that were {}:", "found and expected".bold()); good.iter().for_each(|good| { - println!("\t - {}", good); + println!("\t - {good}"); }); panic!(); diff --git a/migration-engine/migration-engine-tests/src/multi_engine_test_api.rs b/migration-engine/migration-engine-tests/src/multi_engine_test_api.rs index b57b7c9144e1..cca6d9bec3cf 100644 --- a/migration-engine/migration-engine-tests/src/multi_engine_test_api.rs +++ b/migration-engine/migration-engine-tests/src/multi_engine_test_api.rs @@ -246,7 +246,7 @@ impl TestApi { .args .preview_features() .iter() - .map(|pf| format!(r#""{}""#, pf)) + .map(|pf| format!(r#""{pf}""#)) .collect(); let preview_feature_string = if preview_features.is_empty() { @@ -257,9 +257,8 @@ impl TestApi { let generator_block = format!( r#"generator client {{ - provider = "prisma-client-js"{} - }}"#, - preview_feature_string + provider = "prisma-client-js"{preview_feature_string} + }}"# ); generator_block } diff --git a/migration-engine/migration-engine-tests/src/test_api.rs b/migration-engine/migration-engine-tests/src/test_api.rs index 6a5f52638c4e..11b93d29056b 100644 --- a/migration-engine/migration-engine-tests/src/test_api.rs +++ b/migration-engine/migration-engine-tests/src/test_api.rs @@ -350,7 +350,7 @@ impl TestApi { .args .datasource_block(self.root.args.database_url(), &used_params, preview_features); - write!(out, "{}", ds_block).unwrap() + write!(out, "{ds_block}").unwrap() } pub fn generator_block(&self) -> String { @@ -361,17 +361,16 @@ impl TestApi { .root .preview_features() .iter() - .map(|f| format!(r#""{}""#, f)) + .map(|f| format!(r#""{f}""#)) .join(", "); - format!("\npreviewFeatures = [{}]", features) + format!("\npreviewFeatures = [{features}]") }; let generator_block = format!( r#"generator client {{ - provider = "prisma-client-js"{} - }}"#, - preview_feature_string + provider = "prisma-client-js"{preview_feature_string} + }}"# ); generator_block } @@ -437,12 +436,12 @@ where let mut out = String::with_capacity(sep.len() * lower_bound); if let Some(first_item) = self.next() { - write!(out, "{}", first_item).unwrap(); + write!(out, "{first_item}").unwrap(); } for item in self { out.push_str(sep); - write!(out, "{}", item).unwrap(); + write!(out, "{item}").unwrap(); } out diff --git a/migration-engine/migration-engine-tests/tests/create_migration/create_migration_tests.rs b/migration-engine/migration-engine-tests/tests/create_migration/create_migration_tests.rs index 1f741fc5b245..ea2ea0eeee66 100644 --- a/migration-engine/migration-engine-tests/tests/create_migration/create_migration_tests.rs +++ b/migration-engine/migration-engine-tests/tests/create_migration/create_migration_tests.rs @@ -227,8 +227,8 @@ fn bad_migrations_should_make_the_command_fail_with_a_good_error(api: TestApi) { std::fs::create_dir(&migration_directory).unwrap(); let migration_file_path = 
migration_directory.join("migration.sql"); let script = "this is not valid SQL"; - let mut file = std::fs::File::create(&migration_file_path).unwrap(); - write!(file, "{}", script).unwrap(); + let mut file = std::fs::File::create(migration_file_path).unwrap(); + write!(file, "{script}").unwrap(); let error = api.create_migration("create-cats", &dm, &dir).send_unwrap_err(); diff --git a/migration-engine/migration-engine-tests/tests/errors/database_access_denied.rs b/migration-engine/migration-engine-tests/tests/errors/database_access_denied.rs index c11216b8e598..05ef4ce00e2f 100644 --- a/migration-engine/migration-engine-tests/tests/errors/database_access_denied.rs +++ b/migration-engine/migration-engine-tests/tests/errors/database_access_denied.rs @@ -18,10 +18,9 @@ fn database_access_denied_must_return_a_proper_error_in_rpc(api: TestApi) { r#" datasource db {{ provider = "mysql" - url = "{}" + url = "{url}" }} "#, - url, ); let error = tok(connection_error(dm)); diff --git a/migration-engine/migration-engine-tests/tests/errors/error_tests.rs b/migration-engine/migration-engine-tests/tests/errors/error_tests.rs index a2b814ef056a..1316dd91bf8d 100644 --- a/migration-engine/migration-engine-tests/tests/errors/error_tests.rs +++ b/migration-engine/migration-engine-tests/tests/errors/error_tests.rs @@ -29,10 +29,9 @@ fn authentication_failure_must_return_a_known_error_on_postgres(api: TestApi) { r#" datasource db {{ provider = "postgres" - url = "{}" + url = "{db_url}" }} - "#, - db_url + "# ); let error = tok(connection_error(dm)); @@ -43,7 +42,7 @@ fn authentication_failure_must_return_a_known_error_on_postgres(api: TestApi) { let json_error = serde_json::to_value(&error.to_user_facing()).unwrap(); let expected = json!({ "is_panic": false, - "message": format!("Authentication failed against database server at `{host}`, the provided database credentials for `postgres` are not valid.\n\nPlease make sure to provide valid database credentials for the database server at `{host}`.", host = host), + "message": format!("Authentication failed against database server at `{host}`, the provided database credentials for `postgres` are not valid.\n\nPlease make sure to provide valid database credentials for the database server at `{host}`."), "meta": { "database_user": user, "database_host": host, @@ -64,10 +63,9 @@ fn authentication_failure_must_return_a_known_error_on_mysql(api: TestApi) { r#" datasource db {{ provider = "mysql" - url = "{}" + url = "{url}" }} - "#, - url + "# ); let error = tok(connection_error(dm)); @@ -78,7 +76,7 @@ fn authentication_failure_must_return_a_known_error_on_mysql(api: TestApi) { let json_error = serde_json::to_value(&error.to_user_facing()).unwrap(); let expected = json!({ "is_panic": false, - "message": format!("Authentication failed against database server at `{host}`, the provided database credentials for `{user}` are not valid.\n\nPlease make sure to provide valid database credentials for the database server at `{host}`.", host = host, user = user), + "message": format!("Authentication failed against database server at `{host}`, the provided database credentials for `{user}` are not valid.\n\nPlease make sure to provide valid database credentials for the database server at `{host}`."), "meta": { "database_user": user, "database_host": host, @@ -99,10 +97,9 @@ fn unreachable_database_must_return_a_proper_error_on_mysql(api: TestApi) { r#" datasource db {{ provider = "mysql" - url = "{}" + url = "{url}" }} - "#, - url + "# ); let error = tok(connection_error(dm)); @@ 
-113,7 +110,7 @@ fn unreachable_database_must_return_a_proper_error_on_mysql(api: TestApi) { let json_error = serde_json::to_value(&error.to_user_facing()).unwrap(); let expected = json!({ "is_panic": false, - "message": format!("Can't reach database server at `{host}`:`{port}`\n\nPlease make sure your database server is running at `{host}`:`{port}`.", host = host, port = port), + "message": format!("Can't reach database server at `{host}`:`{port}`\n\nPlease make sure your database server is running at `{host}`:`{port}`."), "meta": { "database_host": host, "database_port": port, @@ -134,10 +131,9 @@ fn unreachable_database_must_return_a_proper_error_on_postgres(api: TestApi) { r#" datasource db {{ provider = "postgres" - url = "{}" + url = "{url}" }} - "#, - url + "# ); let error = tok(connection_error(dm)); @@ -148,7 +144,7 @@ fn unreachable_database_must_return_a_proper_error_on_postgres(api: TestApi) { let json_error = serde_json::to_value(&error.to_user_facing()).unwrap(); let expected = json!({ "is_panic": false, - "message": format!("Can't reach database server at `{host}`:`{port}`\n\nPlease make sure your database server is running at `{host}`:`{port}`.", host = host, port = port), + "message": format!("Can't reach database server at `{host}`:`{port}`\n\nPlease make sure your database server is running at `{host}`:`{port}`."), "meta": { "database_host": host, "database_port": port, @@ -164,16 +160,15 @@ fn database_does_not_exist_must_return_a_proper_error(api: TestApi) { let mut url: Url = api.connection_string().parse().unwrap(); let database_name = "notmydatabase"; - url.set_path(&format!("/{}", database_name)); + url.set_path(&format!("/{database_name}")); let dm = format!( r#" datasource db {{ provider = "mysql" - url = "{}" + url = "{url}" }} - "#, - url + "# ); let error = tok(connection_error(dm)); @@ -240,10 +235,9 @@ fn connections_to_system_databases_must_be_rejected(api: TestApi) { r#" datasource db {{ provider = "mysql" - url = "{}" + url = "{url}" }} - "#, - url + "# ); // "mysql" is the default in Quaint. @@ -254,7 +248,7 @@ fn connections_to_system_databases_must_be_rejected(api: TestApi) { let expected = json!({ "is_panic": false, - "message": format!("The `{}` database is a system database, it should not be altered with prisma migrate. Please connect to another database.", name), + "message": format!("The `{name}` database is a system database, it should not be altered with prisma migrate. 
Please connect to another database."), "meta": { "database_name": name, }, diff --git a/migration-engine/migration-engine-tests/tests/existing_data/mod.rs b/migration-engine/migration-engine-tests/tests/existing_data/mod.rs index fce93a63e5b3..f7098d6ec3cf 100644 --- a/migration-engine/migration-engine-tests/tests/existing_data/mod.rs +++ b/migration-engine/migration-engine-tests/tests/existing_data/mod.rs @@ -178,7 +178,7 @@ fn column_defaults_can_safely_be_changed(api: TestApi) { model_name, first_default .as_ref() - .map(|default| format!("@default(\"{}\")", default)) + .map(|default| format!("@default(\"{default}\")")) .unwrap_or_else(String::new) ); @@ -240,7 +240,7 @@ fn column_defaults_can_safely_be_changed(api: TestApi) { model_name, second_default .as_ref() - .map(|default| format!(r#"@default("{}")"#, default)) + .map(|default| format!(r#"@default("{default}")"#)) .unwrap_or_else(String::new) ); diff --git a/migration-engine/migration-engine-tests/tests/initialization/mod.rs b/migration-engine/migration-engine-tests/tests/initialization/mod.rs index 271c03c9dbc9..5dcb85812511 100644 --- a/migration-engine/migration-engine-tests/tests/initialization/mod.rs +++ b/migration-engine/migration-engine-tests/tests/initialization/mod.rs @@ -48,10 +48,9 @@ fn connecting_to_a_postgres_database_with_missing_schema_creates_it(api: TestApi r#" datasource db {{ provider = "postgresql" - url = "{}" + url = "{url}" }} - "#, - url + "# ); let me = migration_api(Some(schema.clone()), None).unwrap(); diff --git a/migration-engine/migration-engine-tests/tests/migrations/cockroachdb.rs b/migration-engine/migration-engine-tests/tests/migrations/cockroachdb.rs index d2a113b13bef..276502eda6d0 100644 --- a/migration-engine/migration-engine-tests/tests/migrations/cockroachdb.rs +++ b/migration-engine/migration-engine-tests/tests/migrations/cockroachdb.rs @@ -149,7 +149,7 @@ fn native_type_columns_can_be_created(api: TestApi) { .to_string(); for (field_name, prisma_type, native_type, _) in types { - writeln!(&mut dm, " {} {} @db.{}", field_name, prisma_type, native_type).unwrap(); + writeln!(&mut dm, " {field_name} {prisma_type} @db.{native_type}").unwrap(); } dm.push_str("}\n"); @@ -618,7 +618,7 @@ fn column_defaults_can_safely_be_changed(api: TestApi) { model_name, first_default .as_ref() - .map(|default| format!("@default(\"{}\")", default)) + .map(|default| format!("@default(\"{default}\")")) .unwrap_or_else(String::new) ); @@ -680,7 +680,7 @@ fn column_defaults_can_safely_be_changed(api: TestApi) { model_name, second_default .as_ref() - .map(|default| format!(r#"@default("{}")"#, default)) + .map(|default| format!(r#"@default("{default}")"#)) .unwrap_or_else(String::new) ); @@ -797,10 +797,9 @@ fn on_delete_referential_actions_should_work(api: TestApi) { model B {{ id BigInt @id aId BigInt? - a A? @relation(fields: [aId], references: [id], onDelete: {}) + a A? 
@relation(fields: [aId], references: [id], onDelete: {ra}) }} - "#, - ra + "# ); api.schema_push_w_datasource(&dm).send().assert_green(); diff --git a/migration-engine/migration-engine-tests/tests/migrations/cockroachdb/failure_modes.rs b/migration-engine/migration-engine-tests/tests/migrations/cockroachdb/failure_modes.rs index ef482792485d..b65539c34c27 100644 --- a/migration-engine/migration-engine-tests/tests/migrations/cockroachdb/failure_modes.rs +++ b/migration-engine/migration-engine-tests/tests/migrations/cockroachdb/failure_modes.rs @@ -212,7 +212,7 @@ fn syntax_errors_return_error_position(api: TestApi) { fn write_migrations(migrations: &[&str]) -> tempfile::TempDir { let dir = tempfile::tempdir().unwrap(); for (idx, migration) in migrations.iter().enumerate() { - let migration_dir = dir.path().join(format!("{:3}", idx)); + let migration_dir = dir.path().join(format!("{idx:3}")); std::fs::create_dir(&migration_dir).unwrap(); let migration_path = migration_dir.join("migration.sql"); std::fs::write(&migration_path, migration).unwrap(); diff --git a/migration-engine/migration-engine-tests/tests/migrations/db_execute.rs b/migration-engine/migration-engine-tests/tests/migrations/db_execute.rs index 617f7f005fdf..38c7ba63b7dd 100644 --- a/migration-engine/migration-engine-tests/tests/migrations/db_execute.rs +++ b/migration-engine/migration-engine-tests/tests/migrations/db_execute.rs @@ -61,7 +61,7 @@ fn db_execute_happy_path_with_prisma_schema() { url.replace('\\', "\\\\") ); let schema_path = tmpdir.path().join("schema.prisma"); - std::fs::write(&schema_path, &prisma_schema).unwrap(); + std::fs::write(&schema_path, prisma_schema).unwrap(); let script = r#" CREATE TABLE "dogs" ( id INTEGER PRIMARY KEY, name TEXT ); INSERT INTO "dogs" ("name") VALUES ('snoopy'), ('marmaduke'); diff --git a/migration-engine/migration-engine-tests/tests/migrations/dev_diagnostic_tests.rs b/migration-engine/migration-engine-tests/tests/migrations/dev_diagnostic_tests.rs index 79f91ed81f00..88d817216ff9 100644 --- a/migration-engine/migration-engine-tests/tests/migrations/dev_diagnostic_tests.rs +++ b/migration-engine/migration-engine-tests/tests/migrations/dev_diagnostic_tests.rs @@ -151,8 +151,7 @@ fn dev_diagnostic_calculates_drift_in_presence_of_failed_migrations(api: TestApi let DevDiagnosticOutput { action } = api.dev_diagnostic(&directory).send().into_output(); let expected_message = format!( - "- The migration `{}` failed.\n- The migration `{}` was modified after it was applied.\n- Drift detected: Your database schema is not in sync with your migration history.\n", - migration_two_name, migration_two_name, + "- The migration `{migration_two_name}` failed.\n- The migration `{migration_two_name}` was modified after it was applied.\n- Drift detected: Your database schema is not in sync with your migration history.\n", ); assert!(action.as_reset().unwrap().starts_with(&expected_message)); @@ -231,15 +230,14 @@ fn dev_diagnostic_can_detect_when_the_migrations_directory_is_behind(api: TestAp .assert_applied_migrations(&["initial", "second-migration"]); let second_migration_folder_path = directory.path().join(&name); - std::fs::remove_dir_all(&second_migration_folder_path).unwrap(); + std::fs::remove_dir_all(second_migration_folder_path).unwrap(); let DevDiagnosticOutput { action } = api.dev_diagnostic(&directory).send().into_output(); let message = action.as_reset().unwrap(); assert!(message.contains("- Drift detected: Your database schema is not in sync with your migration history")); 
assert!(message.contains(&format!( - "The following migration(s) are applied to the database but missing from the local migrations directory: {}", - name + "The following migration(s) are applied to the database but missing from the local migrations directory: {name}" ))); } @@ -285,7 +283,7 @@ fn dev_diagnostic_can_detect_when_history_diverges(api: TestApi) { .assert_applied_migrations(&["1-initial", "2-second-migration"]); let second_migration_folder_path = directory.path().join(&deleted_migration_name); - std::fs::remove_dir_all(&second_migration_folder_path).unwrap(); + std::fs::remove_dir_all(second_migration_folder_path).unwrap(); let dm3 = api.datamodel_with_provider( r#" @@ -307,7 +305,7 @@ fn dev_diagnostic_can_detect_when_history_diverges(api: TestApi) { let message = action.as_reset().unwrap(); assert!(message.contains("Drift detected: Your database schema is not in sync with your migration history")); - assert!(message.contains(&format!("- The migrations recorded in the database diverge from the local migrations directory. Last common migration: `{}`. Migrations applied to the database but absent from the migrations directory are: {}", first_migration_name, deleted_migration_name))); + assert!(message.contains(&format!("- The migrations recorded in the database diverge from the local migrations directory. Last common migration: `{first_migration_name}`. Migrations applied to the database but absent from the migrations directory are: {deleted_migration_name}"))); } #[test_connector] @@ -354,10 +352,7 @@ fn dev_diagnostic_can_detect_edited_migrations(api: TestApi) { let DevDiagnosticOutput { action } = api.dev_diagnostic(&directory).send().into_output(); - let expected_message = format!( - "The migration `{}` was modified after it was applied.", - initial_migration_name - ); + let expected_message = format!("The migration `{initial_migration_name}` was modified after it was applied."); assert_eq!(action.as_reset(), Some(expected_message.as_str())); } @@ -643,7 +638,7 @@ fn dev_diagnostic_shadow_database_creation_error_is_special_cased_mysql(api: Tes .to_user_facing() .unwrap_known(); - assert!(err.message.starts_with("Prisma Migrate could not create the shadow database. Please make sure the database user has permission to create databases. Read more about the shadow database (and workarounds) at https://pris.ly/d/migrate-shadow"), "{:?}", err); + assert!(err.message.starts_with("Prisma Migrate could not create the shadow database. Please make sure the database user has permission to create databases. 
Read more about the shadow database (and workarounds) at https://pris.ly/d/migrate-shadow"), "{err:?}"); } #[test_connector(tags(Postgres12))] diff --git a/migration-engine/migration-engine-tests/tests/migrations/diagnose_migration_history_tests.rs b/migration-engine/migration-engine-tests/tests/migrations/diagnose_migration_history_tests.rs index e778dbb17f56..6aee77618467 100644 --- a/migration-engine/migration-engine-tests/tests/migrations/diagnose_migration_history_tests.rs +++ b/migration-engine/migration-engine-tests/tests/migrations/diagnose_migration_history_tests.rs @@ -319,7 +319,7 @@ fn diagnose_migrations_history_can_detect_when_the_folder_is_behind(api: TestApi .assert_applied_migrations(&["initial", "second-migration"]); let second_migration_folder_path = directory.path().join(&name); - std::fs::remove_dir_all(&second_migration_folder_path).unwrap(); + std::fs::remove_dir_all(second_migration_folder_path).unwrap(); let DiagnoseMigrationHistoryOutput { drift, @@ -389,7 +389,7 @@ fn diagnose_migrations_history_can_detect_when_history_diverges(api: TestApi) { .assert_applied_migrations(&["1-initial", "2-second-migration"]); let second_migration_folder_path = directory.path().join(&deleted_migration_name); - std::fs::remove_dir_all(&second_migration_folder_path).unwrap(); + std::fs::remove_dir_all(second_migration_folder_path).unwrap(); let dm3 = api.datamodel_with_provider( r#" @@ -930,7 +930,7 @@ fn shadow_database_creation_error_is_special_cased_mssql(api: TestApi) { match result { Ok(api) => break api, Err(err) => { - eprintln!("got err, sleeping\nerr:{:?}", err); + eprintln!("got err, sleeping\nerr:{err:?}"); tries += 1; std::thread::sleep(std::time::Duration::from_millis(200)); } diff --git a/migration-engine/migration-engine-tests/tests/migrations/diff.rs b/migration-engine/migration-engine-tests/tests/migrations/diff.rs index 5e8adc9a627a..79e1142ed42c 100644 --- a/migration-engine/migration-engine-tests/tests/migrations/diff.rs +++ b/migration-engine/migration-engine-tests/tests/migrations/diff.rs @@ -91,13 +91,13 @@ fn from_empty_to_migrations_directory(mut api: TestApi) { let first_migration_file_path = first_migration_directory_path.join("migration.sql"); let migrations_lock_path = base_dir.path().join("migration_lock.toml"); std::fs::write( - &migrations_lock_path, - &format!("provider = \"{}\"", api.args().provider()), + migrations_lock_path, + format!("provider = \"{}\"", api.args().provider()), ) .unwrap(); std::fs::create_dir_all(&first_migration_directory_path).unwrap(); std::fs::write( - &first_migration_file_path, + first_migration_file_path, "CREATE TABLE cats ( id INTEGER PRIMARY KEY, moos BOOLEAN DEFAULT false );", ) .unwrap(); @@ -132,13 +132,13 @@ fn from_empty_to_migrations_folder_without_shadow_db_url_must_error(mut api: Tes let first_migration_file_path = first_migration_directory_path.join("migration.sql"); let migrations_lock_path = base_dir.path().join("migration_lock.toml"); std::fs::write( - &migrations_lock_path, - &format!("provider = \"{}\"", api.args().provider()), + migrations_lock_path, + format!("provider = \"{}\"", api.args().provider()), ) .unwrap(); std::fs::create_dir_all(&first_migration_directory_path).unwrap(); std::fs::write( - &first_migration_file_path, + first_migration_file_path, "CREATE TABLE cats ( id INTEGER PRIMARY KEY, moos BOOLEAN DEFAULT false );", ) .unwrap(); @@ -181,7 +181,7 @@ fn from_schema_datamodel_to_url(mut api: TestApi) { } "#; let schema_path = write_file_to_tmp(first_schema, &tempdir, "schema.prisma"); - let 
second_url = format!("file:{}/second_db.sqlite", base_dir_str); + let second_url = format!("file:{base_dir_str}/second_db.sqlite"); tok(async { let q = quaint::single::Quaint::new(&second_url).await.unwrap(); @@ -266,8 +266,8 @@ fn from_schema_datasource_to_url(mut api: TestApi) { let base_dir = tempfile::TempDir::new().unwrap(); let base_dir_str = base_dir.path().to_string_lossy(); - let first_url = format!("file:{}/first_db.sqlite", base_dir_str); - let second_url = format!("file:{}/second_db.sqlite", base_dir_str); + let first_url = format!("file:{base_dir_str}/first_db.sqlite"); + let second_url = format!("file:{base_dir_str}/second_db.sqlite"); tok(async { let q = quaint::single::Quaint::new(&first_url).await.unwrap(); @@ -321,8 +321,8 @@ fn from_url_to_url(mut api: TestApi) { let base_dir = tempfile::TempDir::new().unwrap(); let base_dir_str = base_dir.path().to_string_lossy(); - let first_url = format!("file:{}/first_db.sqlite", base_dir_str); - let second_url = format!("file:{}/second_db.sqlite", base_dir_str); + let first_url = format!("file:{base_dir_str}/first_db.sqlite"); + let second_url = format!("file:{base_dir_str}/second_db.sqlite"); tok(async { let q = quaint::single::Quaint::new(&first_url).await.unwrap(); @@ -420,9 +420,9 @@ fn diff_sqlite_migration_directories() { let base_dir_str_2 = base_dir_2.path().to_str().unwrap(); let migrations_lock_path = base_dir.path().join("migration_lock.toml"); - std::fs::write(&migrations_lock_path, "provider = \"sqlite\"").unwrap(); + std::fs::write(migrations_lock_path, "provider = \"sqlite\"").unwrap(); let migrations_lock_path = base_dir_2.path().join("migration_lock.toml"); - std::fs::write(&migrations_lock_path, "provider = \"sqlite\"").unwrap(); + std::fs::write(migrations_lock_path, "provider = \"sqlite\"").unwrap(); let params = DiffParams { exit_code: None, @@ -730,7 +730,7 @@ pub(crate) fn diff_result(params: DiffParams) -> (DiffResult, String) { let api = migration_core::migration_api(None, Some(host.clone())).unwrap(); let result = test_setup::runtime::run_with_thread_local_runtime(api.diff(params)).unwrap(); let printed_messages = host.printed_messages.lock().unwrap(); - assert!(printed_messages.len() == 1, "{:?}", printed_messages); + assert!(printed_messages.len() == 1, "{printed_messages:?}"); (result, printed_messages[0].clone()) } diff --git a/migration-engine/migration-engine-tests/tests/migrations/drift_summary.rs b/migration-engine/migration-engine-tests/tests/migrations/drift_summary.rs index f9e0787896b2..d0d9880258d0 100644 --- a/migration-engine/migration-engine-tests/tests/migrations/drift_summary.rs +++ b/migration-engine/migration-engine-tests/tests/migrations/drift_summary.rs @@ -22,7 +22,7 @@ fn check(from: &str, to: &str, expectation: Expect) { let api = migration_core::migration_api(None, Some(host.clone())).unwrap(); test_setup::runtime::run_with_thread_local_runtime(api.diff(params)).unwrap(); let printed_messages = host.printed_messages.lock().unwrap(); - assert!(printed_messages.len() == 1, "{:?}", printed_messages); + assert!(printed_messages.len() == 1, "{printed_messages:?}"); expectation.assert_eq(&printed_messages[0]); } diff --git a/migration-engine/migration-engine-tests/tests/migrations/mark_migration_applied_tests.rs b/migration-engine/migration-engine-tests/tests/migrations/mark_migration_applied_tests.rs index a379b89fc56d..8825a34844b4 100644 --- a/migration-engine/migration-engine-tests/tests/migrations/mark_migration_applied_tests.rs +++ 
b/migration-engine/migration-engine-tests/tests/migrations/mark_migration_applied_tests.rs @@ -152,8 +152,7 @@ fn mark_migration_applied_when_the_migration_is_already_applied_errors(api: Test .send_unwrap_err(); assert!(err.to_string().starts_with(&format!( - "The migration `{}` is already recorded as applied in the database.\n", - second_migration_name + "The migration `{second_migration_name}` is already recorded as applied in the database.\n" ))); let applied_migrations = tok(api.migration_persistence().list_migrations()).unwrap().unwrap(); diff --git a/migration-engine/migration-engine-tests/tests/migrations/mark_migration_rolled_back_tests.rs b/migration-engine/migration-engine-tests/tests/migrations/mark_migration_rolled_back_tests.rs index f4c2cf5641bb..3e16596082b1 100644 --- a/migration-engine/migration-engine-tests/tests/migrations/mark_migration_rolled_back_tests.rs +++ b/migration-engine/migration-engine-tests/tests/migrations/mark_migration_rolled_back_tests.rs @@ -174,8 +174,7 @@ fn mark_migration_rolled_back_with_a_successful_migration_errors(api: TestApi) { let err = api.mark_migration_rolled_back(&second_migration_name).send_unwrap_err(); assert!(err.to_string().starts_with(&format!( - "Migration `{}` cannot be rolled back because it is not in a failed state.\n", - second_migration_name + "Migration `{second_migration_name}` cannot be rolled back because it is not in a failed state.\n" ))); let applied_migrations = tok(api.migration_persistence().list_migrations()).unwrap().unwrap(); diff --git a/migration-engine/migration-engine-tests/tests/migrations/migrate_lock.rs b/migration-engine/migration-engine-tests/tests/migrations/migrate_lock.rs index d484ef5ac9a7..2c521d0d4c7b 100644 --- a/migration-engine/migration-engine-tests/tests/migrations/migrate_lock.rs +++ b/migration-engine/migration-engine-tests/tests/migrations/migrate_lock.rs @@ -44,7 +44,7 @@ fn create_migration_with_new_provider_errors(api: TestApi) { assert_eq!(err.error_code, ProviderSwitchedError::ERROR_CODE); assert!(err.message.contains("postgresql")); - assert!(err.message.contains("sqlite"), "{:?}", err); + assert!(err.message.contains("sqlite"), "{err:?}"); } #[test_connector(tags(Postgres), exclude(CockroachDb))] @@ -102,6 +102,6 @@ fn migration_lock_with_different_comment_shapes_work(api: TestApi) { assert_eq!(err.error_code, ProviderSwitchedError::ERROR_CODE); assert!(err.message.contains("postgresql")); - assert!(err.message.contains("sqlite"), "{:?}", err); + assert!(err.message.contains("sqlite"), "{err:?}"); } } diff --git a/migration-engine/migration-engine-tests/tests/migrations/mssql.rs b/migration-engine/migration-engine-tests/tests/migrations/mssql.rs index 7d4d30d3f2f7..e351ec8388cb 100644 --- a/migration-engine/migration-engine-tests/tests/migrations/mssql.rs +++ b/migration-engine/migration-engine-tests/tests/migrations/mssql.rs @@ -8,13 +8,10 @@ mod multi_schema; fn reset_clears_udts(api: TestApi) { let schema = api.schema_name(); - api.raw_cmd(&format!("CREATE TYPE {}.[testType] AS TABLE (FooBar INT)", schema)); + api.raw_cmd(&format!("CREATE TYPE {schema}.[testType] AS TABLE (FooBar INT)")); let schemas = api.query_raw( - &format!( - "SELECT * FROM sys.types WHERE SCHEMA_NAME(schema_id) = '{}' and NAME = 'testType'", - schema - ), + &format!("SELECT * FROM sys.types WHERE SCHEMA_NAME(schema_id) = '{schema}' and NAME = 'testType'"), &[], ); assert_eq!(1, schemas.len()); @@ -22,10 +19,7 @@ fn reset_clears_udts(api: TestApi) { api.reset().send_sync(None); let schemas = api.query_raw( - 
&format!( - "SELECT * FROM sys.types WHERE SCHEMA_NAME(schema_id) = '{}' and NAME = 'testType'", - schema - ), + &format!("SELECT * FROM sys.types WHERE SCHEMA_NAME(schema_id) = '{schema}' and NAME = 'testType'"), &[], ); assert_eq!(0, schemas.len()); @@ -35,20 +29,19 @@ fn reset_clears_udts(api: TestApi) { fn shared_default_constraints_are_ignored_issue_5423(api: TestApi) { let schema = api.schema_name(); - api.raw_cmd(&format!("CREATE DEFAULT [{}].catcat AS 'musti'", schema)); + api.raw_cmd(&format!("CREATE DEFAULT [{schema}].catcat AS 'musti'")); api.raw_cmd(&format!( r#" - CREATE TABLE [{0}].cats ( + CREATE TABLE [{schema}].cats ( id INT IDENTITY, name NVARCHAR(255) NOT NULL, CONSTRAINT [cats_pkey] PRIMARY KEY CLUSTERED ([id] ASC) ) - "#, - schema + "# )); - api.raw_cmd(&format!("sp_bindefault '{0}.catcat', '{0}.cats.name'", schema)); + api.raw_cmd(&format!("sp_bindefault '{schema}.catcat', '{schema}.cats.name'")); let dm = r#" model cats { diff --git a/migration-engine/migration-engine-tests/tests/migrations/mssql/multi_schema.rs b/migration-engine/migration-engine-tests/tests/migrations/mssql/multi_schema.rs index 4ffc2e3c6069..df4d6770b52a 100644 --- a/migration-engine/migration-engine-tests/tests/migrations/mssql/multi_schema.rs +++ b/migration-engine/migration-engine-tests/tests/migrations/mssql/multi_schema.rs @@ -424,7 +424,7 @@ fn multi_schema_tests(_api: TestApi) { @@schema("two") } "#}.into()), }, - namespaces: &namespaces, + namespaces, schema_push: SchemaPush::PushAnd(WithSchema::First, &SchemaPush::PushCustomAnd(CustomPushStep { warnings: &[] , @@ -781,7 +781,7 @@ fn multi_schema_tests(_api: TestApi) { @@schema("two") } "#}.into()), }, - namespaces: &namespaces, + namespaces, schema_push: SchemaPush::PushAnd(WithSchema::First, &SchemaPush::PushAnd(WithSchema::Second, &SchemaPush::Done)), @@ -1126,7 +1126,7 @@ fn multi_schema_migration(api: TestApi) { let dir = api.create_migrations_directory(); - api.create_migration("init", &dm, &dir).send_sync(); + api.create_migration("init", dm, &dir).send_sync(); api.apply_migrations(&dir) .send_sync() diff --git a/migration-engine/migration-engine-tests/tests/migrations/mysql.rs b/migration-engine/migration-engine-tests/tests/migrations/mysql.rs index f5dbae158727..6b2e4045a02d 100644 --- a/migration-engine/migration-engine-tests/tests/migrations/mysql.rs +++ b/migration-engine/migration-engine-tests/tests/migrations/mysql.rs @@ -250,7 +250,7 @@ fn native_type_columns_can_be_created(api: TestApi) { .to_string(); for (field_name, prisma_type, native_type, _) in types { - writeln!(&mut dm, " {} {} @db.{}", field_name, prisma_type, native_type).unwrap(); + writeln!(&mut dm, " {field_name} {prisma_type} @db.{native_type}").unwrap(); } dm.push_str("}\n"); diff --git a/migration-engine/migration-engine-tests/tests/migrations/postgres.rs b/migration-engine/migration-engine-tests/tests/migrations/postgres.rs index a0432bba96bc..e8c95fe160b5 100644 --- a/migration-engine/migration-engine-tests/tests/migrations/postgres.rs +++ b/migration-engine/migration-engine-tests/tests/migrations/postgres.rs @@ -116,7 +116,7 @@ fn native_type_columns_can_be_created(api: TestApi) { .to_string(); for (field_name, prisma_type, native_type, _) in types { - writeln!(&mut dm, " {} {} @db.{}", field_name, prisma_type, native_type).unwrap(); + writeln!(&mut dm, " {field_name} {prisma_type} @db.{native_type}").unwrap(); } dm.push_str("}\n"); diff --git a/migration-engine/migration-engine-tests/tests/migrations/postgres/multi_schema.rs 
b/migration-engine/migration-engine-tests/tests/migrations/postgres/multi_schema.rs index 498c84c3073e..bce7fd9e91f3 100644 --- a/migration-engine/migration-engine-tests/tests/migrations/postgres/multi_schema.rs +++ b/migration-engine/migration-engine-tests/tests/migrations/postgres/multi_schema.rs @@ -413,7 +413,7 @@ fn multi_schema_tests(_api: TestApi) { @@schema("two") } "#}.into()), }, - namespaces: &namespaces, + namespaces, schema_push: SchemaPush::PushAnd(WithSchema::First, &SchemaPush::PushCustomAnd(CustomPushStep { warnings: &[] , @@ -934,7 +934,7 @@ fn multi_schema_tests(_api: TestApi) { @@schema("two") } "#}.into()), }, - namespaces: &namespaces, + namespaces, schema_push: SchemaPush::PushAnd(WithSchema::First, &SchemaPush::PushAnd(WithSchema::Second, &SchemaPush::Done)), @@ -1267,7 +1267,7 @@ fn multi_schema_tests(_api: TestApi) { }"# }.into()), }, - namespaces: &namespaces, + namespaces, schema_push: SchemaPush::PushAnd(WithSchema::First, &SchemaPush::PushCustomAnd(CustomPushStep { warnings: &["The values [Second] on the enum `SomeEnum` will be removed. If these variants are still used in the database, this will fail."], @@ -1344,7 +1344,7 @@ fn multi_schema_tests(_api: TestApi) { @@schema("two") }"#}.into()), }, - namespaces: &namespaces, + namespaces, schema_push: SchemaPush::PushAnd(WithSchema::First, &SchemaPush::RawCmdAnd("insert into \"one\".\"SomeModel\" values(1, 'First');", &SchemaPush::PushCustomAnd(CustomPushStep { diff --git a/migration-engine/migration-engine-tests/tests/migrations/relations.rs b/migration-engine/migration-engine-tests/tests/migrations/relations.rs index f2f46d7f12f2..7baeb31b3206 100644 --- a/migration-engine/migration-engine-tests/tests/migrations/relations.rs +++ b/migration-engine/migration-engine-tests/tests/migrations/relations.rs @@ -553,10 +553,9 @@ fn on_delete_referential_actions_should_work(api: TestApi) { model B {{ id Int @id aId Int? - a A? @relation(fields: [aId], references: [id], onDelete: {}) + a A? @relation(fields: [aId], references: [id], onDelete: {ra}) }} - "#, - ra + "# ); api.schema_push_w_datasource(&dm).send().assert_green(); @@ -644,10 +643,9 @@ fn on_update_referential_actions_should_work(api: TestApi) { model B {{ id BigInt @id aId BigInt? - a A? @relation(fields: [aId], references: [id], onUpdate: {}) + a A? 
@relation(fields: [aId], references: [id], onUpdate: {ra}) }} - "#, - ra + "# ); api.schema_push_w_datasource(&dm).send().assert_green(); diff --git a/migration-engine/migration-engine-tests/tests/migrations/squashing_tests.rs b/migration-engine/migration-engine-tests/tests/migrations/squashing_tests.rs index 23d8d6940e0d..fb102b6b704c 100644 --- a/migration-engine/migration-engine-tests/tests/migrations/squashing_tests.rs +++ b/migration-engine/migration-engine-tests/tests/migrations/squashing_tests.rs @@ -45,7 +45,7 @@ fn squashing_whole_migration_history_works(api: TestApi) { for (count, schema) in [dm1, dm2, dm3].iter().enumerate() { let name = api - .create_migration(&format!("migration{}", count), schema, &directory) + .create_migration(&format!("migration{count}"), schema, &directory) .send_sync() .into_output() .generated_migration_name @@ -113,8 +113,7 @@ fn squashing_whole_migration_history_works(api: TestApi) { last_common_migration_name: None, }) if unapplied_migration_names == &["0000_initial"] ), - "got: {:#?}", - history + "got: {history:#?}" ); assert!(failed_migration_names.is_empty()); assert!(edited_migration_names.is_empty()); @@ -140,8 +139,7 @@ fn squashing_whole_migration_history_works(api: TestApi) { assert!(drift.is_none()); assert!( matches!(&history, Some(HistoryDiagnostic::MigrationsDirectoryIsBehind { unpersisted_migration_names }) if unpersisted_migration_names.len() == 3), - "got: {:#?}", - history + "got: {history:#?}" ); assert!(failed_migration_names.is_empty()); assert!(edited_migration_names.is_empty()); @@ -167,8 +165,7 @@ fn squashing_whole_migration_history_works(api: TestApi) { assert!(drift.is_none()); assert!( matches!(&history, Some(HistoryDiagnostic::MigrationsDirectoryIsBehind { unpersisted_migration_names }) if unpersisted_migration_names.len() == 3), - "got: {:#?}", - history + "got: {history:#?}" ); assert!(failed_migration_names.is_empty()); assert!(edited_migration_names.is_empty()); @@ -242,7 +239,7 @@ fn squashing_migrations_history_at_the_start_works(api: TestApi) { for (count, schema) in [dm1, dm2, dm3].iter().enumerate() { let name = api - .create_migration(&format!("migration{}", count), schema, &directory) + .create_migration(&format!("migration{count}"), schema, &directory) .send_sync() .into_output() .generated_migration_name @@ -314,8 +311,7 @@ fn squashing_migrations_history_at_the_start_works(api: TestApi) { assert!(drift.is_none()); assert!( matches!(&history, Some(HistoryDiagnostic::MigrationsDirectoryIsBehind { unpersisted_migration_names }) if unpersisted_migration_names.len() == 2), - "got: {:#?}", - history + "got: {history:#?}" ); assert!(failed_migration_names.is_empty()); assert!(edited_migration_names.is_empty()); @@ -342,8 +338,7 @@ fn squashing_migrations_history_at_the_start_works(api: TestApi) { assert!(drift.is_none()); assert!( matches!(&history, Some(HistoryDiagnostic::MigrationsDirectoryIsBehind { unpersisted_migration_names }) if unpersisted_migration_names.len() == 2), - "got: {:#?}", - history + "got: {history:#?}" ); assert!(failed_migration_names.is_empty()); assert!(edited_migration_names.is_empty()); @@ -394,7 +389,7 @@ fn squashing_migrations_history_at_the_end_works(api: TestApi) { for (count, schema) in [dm1, dm2, dm3].iter().enumerate() { let name = api - .create_migration(&format!("migration{}", count), schema, &directory) + .create_migration(&format!("migration{count}"), schema, &directory) .send_sync() .into_output() .generated_migration_name @@ -466,8 +461,7 @@ fn 
squashing_migrations_history_at_the_end_works(api: TestApi) { assert!(drift.is_none()); assert!( matches!(&history, Some(HistoryDiagnostic::MigrationsDirectoryIsBehind { unpersisted_migration_names }) if unpersisted_migration_names.len() == 2), - "got: {:#?}", - history + "got: {history:#?}" ); assert!(failed_migration_names.is_empty()); assert!(edited_migration_names.is_empty()); @@ -494,8 +488,7 @@ fn squashing_migrations_history_at_the_end_works(api: TestApi) { assert!(drift.is_none()); assert!( matches!(&history, Some(HistoryDiagnostic::MigrationsDirectoryIsBehind { unpersisted_migration_names }) if unpersisted_migration_names.len() == 2), - "got: {:#?}", - history + "got: {history:#?}" ); assert!(failed_migration_names.is_empty()); assert!(edited_migration_names.is_empty()); diff --git a/migration-engine/migration-engine-tests/tests/migrations/unsupported_types.rs b/migration-engine/migration-engine-tests/tests/migrations/unsupported_types.rs index d93caa7163ef..348754305f8f 100644 --- a/migration-engine/migration-engine-tests/tests/migrations/unsupported_types.rs +++ b/migration-engine/migration-engine-tests/tests/migrations/unsupported_types.rs @@ -217,11 +217,10 @@ fn using_unsupported_and_ignore_should_work(api: TestApi) { let dm = &format!( r#" model UnsupportedModel {{ - field Unsupported("{}") + field Unsupported("{unsupported_type}") @@ignore }} - "#, - unsupported_type + "# ); api.schema_push_w_datasource(dm).send().assert_green(); diff --git a/migration-engine/migration-engine-tests/tests/native_types/mssql.rs b/migration-engine/migration-engine-tests/tests/native_types/mssql.rs index 311c401dc937..6f3ca2d6754e 100644 --- a/migration-engine/migration-engine-tests/tests/native_types/mssql.rs +++ b/migration-engine/migration-engine-tests/tests/native_types/mssql.rs @@ -1898,7 +1898,7 @@ fn with_params(r#type: &str) -> &str { fn safe_casts_with_existing_data_should_work(api: TestApi) { for (from, seed, casts) in SAFE_CASTS.iter() { for to in *casts { - println!("From `{}` to `{}` with seed `{:?}`", from, to, seed); + println!("From `{from}` to `{to}` with seed `{seed:?}`"); let kind = from.split('(').next().unwrap(); @@ -1956,7 +1956,7 @@ fn safe_casts_with_existing_data_should_work(api: TestApi) { fn risky_casts_with_existing_data_should_warn(api: TestApi) { for (from, seed, casts) in RISKY_CASTS.iter() { for to in *casts { - println!("From `{}` to `{}` with seed `{:?}`", from, to, seed); + println!("From `{from}` to `{to}` with seed `{seed:?}`"); let kind = from.split('(').next().unwrap(); @@ -2022,7 +2022,7 @@ fn risky_casts_with_existing_data_should_warn(api: TestApi) { fn not_castable_with_existing_data_should_warn(api: TestApi) { for (from, seed, casts) in NOT_CASTABLE.iter() { for to in *casts { - println!("From `{}` to `{}` with seed `{:?}`", from, to, seed); + println!("From `{from}` to `{to}` with seed `{seed:?}`"); let kind = match from.split('(').next() { Some(a) => a, diff --git a/migration-engine/migration-engine-tests/tests/native_types/mysql.rs b/migration-engine/migration-engine-tests/tests/native_types/mysql.rs index ca58bb27cb36..03af3711b84b 100644 --- a/migration-engine/migration-engine-tests/tests/native_types/mysql.rs +++ b/migration-engine/migration-engine-tests/tests/native_types/mysql.rs @@ -620,7 +620,7 @@ fn native_type_name_to_prisma_scalar_type_name(scalar_type: &str) -> &'static st let idx = TYPES_MAP .binary_search_by_key(&scalar_type, |(native, _prisma)| native) - .map_err(|_err| format!("Could not find {} in TYPES_MAP", scalar_type)) + 
.map_err(|_err| format!("Could not find {scalar_type} in TYPES_MAP")) .unwrap(); TYPES_MAP[idx].1 @@ -632,7 +632,7 @@ fn colnames_for_cases(cases: Cases) -> Vec<String> { std::iter::repeat(()) .enumerate() .take(max_colname) - .map(|(idx, _)| format!("col{}", idx)) + .map(|(idx, _)| format!("col{idx}")) .collect() } diff --git a/migration-engine/migration-engine-tests/tests/native_types/postgres.rs b/migration-engine/migration-engine-tests/tests/native_types/postgres.rs index 17188648ffcc..65280f16ce0e 100644 --- a/migration-engine/migration-engine-tests/tests/native_types/postgres.rs +++ b/migration-engine/migration-engine-tests/tests/native_types/postgres.rs @@ -784,9 +784,9 @@ fn safe_casts_with_existing_data_should_work(api: TestApi) { let mut next_assertions = vec![]; for (idx, to) in casts.iter().enumerate() { - println!("From `{}` to `{}` with seed `{:?}`", from, to, seed); + println!("From `{from}` to `{to}` with seed `{seed:?}`"); - let column_name = format!("column_{}", idx); + let column_name = format!("column_{idx}"); writeln!( previous_columns, @@ -816,10 +816,9 @@ fn safe_casts_with_existing_data_should_work(api: TestApi) { r#" model A {{ id Int @id @default(autoincrement()) @db.Integer - {columns} + {previous_columns} }} "#, - columns = previous_columns, ); tracing::info!(dm = dm1.as_str()); @@ -843,10 +842,9 @@ fn safe_casts_with_existing_data_should_work(api: TestApi) { r#" model A {{ id Int @id @default(autoincrement()) @db.Integer - {columns} + {next_columns} }} - "#, - columns = next_columns + "# ); api.schema_push_w_datasource(&dm2).send().assert_green(); @@ -876,9 +874,9 @@ fn risky_casts_with_existing_data_should_warn(api: TestApi) { let mut warnings = vec![]; for (idx, to) in casts.iter().enumerate() { - println!("From `{}` to `{}` with seed `{:?}`", from, to, seed); + println!("From `{from}` to `{to}` with seed `{seed:?}`"); - let column_name = format!("column_{}", idx); + let column_name = format!("column_{idx}"); writeln!( previous_columns, @@ -902,9 +900,6 @@ fn risky_casts_with_existing_data_should_warn(api: TestApi) { warnings.push( format!( "You are about to alter the column `{column_name}` on the `A` table, which contains 1 non-null values. The data in that column will be cast from `{from}` to `{to}`.", - column_name = column_name, - from = from, - to = to, ).into()); previous_assertions.push((column_name.clone(), *from)); @@ -915,10 +910,9 @@ fn risky_casts_with_existing_data_should_warn(api: TestApi) { r#" model A {{ id Int @id @default(autoincrement()) @db.Integer - {columns} + {previous_columns} }} "#, - columns = previous_columns, ); api.schema_push_w_datasource(&dm1).send().assert_green(); @@ -941,10 +935,9 @@ fn risky_casts_with_existing_data_should_warn(api: TestApi) { r#" model A {{ id Int @id @default(autoincrement()) @db.Integer - {columns} + {next_columns} }} "#, - columns = next_columns, ); api.schema_push_w_datasource(&dm2) @@ -979,9 +972,9 @@ fn not_castable_with_existing_data_should_warn(api: TestApi) { warnings.clear(); for (idx, to) in casts.iter().enumerate() { - println!("From `{}` to `{}` with seed `{:?}`", from, to, seed); + println!("From `{from}` to `{to}` with seed `{seed:?}`"); - let column_name = format!("column_{}", idx); + let column_name = format!("column_{idx}"); writeln!( previous_columns, @@ -1007,7 +1000,6 @@ fn not_castable_with_existing_data_should_warn(api: TestApi) { warnings.push( format!( "The `{column_name}` column on the `A` table would be dropped and recreated. 
This will lead to data loss.", - column_name = column_name, // from = from, // to = to, ) @@ -1021,10 +1013,9 @@ fn not_castable_with_existing_data_should_warn(api: TestApi) { r#" model A {{ id Int @id @default(autoincrement()) @db.Integer - {columns} + {previous_columns} }} "#, - columns = previous_columns, ); api.schema_push_w_datasource(&dm1).send().assert_green(); @@ -1046,10 +1037,9 @@ fn not_castable_with_existing_data_should_warn(api: TestApi) { r#" model A {{ id Int @id @default(autoincrement()) @db.Integer - {columns} + {next_columns} }} "#, - columns = next_columns, ); // todo we could force here and then check that the db really returns not castable @@ -1168,9 +1158,9 @@ fn safe_casts_from_array_with_existing_data_should_work(api: TestApi) { let mut next_assertions = vec![]; for (idx, (from, seed)) in from.iter().enumerate() { - println!("From `{}` to `{}` with seed `{:?}`", from, to, seed); + println!("From `{from}` to `{to}` with seed `{seed:?}`"); - let column_name = format!("column_{}", idx); + let column_name = format!("column_{idx}"); writeln!( previous_columns, @@ -1200,10 +1190,9 @@ fn safe_casts_from_array_with_existing_data_should_work(api: TestApi) { r#" model A {{ id Int @id @default(autoincrement()) @db.Integer - {columns} + {previous_columns} }} "#, - columns = previous_columns, ); api.schema_push_w_datasource(&dm1).send().assert_green(); @@ -1225,10 +1214,9 @@ fn safe_casts_from_array_with_existing_data_should_work(api: TestApi) { r#" model A {{ id Int @id @default(autoincrement()) @db.Integer - {columns} + {next_columns} }} "#, - columns = next_columns, ); api.schema_push_w_datasource(&dm2).send().assert_green(); diff --git a/migration-engine/migration-engine-tests/tests/schema_push/mod.rs b/migration-engine/migration-engine-tests/tests/schema_push/mod.rs index 432678c04d14..133ec20a9542 100644 --- a/migration-engine/migration-engine-tests/tests/schema_push/mod.rs +++ b/migration-engine/migration-engine-tests/tests/schema_push/mod.rs @@ -200,7 +200,7 @@ fn alter_constraint_name_push(api: TestApi) { let custom_dm = format!( r#" model A {{ - id Int @id{} + id Int @id{singular_id} name String @unique(map: "CustomUnique") a String b String @@ -212,12 +212,11 @@ fn alter_constraint_name_push(api: TestApi) { a String b String aId Int - A A @relation("AtoB", fields: [aId], references: [id]{}) + A A @relation("AtoB", fields: [aId], references: [id]{no_named_fk}) @@index([a,b], map: "AnotherCustomIndex") - @@id([a, b]{}) + @@id([a, b]{compound_id}) }} - "#, - singular_id, no_named_fk, compound_id + "# ); api.schema_push_w_datasource(custom_dm).send().assert_green(); diff --git a/migration-engine/migration-engine-tests/tests/single_migration_tests.rs b/migration-engine/migration-engine-tests/tests/single_migration_tests.rs index e56fc4011d79..7d83e538fc7b 100644 --- a/migration-engine/migration-engine-tests/tests/single_migration_tests.rs +++ b/migration-engine/migration-engine-tests/tests/single_migration_tests.rs @@ -50,8 +50,7 @@ fn run_single_migration_test(test_file_path: &str, test_function_name: &'static let expected_tags_prefix = "// tags="; assert!( first_line.starts_with(expected_tags_prefix), - "The first line of a single migration test test must start with \"{}\"", - expected_tags_prefix + "The first line of a single migration test test must start with \"{expected_tags_prefix}\"" ); let tags = first_line.trim_start_matches(expected_tags_prefix); test_setup::tags_from_comma_separated_list(tags) diff --git a/migration-engine/qe-setup/src/mssql.rs 
b/migration-engine/qe-setup/src/mssql.rs index f56e4c229495..5840b4b6c14e 100644 --- a/migration-engine/qe-setup/src/mssql.rs +++ b/migration-engine/qe-setup/src/mssql.rs @@ -4,7 +4,7 @@ use quaint::{prelude::*, single::Quaint}; use std::str::FromStr; pub(crate) async fn mssql_setup(url: String, prisma_schema: &str, db_schemas: &[&str]) -> ConnectorResult<()> { - let mut conn = JdbcString::from_str(&format!("jdbc:{}", url)) + let mut conn = JdbcString::from_str(&format!("jdbc:{url}")) .map_err(|e| ConnectorError::from_source(e, "JDBC string parse error"))?; let params = conn.properties_mut(); @@ -25,10 +25,7 @@ pub(crate) async fn mssql_setup(url: String, prisma_schema: &str, db_schemas: &[ api.reset().await.ok(); // Without these, our poor connection gets deadlocks if other schemas // are modified while we introspect. - let allow_snapshot_isolation = format!( - "ALTER DATABASE [{db_name}] SET ALLOW_SNAPSHOT_ISOLATION ON", - db_name = db_name - ); + let allow_snapshot_isolation = format!("ALTER DATABASE [{db_name}] SET ALLOW_SNAPSHOT_ISOLATION ON"); conn.raw_cmd(&allow_snapshot_isolation).await.unwrap(); conn.raw_cmd(&format!( diff --git a/migration-engine/qe-setup/src/postgres.rs b/migration-engine/qe-setup/src/postgres.rs index c341afc09790..05dfdf41c257 100644 --- a/migration-engine/qe-setup/src/postgres.rs +++ b/migration-engine/qe-setup/src/postgres.rs @@ -12,27 +12,25 @@ pub(crate) async fn postgres_setup(url: String, prisma_schema: &str, db_schemas: strip_schema_param_from_url(&mut url); let conn = create_postgres_admin_conn(url.clone()).await?; - let query = format!("DROP DATABASE \"{}\"", db_name); + let query = format!("DROP DATABASE \"{db_name}\""); conn.raw_cmd(&query).await.ok(); - let query = format!("CREATE DATABASE \"{}\"", db_name); + let query = format!("CREATE DATABASE \"{db_name}\""); conn.raw_cmd(&query).await.ok(); } else { strip_schema_param_from_url(&mut url); let conn = create_postgres_admin_conn(url.clone()).await?; - let query = format!("CREATE DATABASE \"{}\"", db_name); + let query = format!("CREATE DATABASE \"{db_name}\""); conn.raw_cmd(&query).await.ok(); // Now create the schema - url.set_path(&format!("/{}", db_name)); + url.set_path(&format!("/{db_name}")); let conn = Quaint::new(url.as_ref()).await.unwrap(); - let drop_and_recreate_schema = format!( - "DROP SCHEMA IF EXISTS \"{schema}\" CASCADE;\nCREATE SCHEMA \"{schema}\";", - schema = schema - ); + let drop_and_recreate_schema = + format!("DROP SCHEMA IF EXISTS \"{schema}\" CASCADE;\nCREATE SCHEMA \"{schema}\";"); conn.raw_cmd(&drop_and_recreate_schema) .await .map_err(|e| ConnectorError::from_source(e, ""))?; @@ -51,7 +49,7 @@ pub(crate) async fn postgres_teardown(url: &str, db_schemas: &[&str]) -> Connect let conn = create_postgres_admin_conn(url.clone()).await?; let db_name = url.path().strip_prefix('/').unwrap(); - let query = format!("DROP DATABASE \"{}\" CASCADE", db_name); + let query = format!("DROP DATABASE \"{db_name}\" CASCADE"); conn.raw_cmd(&query).await.ok(); } @@ -66,7 +64,7 @@ async fn create_postgres_admin_conn(mut url: Url) -> ConnectorResult<Quaint> { fn strip_schema_param_from_url(url: &mut Url) { let mut params: HashMap<String, String> = url.query_pairs().into_owned().collect(); params.remove("schema"); - let params: Vec<String> = params.into_iter().map(|(k, v)| format!("{}={}", k, v)).collect(); + let params: Vec<String> = params.into_iter().map(|(k, v)| format!("{k}={v}")).collect(); let params: String = params.join("&"); url.set_query(Some(&params)); } diff --git a/prisma-fmt/build.rs b/prisma-fmt/build.rs index 
d9a74f6b8c89..2e8fe20c0503 100644 --- a/prisma-fmt/build.rs +++ b/prisma-fmt/build.rs @@ -3,7 +3,7 @@ use std::process::Command; fn store_git_commit_hash() { let output = Command::new("git").args(["rev-parse", "HEAD"]).output().unwrap(); let git_hash = String::from_utf8(output.stdout).unwrap(); - println!("cargo:rustc-env=GIT_HASH={}", git_hash); + println!("cargo:rustc-env=GIT_HASH={git_hash}"); } fn main() { diff --git a/prisma-fmt/src/actions.rs b/prisma-fmt/src/actions.rs index 9b2ac1b4a81c..8e02718c7bb5 100644 --- a/prisma-fmt/src/actions.rs +++ b/prisma-fmt/src/actions.rs @@ -10,7 +10,7 @@ pub(crate) fn run(schema: &str) -> String { .active_connector .referential_actions() .iter() - .map(|act| format!("{:?}", act)) + .map(|act| format!("{act:?}")) .collect::<Vec<_>>(); serde_json::to_string(&available_referential_actions).expect("Failed to render JSON") diff --git a/prisma-fmt/src/code_actions/multi_schema.rs b/prisma-fmt/src/code_actions/multi_schema.rs index f15f8e96704b..353eeb6095d6 100644 --- a/prisma-fmt/src/code_actions/multi_schema.rs +++ b/prisma-fmt/src/code_actions/multi_schema.rs @@ -5,12 +5,12 @@ use psl::{ Configuration, }; -pub(super) fn add_schema_block_attribute_model<'a>( +pub(super) fn add_schema_block_attribute_model( actions: &mut Vec<CodeActionOrCommand>, params: &CodeActionParams, schema: &str, config: &Configuration, - model: ModelWalker<'a>, + model: ModelWalker<'_>, ) { let datasource = match config.datasources.first() { Some(ds) => ds, @@ -57,12 +57,12 @@ pub(super) fn add_schema_block_attribute_model<'a>( actions.push(CodeActionOrCommand::CodeAction(action)) } -pub(super) fn add_schema_block_attribute_enum<'a>( +pub(super) fn add_schema_block_attribute_enum( actions: &mut Vec<CodeActionOrCommand>, params: &CodeActionParams, schema: &str, config: &Configuration, - enumerator: EnumWalker<'a>, + enumerator: EnumWalker<'_>, ) { let datasource = match config.datasources.first() { Some(ds) => ds, diff --git a/prisma-fmt/src/get_config.rs b/prisma-fmt/src/get_config.rs index 3198f14a93a8..d0ef744a6157 100644 --- a/prisma-fmt/src/get_config.rs +++ b/prisma-fmt/src/get_config.rs @@ -25,7 +25,7 @@ pub(crate) fn get_config(params: &str) -> Result { let params: GetConfigParams = match serde_json::from_str(params) { Ok(params) => params, Err(serde_err) => { - panic!("Failed to deserialize GetConfigParams: {}", serde_err,); + panic!("Failed to deserialize GetConfigParams: {serde_err}",); } }; diff --git a/prisma-fmt/tests/code_actions/test_api.rs b/prisma-fmt/tests/code_actions/test_api.rs index a88918c3cd33..2be0c978aa82 100644 --- a/prisma-fmt/tests/code_actions/test_api.rs +++ b/prisma-fmt/tests/code_actions/test_api.rs @@ -47,7 +47,7 @@ pub(crate) fn test_scenario(scenario_name: &str) { let mut path = String::with_capacity(SCENARIOS_PATH.len() + 12); let schema = { - write!(path, "{}/{}/schema.prisma", SCENARIOS_PATH, scenario_name).unwrap(); + write!(path, "{SCENARIOS_PATH}/{scenario_name}/schema.prisma").unwrap(); std::fs::read_to_string(&path).unwrap() }; @@ -59,7 +59,7 @@ pub(crate) fn test_scenario(scenario_name: &str) { }; path.clear(); - write!(path, "{}/{}/result.json", SCENARIOS_PATH, scenario_name).unwrap(); + write!(path, "{SCENARIOS_PATH}/{scenario_name}/result.json").unwrap(); let expected_result = std::fs::read_to_string(&path).unwrap_or_else(|_| String::new()); let params = lsp_types::CodeActionParams { @@ -111,8 +111,8 @@ fn format_chunks(chunks: Vec<dissimilar::Chunk>) -> String { for chunk in chunks { let formatted = match chunk { dissimilar::Chunk::Equal(text) => text.into(), - dissimilar::Chunk::Delete(text) => 
format!("\x1b[41m{}\x1b[0m", text), - dissimilar::Chunk::Insert(text) => format!("\x1b[42m{}\x1b[0m", text), + dissimilar::Chunk::Delete(text) => format!("\x1b[41m{text}\x1b[0m"), + dissimilar::Chunk::Insert(text) => format!("\x1b[42m{text}\x1b[0m"), }; buf.push_str(&formatted); } diff --git a/prisma-fmt/tests/text_document_completion/test_api.rs b/prisma-fmt/tests/text_document_completion/test_api.rs index e74e90616c79..2284b179269f 100644 --- a/prisma-fmt/tests/text_document_completion/test_api.rs +++ b/prisma-fmt/tests/text_document_completion/test_api.rs @@ -9,12 +9,12 @@ pub(crate) fn test_scenario(scenario_name: &str) { let mut path = String::with_capacity(SCENARIOS_PATH.len() + 12); let schema = { - write!(path, "{}/{}/schema.prisma", SCENARIOS_PATH, scenario_name).unwrap(); + write!(path, "{SCENARIOS_PATH}/{scenario_name}/schema.prisma").unwrap(); std::fs::read_to_string(&path).unwrap() }; path.clear(); - write!(path, "{}/{}/result.json", SCENARIOS_PATH, scenario_name).unwrap(); + write!(path, "{SCENARIOS_PATH}/{scenario_name}/result.json").unwrap(); let expected_result = std::fs::read_to_string(&path).unwrap_or_else(|_| String::new()); let (cursor_position, schema) = take_cursor(&schema); @@ -65,8 +65,8 @@ fn format_chunks(chunks: Vec) -> String { for chunk in chunks { let formatted = match chunk { dissimilar::Chunk::Equal(text) => text.into(), - dissimilar::Chunk::Delete(text) => format!("\x1b[41m{}\x1b[0m", text), - dissimilar::Chunk::Insert(text) => format!("\x1b[42m{}\x1b[0m", text), + dissimilar::Chunk::Delete(text) => format!("\x1b[41m{text}\x1b[0m"), + dissimilar::Chunk::Insert(text) => format!("\x1b[42m{text}\x1b[0m"), }; buf.push_str(&formatted); } diff --git a/psl/builtin-connectors/src/cockroach_datamodel_connector.rs b/psl/builtin-connectors/src/cockroach_datamodel_connector.rs index 09d44beb9acc..11cd75d5e582 100644 --- a/psl/builtin-connectors/src/cockroach_datamodel_connector.rs +++ b/psl/builtin-connectors/src/cockroach_datamodel_connector.rs @@ -137,7 +137,7 @@ impl Connector for CockroachDatamodelConnector { .iter() .find(|(st, _)| st == scalar_type) .map(|(_, native_type)| native_type) - .ok_or_else(|| format!("Could not find scalar type {:?} in SCALAR_TYPE_DEFAULTS", scalar_type)) + .ok_or_else(|| format!("Could not find scalar type {scalar_type:?} in SCALAR_TYPE_DEFAULTS")) .unwrap(); NativeTypeInstance::new::(*native_type) diff --git a/psl/builtin-connectors/src/mongodb/mongodb_types.rs b/psl/builtin-connectors/src/mongodb/mongodb_types.rs index 967d27b27be7..c75a7f0b3bd4 100644 --- a/psl/builtin-connectors/src/mongodb/mongodb_types.rs +++ b/psl/builtin-connectors/src/mongodb/mongodb_types.rs @@ -50,5 +50,5 @@ static DEFAULT_MAPPING: Lazy> = Lazy::new(|| { pub(crate) fn default_for(scalar_type: &ScalarType) -> &MongoDbType { DEFAULT_MAPPING .get(scalar_type) - .unwrap_or_else(|| panic!("MongoDB native type mapping missing for '{:?}'", scalar_type)) + .unwrap_or_else(|| panic!("MongoDB native type mapping missing for '{scalar_type:?}'")) } diff --git a/psl/builtin-connectors/src/mssql_datamodel_connector.rs b/psl/builtin-connectors/src/mssql_datamodel_connector.rs index b0f53885a7d1..fd0da4f981b1 100644 --- a/psl/builtin-connectors/src/mssql_datamodel_connector.rs +++ b/psl/builtin-connectors/src/mssql_datamodel_connector.rs @@ -141,7 +141,7 @@ impl Connector for MsSqlDatamodelConnector { .iter() .find(|(st, _)| st == scalar_type) .map(|(_, native_type)| native_type) - .ok_or_else(|| format!("Could not find scalar type {:?} in SCALAR_TYPE_DEFAULTS", 
scalar_type)) + .ok_or_else(|| format!("Could not find scalar type {scalar_type:?} in SCALAR_TYPE_DEFAULTS")) .unwrap(); NativeTypeInstance::new::<MsSqlType>(*nt) } diff --git a/psl/builtin-connectors/src/mysql_datamodel_connector.rs b/psl/builtin-connectors/src/mysql_datamodel_connector.rs index dfccd424ebbf..fffcf1b43b26 100644 --- a/psl/builtin-connectors/src/mysql_datamodel_connector.rs +++ b/psl/builtin-connectors/src/mysql_datamodel_connector.rs @@ -150,7 +150,7 @@ impl Connector for MySqlDatamodelConnector { .iter() .find(|(st, _)| st == scalar_type) .map(|(_, native_type)| native_type) - .ok_or_else(|| format!("Could not find scalar type {:?} in SCALAR_TYPE_DEFAULTS", scalar_type)) + .ok_or_else(|| format!("Could not find scalar type {scalar_type:?} in SCALAR_TYPE_DEFAULTS")) .unwrap(); NativeTypeInstance::new::<MySqlType>(*native_type) diff --git a/psl/builtin-connectors/src/postgres_datamodel_connector.rs b/psl/builtin-connectors/src/postgres_datamodel_connector.rs index 19984e25dfe0..bd0c90fb5c85 100644 --- a/psl/builtin-connectors/src/postgres_datamodel_connector.rs +++ b/psl/builtin-connectors/src/postgres_datamodel_connector.rs @@ -320,7 +320,7 @@ impl Connector for PostgresDatamodelConnector { .iter() .find(|(st, _)| st == scalar_type) .map(|(_, native_type)| native_type) - .ok_or_else(|| format!("Could not find scalar type {:?} in SCALAR_TYPE_DEFAULTS", scalar_type)) + .ok_or_else(|| format!("Could not find scalar type {scalar_type:?} in SCALAR_TYPE_DEFAULTS")) .unwrap(); NativeTypeInstance::new::<PostgresType>(*native_type) diff --git a/psl/builtin-connectors/src/sqlite_datamodel_connector.rs b/psl/builtin-connectors/src/sqlite_datamodel_connector.rs index 775ed4377968..54e655752bc7 100644 --- a/psl/builtin-connectors/src/sqlite_datamodel_connector.rs +++ b/psl/builtin-connectors/src/sqlite_datamodel_connector.rs @@ -108,7 +108,7 @@ impl Connector for SqliteDatamodelConnector { }; if let Some(path) = set_root(url.trim_start_matches("file:")) { - return Cow::Owned(format!("file:{}", path)); + return Cow::Owned(format!("file:{path}")); }; Cow::Borrowed(url) diff --git a/psl/diagnostics/src/error.rs b/psl/diagnostics/src/error.rs index dacf7f03e4da..66da6b40c4eb 100644 --- a/psl/diagnostics/src/error.rs +++ b/psl/diagnostics/src/error.rs @@ -88,8 +88,7 @@ impl DatamodelError { span: Span, ) -> DatamodelError { let msg = format!( - "Native type {} is not compatible with declared field type {}, expected field type {}.", - native_type, field_type, expected_types + "Native type {native_type} is not compatible with declared field type {field_type}, expected field type {expected_types}.", ); Self::new(msg, span) } @@ -100,10 +99,7 @@ impl DatamodelError { expected: &str, span: Span, ) -> DatamodelError { - let msg = format!( - "Invalid argument for type {}: {}. Allowed values: {}.", - native_type, got, expected - ); + let msg = format!("Invalid argument for type {native_type}: {got}. Allowed values: {expected}."); Self::new(msg, span) } @@ -113,7 +109,7 @@ impl DatamodelError { suggestion: &str, span: Span, ) -> DatamodelError { - let msg = format!("The prefix {} is invalid. It must be equal to the name of an existing datasource e.g. {}. Did you mean to use {}?", given_prefix, expected_prefix, suggestion); + let msg = format!("The prefix {given_prefix} is invalid. It must be equal to the name of an existing datasource e.g. {expected_prefix}. 
Did you mean to use {suggestion}?"); DatamodelError::new(msg, span) } @@ -137,10 +133,7 @@ impl DatamodelError { existing_model_name: &str, span: Span, ) -> DatamodelError { - let msg = format!("The model with database name \"{}\" could not be defined because another model or view with this name exists: \"{}\"", - model_database_name, - existing_model_name - ); + let msg = format!("The model with database name \"{model_database_name}\" could not be defined because another model or view with this name exists: \"{existing_model_name}\""); Self::new(msg, span) } @@ -149,23 +142,19 @@ impl DatamodelError { existing_model_name: &str, span: Span, ) -> DatamodelError { - let msg = format!("The view with database name \"{}\" could not be defined because another model or view with this name exists: \"{}\"", - model_database_name, - existing_model_name - ); + let msg = format!("The view with database name \"{model_database_name}\" could not be defined because another model or view with this name exists: \"{existing_model_name}\""); Self::new(msg, span) } pub fn new_duplicate_top_error(name: &str, top_type: &str, existing_top_type: &str, span: Span) -> DatamodelError { let msg = format!( - "The {} \"{}\" cannot be defined because a {} with that name already exists.", - top_type, name, existing_top_type + "The {top_type} \"{name}\" cannot be defined because a {existing_top_type} with that name already exists.", ); Self::new(msg, span) } pub fn new_duplicate_config_key_error(conf_block_name: &str, key_name: &str, span: Span) -> DatamodelError { - let msg = format!("Key \"{}\" is already defined in {}.", key_name, conf_block_name); + let msg = format!("Key \"{key_name}\" is already defined in {conf_block_name}."); Self::new(msg, span) } @@ -183,7 +172,7 @@ impl DatamodelError { } pub fn new_duplicate_enum_value_error(enum_name: &str, value_name: &str, span: Span) -> DatamodelError { - let msg = format!("Value \"{}\" is already defined on enum \"{}\".", value_name, enum_name); + let msg = format!("Value \"{value_name}\" is already defined on enum \"{enum_name}\".",); Self::new(msg, span) } @@ -228,10 +217,7 @@ impl DatamodelError { } pub fn new_composite_type_validation_error(message: &str, composite_type_name: &str, span: Span) -> DatamodelError { - let msg = format!( - "Error validating composite type \"{}\": {}", - composite_type_name, message - ); + let msg = format!("Error validating composite type \"{composite_type_name}\": {message}",); Self::new(msg, span) } @@ -282,9 +268,9 @@ impl DatamodelError { span: Span, ) -> DatamodelError { let msg = format!( - "Native type {} takes {} optional arguments, but received {}.", - native_type, optional_count, given_count + "Native type {native_type} takes {optional_count} optional arguments, but received {given_count}.", ); + DatamodelError::new(msg, span) } @@ -332,7 +318,7 @@ impl DatamodelError { } pub fn new_invalid_model_error(msg: &str, span: Span) -> DatamodelError { - DatamodelError::new(format!("Invalid model: {}", msg), span) + DatamodelError::new(format!("Invalid model: {msg}"), span) } pub fn new_datasource_provider_not_known_error(provider: &str, span: Span) -> DatamodelError { @@ -350,8 +336,7 @@ impl DatamodelError { span: Span, ) -> DatamodelError { let msg = format!( - "The preview feature \"{}\" is not known. Expected one of: {}", - preview_feature, expected_preview_features + "The preview feature \"{preview_feature}\" is not known. 
Expected one of: {expected_preview_features}", ); Self::new(msg, span) } @@ -372,10 +357,7 @@ impl DatamodelError { } pub fn new_native_type_name_unknown(connector_name: &str, native_type: &str, span: Span) -> DatamodelError { - let msg = format!( - "Native type {} is not supported for {} connector.", - native_type, connector_name - ); + let msg = format!("Native type {native_type} is not supported for {connector_name} connector."); DatamodelError::new(msg, span) } diff --git a/psl/diagnostics/src/pretty_print.rs b/psl/diagnostics/src/pretty_print.rs index 89bdb7e0143b..d8a9dd8c0dab 100644 --- a/psl/diagnostics/src/pretty_print.rs +++ b/psl/diagnostics/src/pretty_print.rs @@ -43,7 +43,7 @@ pub(crate) fn pretty_print( colorer.primary_color(colorer.title()).bold(), description.bold() )?; - writeln!(f, " {} {}", arrow, file_path)?; + writeln!(f, " {arrow} {file_path}")?; writeln!(f, "{}", format_line_number(0))?; writeln!(f, "{}", format_line_number_with_line(start_line_number, &file_lines))?; @@ -83,7 +83,7 @@ fn format_line_number_with_line(line_number: usize, lines: &[&str]) -> colored:: fn format_line_number(line_number: usize) -> colored::ColoredString { if line_number > 0 { - format!("{:2} | ", line_number).bold().bright_blue() + format!("{line_number:2} | ").bold().bright_blue() } else { " | ".bold().bright_blue() } diff --git a/psl/parser-database/src/attributes.rs b/psl/parser-database/src/attributes.rs index 6117198f6c56..4c8c0a7a8a56 100644 --- a/psl/parser-database/src/attributes.rs +++ b/psl/parser-database/src/attributes.rs @@ -300,8 +300,7 @@ fn visit_field_unique(field_id: ast::FieldId, model_attributes: &mut ModelAttrib Some("Asc") => Some(SortOrder::Asc), Some(other) => { ctx.push_attribute_validation_error(&format!( - "The `sort` argument can only be `Asc` or `Desc` you provided: {}.", - other + "The `sort` argument can only be `Asc` or `Desc` you provided: {other}." )); None } @@ -522,7 +521,7 @@ fn model_index(data: &mut ModelAttributes, model_id: ast::ModelId, ctx: &mut Con Some("SpGist") => Some(IndexAlgorithm::SpGist), Some("Brin") => Some(IndexAlgorithm::Brin), Some(other) => { - ctx.push_attribute_validation_error(&format!("Unknown index type: {}.", other)); + ctx.push_attribute_validation_error(&format!("Unknown index type: {other}.")); None } None => None, @@ -1083,8 +1082,7 @@ fn validate_client_name(span: Span, object_name: &str, name: StringId, attribute ctx.push_error(DatamodelError::new_model_validation_error( &format!( - "The `name` property within the `{}` attribute only allows for the following characters: `_a-zA-Z0-9`.", - attribute + "The `name` property within the `{attribute}` attribute only allows for the following characters: `_a-zA-Z0-9`." ), "model", object_name, diff --git a/psl/parser-database/src/attributes/id.rs b/psl/parser-database/src/attributes/id.rs index 31a3f5bb4522..8c8ab2a20317 100644 --- a/psl/parser-database/src/attributes/id.rs +++ b/psl/parser-database/src/attributes/id.rs @@ -173,8 +173,7 @@ pub(super) fn field<'db>( Some("Asc") => Some(SortOrder::Asc), Some(other) => { ctx.push_attribute_validation_error(&format!( - "The `sort` argument can only be `Asc` or `Desc` you provided: {}.", - other + "The `sort` argument can only be `Asc` or `Desc` you provided: {other}." 
)); None } diff --git a/psl/parser-database/src/names.rs b/psl/parser-database/src/names.rs index 00e1df294385..6e1ea30fcbd4 100644 --- a/psl/parser-database/src/names.rs +++ b/psl/parser-database/src/names.rs @@ -191,17 +191,17 @@ fn validate_attribute_identifiers(with_attrs: &dyn WithAttributes, ctx: &mut Con fn validate_identifier(ident: &ast::Identifier, schema_item: &str, ctx: &mut Context<'_>) { if ident.name.is_empty() { ctx.push_error(DatamodelError::new_validation_error( - &format!("The name of a {} must not be empty.", schema_item), + &format!("The name of a {schema_item} must not be empty."), ident.span, )) } else if ident.name.chars().next().unwrap().is_numeric() { ctx.push_error(DatamodelError::new_validation_error( - &format!("The name of a {} must not start with a number.", schema_item), + &format!("The name of a {schema_item} must not start with a number."), ident.span, )) } else if ident.name.contains('-') { ctx.push_error(DatamodelError::new_validation_error( - &format!("The character `-` is not allowed in {} names.", schema_item), + &format!("The character `-` is not allowed in {schema_item} names."), ident.span, )) } diff --git a/psl/parser-database/src/relations.rs b/psl/parser-database/src/relations.rs index 34a6215d692b..b776b0f37bf6 100644 --- a/psl/parser-database/src/relations.rs +++ b/psl/parser-database/src/relations.rs @@ -485,12 +485,14 @@ impl ReferentialAction { "SetNull" => Some(ReferentialAction::SetNull), "SetDefault" => Some(ReferentialAction::SetDefault), s => { - let message = format!("Invalid referential action: `{}`", s); + let message = format!("Invalid referential action: `{s}`"); + diagnostics.push_error(DatamodelError::new_attribute_validation_error( &message, "@relation", expr.span(), )); + None } } diff --git a/psl/parser-database/src/types.rs b/psl/parser-database/src/types.rs index d0e0caaac1ac..e4c6457dd707 100644 --- a/psl/parser-database/src/types.rs +++ b/psl/parser-database/src/types.rs @@ -568,7 +568,7 @@ fn visit_composite_type<'db>(ct_id: ast::CompositeTypeId, ct: &'db ast::Composit } Ok(FieldType::Model(referenced_model_id)) => { let referenced_model_name = ctx.ast[referenced_model_id].name(); - ctx.push_error(DatamodelError::new_composite_type_validation_error(&format!("{} refers to a model, making this a relation field. Relation fields inside composite types are not supported.", referenced_model_name), ct.name(), ast_field.field_type.span())) + ctx.push_error(DatamodelError::new_composite_type_validation_error(&format!("{referenced_model_name} refers to a model, making this a relation field. 
Relation fields inside composite types are not supported."), ct.name(), ast_field.field_type.span())) } Err(supported) => ctx.push_error(DatamodelError::new_type_not_found_error( supported, diff --git a/psl/parser-database/src/walkers/relation_field.rs b/psl/parser-database/src/walkers/relation_field.rs index 26e061e5579f..20990b55b03c 100644 --- a/psl/parser-database/src/walkers/relation_field.rs +++ b/psl/parser-database/src/walkers/relation_field.rs @@ -250,9 +250,9 @@ impl<'db> std::hash::Hash for RelationName<'db> { impl<'db> RelationName<'db> { pub(crate) fn generated(model_a: &str, model_b: &str) -> Self { if model_a < model_b { - Self::Generated(format!("{}To{}", model_a, model_b)) + Self::Generated(format!("{model_a}To{model_b}")) } else { - Self::Generated(format!("{}To{}", model_b, model_a)) + Self::Generated(format!("{model_b}To{model_a}")) } } } diff --git a/psl/psl-core/src/configuration/configuration_struct.rs b/psl/psl-core/src/configuration/configuration_struct.rs index 48893ea62e09..ee4fcccac3f6 100644 --- a/psl/psl-core/src/configuration/configuration_struct.rs +++ b/psl/psl-core/src/configuration/configuration_struct.rs @@ -72,9 +72,8 @@ impl Configuration { super::UrlValidationError::EmptyEnvValue(env_var) => { Err(DatamodelError::new_source_validation_error( &format!( - "You must provide a nonempty direct URL. The environment variable `{}` resolved to an empty string.", - env_var - ), + "You must provide a nonempty direct URL. The environment variable `{env_var}` resolved to an empty string." + ), &datasource.name, *span, )) diff --git a/psl/psl-core/src/configuration/datasource.rs b/psl/psl-core/src/configuration/datasource.rs index 6aa9f436bbba..f7d9198ad082 100644 --- a/psl/psl-core/src/configuration/datasource.rs +++ b/psl/psl-core/src/configuration/datasource.rs @@ -107,10 +107,7 @@ impl Datasource { } UrlValidationError::EmptyEnvValue(env_var) => { return Err(DatamodelError::new_source_validation_error( - &format!( - "You must provide a nonempty URL. The environment variable `{}` resolved to an empty string.", - env_var - ), + &format!("You must provide a nonempty URL. The environment variable `{env_var}` resolved to an empty string."), &self.name, self.url_span, ) @@ -167,9 +164,8 @@ impl Datasource { UrlValidationError::EmptyEnvValue(env_var) => { Err(DatamodelError::new_source_validation_error( &format!( - "You must provide a nonempty direct URL. The environment variable `{}` resolved to an empty string.", - env_var - ), + "You must provide a nonempty direct URL. The environment variable `{env_var}` resolved to an empty string." 
+ ), &self.name, span, ) diff --git a/psl/psl-core/src/datamodel_connector.rs b/psl/psl-core/src/datamodel_connector.rs index 6f8c25ce5c79..a23b9dad7b6c 100644 --- a/psl/psl-core/src/datamodel_connector.rs +++ b/psl/psl-core/src/datamodel_connector.rs @@ -383,15 +383,13 @@ impl ConstraintScope { Cow::from("global for primary keys, foreign keys and default constraints") } ConstraintScope::ModelKeyIndex => { - Cow::from(format!("on model `{}` for indexes and unique constraints", model_name)) + Cow::from(format!("on model `{model_name}` for indexes and unique constraints")) } ConstraintScope::ModelPrimaryKeyKeyIndex => Cow::from(format!( - "on model `{}` for primary key, indexes and unique constraints", - model_name + "on model `{model_name}` for primary key, indexes and unique constraints" )), ConstraintScope::ModelPrimaryKeyKeyIndexForeignKey => Cow::from(format!( - "on model `{}` for primary key, indexes, unique constraints and foreign keys", - model_name + "on model `{model_name}` for primary key, indexes, unique constraints and foreign keys" )), } } diff --git a/psl/psl-core/src/datamodel_connector/constraint_names.rs b/psl/psl-core/src/datamodel_connector/constraint_names.rs index 69b202ec788e..ddf3db5d910d 100644 --- a/psl/psl-core/src/datamodel_connector/constraint_names.rs +++ b/psl/psl-core/src/datamodel_connector/constraint_names.rs @@ -43,7 +43,7 @@ impl ConstraintNames { table_name }; - format!("{}{}", table_name, suffix) + format!("{table_name}{suffix}") } pub fn unique_index_name(table_name: &str, column_names: &[&str], connector: &dyn Connector) -> String { @@ -84,14 +84,14 @@ impl ConstraintNames { pub fn default_name(table_name: &str, column_name: &str, connector: &dyn Connector) -> String { let limit = connector.max_identifier_length(); - let mut joined = format!("{}_{}", table_name, column_name); + let mut joined = format!("{table_name}_{column_name}"); if joined.len() >= limit - 3 { let split = floor_char_boundary(&joined, limit - 3); joined.truncate(split); } - format!("{}_df", joined) + format!("{joined}_df") } /// Params: @@ -102,14 +102,14 @@ impl ConstraintNames { let fk_suffix = "_fkey"; let limit = connector.max_identifier_length(); - let mut joined = format!("{}_{}", table_name, column_names.join("_")); + let mut joined = format!("{table_name}_{}", column_names.join("_")); if joined.len() >= limit - 5 { let split = floor_char_boundary(&joined, limit - 5); joined.truncate(split); } - format!("{}{}", joined, fk_suffix) + format!("{joined}{fk_suffix}") } pub fn is_db_name_too_long( @@ -124,7 +124,7 @@ impl ConstraintNames { if name.len() > connector.max_identifier_length() { let ats = if double_at { "@@" } else { "@" }; return Some(DatamodelError::new_model_validation_error( - &format!("The constraint name '{}' specified in the `map` argument for the `{}{}` constraint is too long for your chosen provider. The maximum allowed length is {} bytes.", name, ats, attribute, connector.max_identifier_length()), + &format!("The constraint name '{name}' specified in the `map` argument for the `{ats}{attribute}` constraint is too long for your chosen provider. 
The maximum allowed length is {} bytes.", connector.max_identifier_length()), "model", object_name, span, diff --git a/psl/psl-core/src/validate/datasource_loader.rs b/psl/psl-core/src/validate/datasource_loader.rs index 1e996a91f752..561e0454cba5 100644 --- a/psl/psl-core/src/validate/datasource_loader.rs +++ b/psl/psl-core/src/validate/datasource_loader.rs @@ -234,7 +234,7 @@ fn get_relation_mode( let supported_values = connector .allowed_relation_mode_settings() .iter() - .map(|v| format!(r#""{}""#, v)) + .map(|v| format!(r#""{v}""#)) .collect::<Vec<_>>() .join(", "); diff --git a/psl/psl-core/src/validate/validation_pipeline/validations/composite_types.rs b/psl/psl-core/src/validate/validation_pipeline/validations/composite_types.rs index dfda498f0262..3c57b32d44dd 100644 --- a/psl/psl-core/src/validate/validation_pipeline/validations/composite_types.rs +++ b/psl/psl-core/src/validate/validation_pipeline/validations/composite_types.rs @@ -46,8 +46,7 @@ pub(super) fn detect_composite_cycles(ctx: &mut Context<'_>) { } ScalarFieldType::CompositeType(ctid) if visited.first() == Some(ctid) => { let msg = format!( - "The types cause an endless cycle in the path {}. Please change one of the fields to be either optional or a list to break the cycle.", - path, + "The types cause an endless cycle in the path {path}. Please change one of the fields to be either optional or a list to break the cycle." ); errors.push(( @@ -156,7 +155,7 @@ impl<'db> fmt::Display for CompositeTypePath<'db> { let path = traversed .into_iter() .map(|w| w.name()) - .map(|n| format!("`{}`", n)) + .map(|n| format!("`{n}`")) .collect::<Vec<_>>() .join(" → "); diff --git a/psl/psl-core/src/validate/validation_pipeline/validations/default_value.rs b/psl/psl-core/src/validate/validation_pipeline/validations/default_value.rs index e8cf3bf0ae67..0ac90e8c65d0 100644 --- a/psl/psl-core/src/validate/validation_pipeline/validations/default_value.rs +++ b/psl/psl-core/src/validate/validation_pipeline/validations/default_value.rs @@ -86,11 +86,7 @@ pub(super) fn validate_default_value( Err(details) => details, }; - let message = format!( - "Parse error: \"{bad_value}\" is not a valid rfc3339 datetime string. ({details})", - details = details, - bad_value = value, - ); + let message = format!("Parse error: \"{value}\" is not a valid rfc3339 datetime string. ({details})"); ctx.push_error(DatamodelError::new_attribute_validation_error( &message, "@default", *span, diff --git a/psl/psl-core/src/validate/validation_pipeline/validations/indexes.rs b/psl/psl-core/src/validate/validation_pipeline/validations/indexes.rs index 7923d97ea501..7d16ce78414d 100644 --- a/psl/psl-core/src/validate/validation_pipeline/validations/indexes.rs +++ b/psl/psl-core/src/validate/validation_pipeline/validations/indexes.rs @@ -55,8 +55,7 @@ pub(super) fn unique_index_has_a_unique_custom_name_per_model( .local_custom_name_scope_violations(model.model_id(), name.as_ref()) { let message = format!( - "The given custom name `{}` has to be unique on the model. Please provide a different name for the `name` argument.", - name, + "The given custom name `{name}` has to be unique on the model. Please provide a different name for the `name` argument." 
); let from_arg = index.ast_attribute().span_for_argument("name"); diff --git a/psl/psl-core/src/validate/validation_pipeline/validations/models.rs b/psl/psl-core/src/validate/validation_pipeline/validations/models.rs index 77b8d3ae8065..bc03a848a2b6 100644 --- a/psl/psl-core/src/validate/validation_pipeline/validations/models.rs +++ b/psl/psl-core/src/validate/validation_pipeline/validations/models.rs @@ -44,7 +44,7 @@ pub(super) fn has_a_strict_unique_criteria(model: ModelWalker<'_>, ctx: &mut Con loose_criterias.collect::<Vec<_>>().join("\n"), ); - Cow::from(format!("{} {}", msg, suffix)) + Cow::from(format!("{msg} {suffix}")) } else { Cow::from(msg) }; @@ -118,8 +118,7 @@ pub(super) fn has_a_unique_custom_primary_key_name_per_model( .local_custom_name_scope_violations(model.model_id(), name.as_ref()) { let message = format!( - "The given custom name `{}` has to be unique on the model. Please provide a different name for the `name` argument.", - name, + "The given custom name `{name}` has to be unique on the model. Please provide a different name for the `name` argument." ); let span = pk diff --git a/psl/psl-core/src/validate/validation_pipeline/validations/relation_fields.rs b/psl/psl-core/src/validate/validation_pipeline/validations/relation_fields.rs index 96846478e55d..47f3d05a6f24 100644 --- a/psl/psl-core/src/validate/validation_pipeline/validations/relation_fields.rs +++ b/psl/psl-core/src/validate/validation_pipeline/validations/relation_fields.rs @@ -31,7 +31,7 @@ impl<'db> fmt::Display for Fields<'db> { .fields .iter() .map(|field_id| self.model.relation_field(*field_id).name()) - .map(|name| format!("`{}`", name)); + .map(|name| format!("`{name}`")); match fields.len() { x if x < 2 => f.write_str(&fields.join(", ")), @@ -309,15 +309,15 @@ mod tests { fn test_is_left_wise_included() { let item = vec![1, 2]; let group = vec![1, 2, 3, 4]; - assert_eq!(is_leftwise_included_it(item.iter(), group.iter()), true); + assert!(is_leftwise_included_it(item.iter(), group.iter())); let item = vec![1, 2, 3, 4]; let group = vec![1, 2, 3, 4]; - assert_eq!(is_leftwise_included_it(item.iter(), group.iter()), true); + assert!(is_leftwise_included_it(item.iter(), group.iter())); let item = vec![1, 2, 3, 4]; let group = vec![1, 2]; - assert_eq!(is_leftwise_included_it(item.iter(), group.iter()), false); + assert!(!is_leftwise_included_it(item.iter(), group.iter())); let item = vec![2, 3]; let group = vec![1, 2, 3, 4]; - assert_eq!(is_leftwise_included_it(item.iter(), group.iter()), false); + assert!(!is_leftwise_included_it(item.iter(), group.iter())); } } diff --git a/psl/psl-core/src/validate/validation_pipeline/validations/relations.rs b/psl/psl-core/src/validate/validation_pipeline/validations/relations.rs index 7f63bef1e2c1..25fb6917a7c8 100644 --- a/psl/psl-core/src/validate/validation_pipeline/validations/relations.rs +++ b/psl/psl-core/src/validate/validation_pipeline/validations/relations.rs @@ -265,8 +265,7 @@ pub(super) fn cycles(relation: CompleteInlineRelationWalker<'_>, ctx: &mut Conte if related_model.id == parent_model.id { let msg = format!( - "Reference causes a cycle. One of the @relation attributes in this cycle must have `onDelete` and `onUpdate` referential actions set to `NoAction`. Cycle path: {}.", - visited_relations + "Reference causes a cycle. One of the @relation attributes in this cycle must have `onDelete` and `onUpdate` referential actions set to `NoAction`. Cycle path: {visited_relations}." 
); ctx.push_error(cascade_error_with_default_values( @@ -411,10 +410,7 @@ pub(super) fn multiple_cascading_paths(relation: CompleteInlineRelationWalker<'_ } } - let models = reachable - .iter() - .map(|model_name| format!("`{}`", model_name)) - .join(", "); + let models = reachable.iter().map(|model_name| format!("`{model_name}`")).join(", "); #[allow(clippy::comparison_chain)] // match looks horrible here... if reachable.len() == 1 { diff --git a/psl/psl/build.rs b/psl/psl/build.rs index e3151459050a..509b60875998 100644 --- a/psl/psl/build.rs +++ b/psl/psl/build.rs @@ -53,7 +53,7 @@ fn find_all_schemas(prefix: &str, all_schemas: &mut Vec<String>, root_dir: &'sta let entry = entry.unwrap(); let file_name = entry.file_name(); let file_name = file_name.to_str().unwrap(); - let entry_path = format!("{}/{}", prefix, file_name); + let entry_path = format!("{prefix}/{file_name}"); let file_type = entry.file_type().unwrap(); if file_name == "." || file_name == ".." { diff --git a/psl/psl/tests/base/base_types.rs b/psl/psl/tests/base/base_types.rs index ea15cb4c4595..bccb691cc843 100644 --- a/psl/psl/tests/base/base_types.rs +++ b/psl/psl/tests/base/base_types.rs @@ -180,7 +180,7 @@ fn json_type_must_work_for_some_connectors() { .assert_has_scalar_field("json") .assert_base_type(&ScalarType::Json); - let error = parse_unwrap_err(&format!("{}\n{}", SQLITE_SOURCE, dml)); + let error = parse_unwrap_err(&format!("{SQLITE_SOURCE}\n{dml}")); let expectation = expect![[r#" error: Error validating field `json` in model `User`: Field `json` in model `User` can't be of type Json. The current connector does not support the Json type. @@ -195,13 +195,13 @@ fn json_type_must_work_for_some_connectors() { expectation.assert_eq(&error); // Postgres does support it - parse(&format!("{}\n{}", POSTGRES_SOURCE, dml)) + parse(&format!("{POSTGRES_SOURCE}\n{dml}")) .assert_has_model("User") .assert_has_scalar_field("json") .assert_base_type(&ScalarType::Json); // MySQL does support it - parse(&format!("{}\n{}", MYSQL_SOURCE, dml)) + parse(&format!("{MYSQL_SOURCE}\n{dml}")) .assert_has_model("User") .assert_has_scalar_field("json") .assert_base_type(&ScalarType::Json); diff --git a/psl/psl/tests/common/mod.rs b/psl/psl/tests/common/mod.rs index 327b166d13b2..85e05709182f 100644 --- a/psl/psl/tests/common/mod.rs +++ b/psl/psl/tests/common/mod.rs @@ -257,27 +257,27 @@ impl RelationFieldAsserts for dml::RelationField { impl DatamodelAsserts for dml::Datamodel { fn assert_has_model(&self, t: &str) -> &dml::Model { - self.find_model(t).unwrap_or_else(|| panic!("Model {} not found", t)) + self.find_model(t).unwrap_or_else(|| panic!("Model {t} not found")) } fn assert_has_enum(&self, t: &str) -> &dml::Enum { - self.find_enum(t).unwrap_or_else(|| panic!("Enum {} not found", t)) + self.find_enum(t).unwrap_or_else(|| panic!("Enum {t} not found")) } fn assert_has_composite_type(&self, t: &str) -> &dml::CompositeType { self.find_composite_type(t) - .unwrap_or_else(|| panic!("Composite type {} not found", t)) + .unwrap_or_else(|| panic!("Composite type {t} not found")) } } impl ModelAsserts for dml::Model { fn assert_has_scalar_field(&self, t: &str) -> &dml::ScalarField { self.find_scalar_field(t) - .unwrap_or_else(|| panic!("Field {} not found", t)) + .unwrap_or_else(|| panic!("Field {t} not found")) } fn assert_has_relation_field(&self, t: &str) -> &dml::RelationField { self.find_relation_field(t) - .unwrap_or_else(|| panic!("Field {} not found", t)) + .unwrap_or_else(|| panic!("Field {t} not found")) } fn assert_with_db_name(&self, t: 
&str) -> &Self { @@ -346,25 +346,25 @@ impl CompositeTypeAsserts for dml::CompositeType { fn assert_has_scalar_field(&self, t: &str) -> &dml::CompositeTypeField { self.scalar_fields() .find(|field| field.name == t) - .unwrap_or_else(|| panic!("Field {} not found", t)) + .unwrap_or_else(|| panic!("Field {t} not found")) } fn assert_has_enum_field(&self, t: &str) -> &dml::CompositeTypeField { self.enum_fields() .find(|field| field.name == t) - .unwrap_or_else(|| panic!("Field {} not found", t)) + .unwrap_or_else(|| panic!("Field {t} not found")) } fn assert_has_composite_type_field(&self, t: &str) -> &dml::CompositeTypeField { self.composite_type_fields() .find(|field| field.name == t) - .unwrap_or_else(|| panic!("Field {} not found", t)) + .unwrap_or_else(|| panic!("Field {t} not found")) } fn assert_has_unsupported_field(&self, t: &str) -> &dml::CompositeTypeField { self.unsupported_fields() .find(|field| field.name == t) - .unwrap_or_else(|| panic!("Field {} not found", t)) + .unwrap_or_else(|| panic!("Field {t} not found")) } } @@ -372,7 +372,7 @@ impl EnumAsserts for dml::Enum { fn assert_has_value(&self, t: &str) -> &dml::EnumValue { self.values() .find(|x| x.name == t) - .unwrap_or_else(|| panic!("Enum Value {} not found", t)) + .unwrap_or_else(|| panic!("Enum Value {t} not found")) } fn assert_with_documentation(&self, t: &str) -> &Self { diff --git a/psl/psl/tests/datamodel_tests.rs b/psl/psl/tests/datamodel_tests.rs index b7d8a4c28799..b950ff6fc2fd 100644 --- a/psl/psl/tests/datamodel_tests.rs +++ b/psl/psl/tests/datamodel_tests.rs @@ -39,7 +39,7 @@ fn with_header(dm: &str, provider: Provider, preview_features: &[&str]) -> Strin "previewFeatures = [{}]", preview_features .iter() - .map(|f| format!("\"{}\"", f)) + .map(|f| format!("\"{f}\"")) .collect::<Vec<_>>() .join(", ") ) @@ -62,5 +62,5 @@ fn with_header(dm: &str, provider: Provider, preview_features: &[&str]) -> Strin preview_features ); - format!("{}\n{}", header, dm) + format!("{header}\n{dm}") } diff --git a/psl/psl/tests/panic_with_diff/mod.rs b/psl/psl/tests/panic_with_diff/mod.rs index d6301e4d1049..a66b81643fdc 100644 --- a/psl/psl/tests/panic_with_diff/mod.rs +++ b/psl/psl/tests/panic_with_diff/mod.rs @@ -20,8 +20,8 @@ fn format_chunks(chunks: Vec<dissimilar::Chunk<'_>>) -> String { for chunk in chunks { let formatted = match chunk { dissimilar::Chunk::Equal(text) => text.into(), - dissimilar::Chunk::Delete(text) => format!("\x1b[41m{}\x1b[0m", text), - dissimilar::Chunk::Insert(text) => format!("\x1b[42m{}\x1b[0m", text), + dissimilar::Chunk::Delete(text) => format!("\x1b[41m{text}\x1b[0m"), + dissimilar::Chunk::Insert(text) => format!("\x1b[42m{text}\x1b[0m"), }; buf.push_str(&formatted); } diff --git a/psl/psl/tests/reformat_tests.rs b/psl/psl/tests/reformat_tests.rs index ea6207894047..c945ad53c077 100644 --- a/psl/psl/tests/reformat_tests.rs +++ b/psl/psl/tests/reformat_tests.rs @@ -7,7 +7,7 @@ const TESTS_ROOT: &str = concat!(env!("CARGO_MANIFEST_DIR"), "/tests/reformatter #[inline(never)] // we want to compile fast fn run_reformat_test(test_file_path: &str) { let file_path = path::Path::new(TESTS_ROOT).join(test_file_path); - let text = fs::read_to_string(&file_path).unwrap(); + let text = fs::read_to_string(file_path).unwrap(); let reformatted_text: String = reformat(&text); let snapshot_file_name = path::Path::new(TESTS_ROOT).join(format!( diff --git a/psl/schema-ast/src/ast/expression.rs b/psl/schema-ast/src/ast/expression.rs index 78172808b780..6c239adf92fb 100644 --- a/psl/schema-ast/src/ast/expression.rs +++ 
b/psl/schema-ast/src/ast/expression.rs @@ -25,11 +25,11 @@ impl fmt::Display for Expression { Expression::ConstantValue(val, _) => fmt::Display::fmt(val, f), Expression::Function(fun, args, _) => { let args = args.iter().map(ToString::to_string).collect::<Vec<_>>().join(","); - write!(f, "{}({})", fun, args) + write!(f, "{fun}({args})") } Expression::Array(vals, _) => { let vals = vals.iter().map(ToString::to_string).collect::<Vec<_>>().join(","); - write!(f, "[{}]", vals) + write!(f, "[{vals}]") } } } diff --git a/psl/schema-ast/src/parser/parse_arguments.rs b/psl/schema-ast/src/parser/parse_arguments.rs index ed207a0072d9..67b5d930f83b 100644 --- a/psl/schema-ast/src/parser/parse_arguments.rs +++ b/psl/schema-ast/src/parser/parse_arguments.rs @@ -56,6 +56,6 @@ fn parse_named_arg(pair: Pair<'_>, diagnostics: &mut Diagnostics) -> ast::Argume value, span: ast::Span::from(pair_span), }, - _ => panic!("Encountered impossible attribute arg during parsing: {:?}", pair_str), + _ => panic!("Encountered impossible attribute arg during parsing: {pair_str:?}"), } } diff --git a/psl/schema-ast/src/parser/parse_enum.rs b/psl/schema-ast/src/parser/parse_enum.rs index f73a8f1dec03..3fdc8b7d4c70 100644 --- a/psl/schema-ast/src/parser/parse_enum.rs +++ b/psl/schema-ast/src/parser/parse_enum.rs @@ -88,9 +88,6 @@ fn parse_enum_value( documentation: comment, span: Span::from(pair_span), }), - _ => panic!( - "Encountered impossible enum value declaration during parsing, name is missing: {:?}", - pair_str - ), + _ => panic!("Encountered impossible enum value declaration during parsing, name is missing: {pair_str:?}",), } } diff --git a/psl/schema-ast/src/parser/parse_schema.rs b/psl/schema-ast/src/parser/parse_schema.rs index f84913ab05c8..6782caab9e44 100644 --- a/psl/schema-ast/src/parser/parse_schema.rs +++ b/psl/schema-ast/src/parser/parse_schema.rs @@ -101,7 +101,7 @@ fn get_expected_from_error(positives: &[Rule]) -> String { let mut out = String::with_capacity(positives.len() * 6); for positive in positives { - write!(out, "{:?}", positive).unwrap(); + write!(out, "{positive:?}").unwrap(); } out diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/src/utils/string.rs b/query-engine/connector-test-kit-rs/query-engine-tests/src/utils/string.rs index c43cd43e5c9b..091b68601a06 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/src/utils/string.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/src/utils/string.rs @@ -1,5 +1,5 @@ pub fn enclose(input: &str, with: &str) -> String { - format!("{}{}{}", with, input, with) + format!("{with}{input}{with}") } pub fn enclose_all(input: Vec<String>, with: &str) -> Vec<String> diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/assertion_violation_error.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/assertion_violation_error.rs index 01ab6294d079..62c4e3005f71 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/assertion_violation_error.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/assertion_violation_error.rs @@ -10,21 +10,16 @@ mod raw_params { let ids: Vec<i32> = (1..n + 1).collect(); // "$1,$2,...,$n" - let params: String = ids - .iter() - .map(|id| format!("${}", id)) - .collect::<Vec<_>>() - .join(","); + let params: String = ids.iter().map(|id| format!("${id}")).collect::<Vec<_>>().join(","); let mutation = format!( r#" mutation {{ queryRaw( - query: "SELECT * FROM \"TestModel\" WHERE id IN ({})", - parameters: "{:?}" + query: "SELECT * FROM \"TestModel\" WHERE id IN 
({params})", + parameters: "{ids:?}" ) }}"#, - params, ids, ); assert_error!( diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/interactive_tx.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/interactive_tx.rs index 55c37f7ad6e5..7442b1172265 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/interactive_tx.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/interactive_tx.rs @@ -85,7 +85,7 @@ mod interactive_tx { let error = res.err().unwrap(); let known_err = error.as_known().unwrap(); - println!("KNOWN ERROR {:?}", known_err); + println!("KNOWN ERROR {known_err:?}"); assert_eq!(known_err.error_code, Cow::Borrowed("P2028")); assert!(known_err diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/max_integer.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/max_integer.rs index aad63919146a..72fe36f3e2e5 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/max_integer.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/max_integer.rs @@ -103,19 +103,13 @@ mod max_integer { async fn unfitted_int_should_fail(runner: Runner) -> TestResult<()> { assert_error!( runner, - format!( - "mutation {{ createOneTest(data: {{ id: 1, int: {} }}) {{ id int }} }}", - I32_OVERFLOW_MAX - ), + format!("mutation {{ createOneTest(data: {{ id: 1, int: {I32_OVERFLOW_MAX} }}) {{ id int }} }}"), 0 ); assert_error!( runner, - format!( - "mutation {{ createOneTest(data: {{ id: 1, int: {} }}) {{ id int }} }}", - I32_OVERFLOW_MIN - ), + format!("mutation {{ createOneTest(data: {{ id: 1, int: {I32_OVERFLOW_MIN} }}) {{ id int }} }}"), 0 ); @@ -140,19 +134,13 @@ mod max_integer { // int assert_error!( runner, - format!( - "mutation {{ createOneTest(data: {{ int: {} }}) {{ id }} }}", - I32_OVERFLOW_MAX - ), + format!("mutation {{ createOneTest(data: {{ int: {I32_OVERFLOW_MAX} }}) {{ id }} }}"), None, "Unable to fit integer value '2147483648' into an INT4 (32-bit signed integer)." ); assert_error!( runner, - format!( - "mutation {{ createOneTest(data: {{ int: {} }}) {{ id }} }}", - I32_OVERFLOW_MIN - ), + format!("mutation {{ createOneTest(data: {{ int: {I32_OVERFLOW_MIN} }}) {{ id }} }}"), None, "Unable to fit integer value '-2147483649' into an INT4 (32-bit signed integer)." ); @@ -160,19 +148,13 @@ mod max_integer { // smallint assert_error!( runner, - format!( - "mutation {{ createOneTest(data: {{ smallint: {} }}) {{ id }} }}", - I16_OVERFLOW_MAX - ), + format!("mutation {{ createOneTest(data: {{ smallint: {I16_OVERFLOW_MAX} }}) {{ id }} }}"), None, "Unable to fit integer value '32768' into an INT2 (16-bit signed integer)." ); assert_error!( runner, - format!( - "mutation {{ createOneTest(data: {{ smallint: {} }}) {{ id }} }}", - I16_OVERFLOW_MIN - ), + format!("mutation {{ createOneTest(data: {{ smallint: {I16_OVERFLOW_MIN} }}) {{ id }} }}"), None, "Unable to fit integer value '-32769' into an INT2 (16-bit signed integer)." ); @@ -180,19 +162,13 @@ mod max_integer { //oid assert_error!( runner, - format!( - "mutation {{ createOneTest(data: {{ oid: {} }}) {{ id }} }}", - U32_OVERFLOW_MAX - ), + format!("mutation {{ createOneTest(data: {{ oid: {U32_OVERFLOW_MAX} }}) {{ id }} }}"), None, "Unable to fit integer value '4294967296' into an OID (32-bit unsigned integer)." 
); assert_error!( runner, - format!( - "mutation {{ createOneTest(data: {{ oid: {} }}) {{ id }} }}", - OVERFLOW_MIN - ), + format!("mutation {{ createOneTest(data: {{ oid: {OVERFLOW_MIN} }}) {{ id }} }}"), None, "Unable to fit integer value '-1' into an OID (32-bit unsigned integer)." ); @@ -260,19 +236,13 @@ mod max_integer { // tinyint assert_error!( runner, - format!( - "mutation {{ createOneTest(data: {{ tinyint: {} }}) {{ id }} }}", - I8_OVERFLOW_MAX - ), + format!("mutation {{ createOneTest(data: {{ tinyint: {I8_OVERFLOW_MAX} }}) {{ id }} }}"), 2020, "Value out of range for the type. Out of range value for column 'tinyint'" ); assert_error!( runner, - format!( - "mutation {{ createOneTest(data: {{ tinyint: {} }}) {{ id }} }}", - I8_OVERFLOW_MIN - ), + format!("mutation {{ createOneTest(data: {{ tinyint: {I8_OVERFLOW_MIN} }}) {{ id }} }}"), 2020, "Value out of range for the type. Out of range value for column 'tinyint'" ); @@ -280,19 +250,13 @@ mod max_integer { // smallint assert_error!( runner, - format!( - "mutation {{ createOneTest(data: {{ smallint: {} }}) {{ id }} }}", - I16_OVERFLOW_MAX - ), + format!("mutation {{ createOneTest(data: {{ smallint: {I16_OVERFLOW_MAX} }}) {{ id }} }}"), 2020, "Value out of range for the type. Out of range value for column 'smallint'" ); assert_error!( runner, - format!( - "mutation {{ createOneTest(data: {{ smallint: {} }}) {{ id }} }}", - I16_OVERFLOW_MIN - ), + format!("mutation {{ createOneTest(data: {{ smallint: {I16_OVERFLOW_MIN} }}) {{ id }} }}"), 2020, "Value out of range for the type. Out of range value for column 'smallint'" ); @@ -300,19 +264,13 @@ mod max_integer { // mediumint assert_error!( runner, - format!( - "mutation {{ createOneTest(data: {{ mediumint: {} }}) {{ id }} }}", - I24_OVERFLOW_MAX - ), + format!("mutation {{ createOneTest(data: {{ mediumint: {I24_OVERFLOW_MAX} }}) {{ id }} }}"), 2020, "Value out of range for the type. Out of range value for column 'mediumint'" ); assert_error!( runner, - format!( - "mutation {{ createOneTest(data: {{ mediumint: {} }}) {{ id }} }}", - I24_OVERFLOW_MIN - ), + format!("mutation {{ createOneTest(data: {{ mediumint: {I24_OVERFLOW_MIN} }}) {{ id }} }}"), 2020, "Value out of range for the type. Out of range value for column 'mediumint'" ); @@ -320,19 +278,13 @@ mod max_integer { // int assert_error!( runner, - format!( - "mutation {{ createOneTest(data: {{ int: {} }}) {{ id }} }}", - I32_OVERFLOW_MAX - ), + format!("mutation {{ createOneTest(data: {{ int: {I32_OVERFLOW_MAX} }}) {{ id }} }}"), 2020, "Value out of range for the type. Out of range value for column 'int'" ); assert_error!( runner, - format!( - "mutation {{ createOneTest(data: {{ int: {} }}) {{ id }} }}", - I32_OVERFLOW_MIN - ), + format!("mutation {{ createOneTest(data: {{ int: {I32_OVERFLOW_MIN} }}) {{ id }} }}"), 2020, "Value out of range for the type. Out of range value for column 'int'" ); @@ -355,19 +307,13 @@ mod max_integer { // unsigned tinyint assert_error!( runner, - format!( - "mutation {{ createOneTest(data: {{ unsigned_tinyint: {} }}) {{ id }} }}", - U8_OVERFLOW_MAX - ), + format!("mutation {{ createOneTest(data: {{ unsigned_tinyint: {U8_OVERFLOW_MAX} }}) {{ id }} }}"), 2020, "Value out of range for the type. 
Out of range value for column 'unsigned_tinyint'" ); assert_error!( runner, - format!( - "mutation {{ createOneTest(data: {{ unsigned_tinyint: {} }}) {{ id }} }}", - OVERFLOW_MIN - ), + format!("mutation {{ createOneTest(data: {{ unsigned_tinyint: {OVERFLOW_MIN} }}) {{ id }} }}"), 2020, "Value out of range for the type. Out of range value for column 'unsigned_tinyint'" ); @@ -375,19 +321,13 @@ mod max_integer { // unsigned smallint assert_error!( runner, - format!( - "mutation {{ createOneTest(data: {{ unsigned_smallint: {} }}) {{ id }} }}", - U16_OVERFLOW_MAX - ), + format!("mutation {{ createOneTest(data: {{ unsigned_smallint: {U16_OVERFLOW_MAX} }}) {{ id }} }}"), 2020, "Value out of range for the type. Out of range value for column 'unsigned_smallint'" ); assert_error!( runner, - format!( - "mutation {{ createOneTest(data: {{ unsigned_smallint: {} }}) {{ id }} }}", - OVERFLOW_MIN - ), + format!("mutation {{ createOneTest(data: {{ unsigned_smallint: {OVERFLOW_MIN} }}) {{ id }} }}"), 2020, "Value out of range for the type. Out of range value for column 'unsigned_smallint'" ); @@ -395,19 +335,13 @@ mod max_integer { // unsigned mediumint assert_error!( runner, - format!( - "mutation {{ createOneTest(data: {{ unsigned_mediumint: {} }}) {{ id }} }}", - U24_OVERFLOW_MAX - ), + format!("mutation {{ createOneTest(data: {{ unsigned_mediumint: {U24_OVERFLOW_MAX} }}) {{ id }} }}"), 2020, "Value out of range for the type. Out of range value for column 'unsigned_mediumint'" ); assert_error!( runner, - format!( - "mutation {{ createOneTest(data: {{ unsigned_mediumint: {} }}) {{ id }} }}", - OVERFLOW_MIN - ), + format!("mutation {{ createOneTest(data: {{ unsigned_mediumint: {OVERFLOW_MIN} }}) {{ id }} }}"), 2020, "Value out of range for the type. Out of range value for column 'unsigned_mediumint'" ); @@ -415,19 +349,13 @@ mod max_integer { // unsigned int assert_error!( runner, - format!( - "mutation {{ createOneTest(data: {{ unsigned_int: {} }}) {{ id }} }}", - U32_OVERFLOW_MAX - ), + format!("mutation {{ createOneTest(data: {{ unsigned_int: {U32_OVERFLOW_MAX} }}) {{ id }} }}"), 2020, "Value out of range for the type. Out of range value for column 'unsigned_int'" ); assert_error!( runner, - format!( - "mutation {{ createOneTest(data: {{ unsigned_int: {} }}) {{ id }} }}", - OVERFLOW_MIN - ), + format!("mutation {{ createOneTest(data: {{ unsigned_int: {OVERFLOW_MIN} }}) {{ id }} }}"), 2020, "Value out of range for the type. 
Out of range value for column 'unsigned_int'" ); @@ -548,19 +476,13 @@ mod max_integer { // tinyint assert_error!( runner, - format!( - "mutation {{ createOneTest(data: {{ tinyint: {} }}) {{ id }} }}", - U8_OVERFLOW_MAX - ), + format!("mutation {{ createOneTest(data: {{ tinyint: {U8_OVERFLOW_MAX} }}) {{ id }} }}"), None, "Arithmetic overflow error converting expression to data type tinyint" ); assert_error!( runner, - format!( - "mutation {{ createOneTest(data: {{ tinyint: {} }}) {{ id }} }}", - OVERFLOW_MIN - ), + format!("mutation {{ createOneTest(data: {{ tinyint: {OVERFLOW_MIN} }}) {{ id }} }}"), None, "Arithmetic overflow error converting expression to data type tinyint" ); @@ -568,19 +490,13 @@ mod max_integer { // smallint assert_error!( runner, - format!( - "mutation {{ createOneTest(data: {{ smallint: {} }}) {{ id }} }}", - I16_OVERFLOW_MAX - ), + format!("mutation {{ createOneTest(data: {{ smallint: {I16_OVERFLOW_MAX} }}) {{ id }} }}"), None, "Arithmetic overflow error converting expression to data type smallint" ); assert_error!( runner, - format!( - "mutation {{ createOneTest(data: {{ smallint: {} }}) {{ id }} }}", - I16_OVERFLOW_MIN - ), + format!("mutation {{ createOneTest(data: {{ smallint: {I16_OVERFLOW_MIN} }}) {{ id }} }}"), None, "Arithmetic overflow error converting expression to data type smallint." ); @@ -588,19 +504,13 @@ mod max_integer { // int assert_error!( runner, - format!( - "mutation {{ createOneTest(data: {{ int: {} }}) {{ id }} }}", - I32_OVERFLOW_MAX - ), + format!("mutation {{ createOneTest(data: {{ int: {I32_OVERFLOW_MAX} }}) {{ id }} }}"), None, "Arithmetic overflow error converting expression to data type int" ); assert_error!( runner, - format!( - "mutation {{ createOneTest(data: {{ int: {} }}) {{ id }} }}", - I32_OVERFLOW_MIN - ), + format!("mutation {{ createOneTest(data: {{ int: {I32_OVERFLOW_MIN} }}) {{ id }} }}"), None, "Arithmetic overflow error converting expression to data type int" ); @@ -661,19 +571,13 @@ mod max_integer { // int4 assert_error!( runner, - format!( - "mutation {{ createOneTest(data: {{ id: 1, int4: {} }}) {{ id }} }}", - I32_OVERFLOW_MAX - ), + format!("mutation {{ createOneTest(data: {{ id: 1, int4: {I32_OVERFLOW_MAX} }}) {{ id }} }}"), None, "Unable to fit integer value '2147483648' into an INT4 (32-bit signed integer)." ); assert_error!( runner, - format!( - "mutation {{ createOneTest(data: {{ id: 1, int4: {} }}) {{ id }} }}", - I32_OVERFLOW_MIN - ), + format!("mutation {{ createOneTest(data: {{ id: 1, int4: {I32_OVERFLOW_MIN} }}) {{ id }} }}"), None, "Unable to fit integer value '-2147483649' into an INT4 (32-bit signed integer)." ); @@ -681,19 +585,13 @@ mod max_integer { // int2 assert_error!( runner, - format!( - "mutation {{ createOneTest(data: {{ id: 1, int2: {} }}) {{ id }} }}", - I16_OVERFLOW_MAX - ), + format!("mutation {{ createOneTest(data: {{ id: 1, int2: {I16_OVERFLOW_MAX} }}) {{ id }} }}"), None, "Unable to fit integer value '32768' into an INT2 (16-bit signed integer)." ); assert_error!( runner, - format!( - "mutation {{ createOneTest(data: {{ id: 1, int2: {} }}) {{ id }} }}", - I16_OVERFLOW_MIN - ), + format!("mutation {{ createOneTest(data: {{ id: 1, int2: {I16_OVERFLOW_MIN} }}) {{ id }} }}"), None, "Unable to fit integer value '-32769' into an INT2 (16-bit signed integer)." 
); @@ -701,19 +599,13 @@ mod max_integer { //oid assert_error!( runner, - format!( - "mutation {{ createOneTest(data: {{ id: 1, oid: {} }}) {{ id }} }}", - U32_OVERFLOW_MAX - ), + format!("mutation {{ createOneTest(data: {{ id: 1, oid: {U32_OVERFLOW_MAX} }}) {{ id }} }}"), None, "Unable to fit integer value '4294967296' into an OID (32-bit unsigned integer)." ); assert_error!( runner, - format!( - "mutation {{ createOneTest(data: {{ id: 1, oid: {} }}) {{ id }} }}", - OVERFLOW_MIN - ), + format!("mutation {{ createOneTest(data: {{ id: 1, oid: {OVERFLOW_MIN} }}) {{ id }} }}"), None, "Unable to fit integer value '-1' into an OID (32-bit unsigned integer)." ); diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_15467.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_15467.rs index 87f87e8d3dbd..d10e4abf6d2f 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_15467.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_15467.rs @@ -36,12 +36,11 @@ mod mongodb { format!( r#" mutation {{ - updateManyStanding(data:{{awayLosses:{{set: 0}}, teamId:{{set: 972030012}}, leagueId:{{set: 2363725}}}}, where:{{id: {{equals: {} }}}}) {{ + updateManyStanding(data:{{awayLosses:{{set: 0}}, teamId:{{set: 972030012}}, leagueId:{{set: 2363725}}}}, where:{{id: {{equals: {object_id} }}}}) {{ count }} }} - "#, - object_id + "# ) ); let logs = runner.get_logs().await; @@ -51,7 +50,7 @@ mod mongodb { db.Standing.updateMany({{ _id: {{ $in: [ - ObjectId({}), + ObjectId({object_id}), ], }}, }},[ @@ -75,25 +74,20 @@ db.Standing.updateMany({{ $literal: 0, }}, }}, -}}])"#, - object_id +}}])"# ); let expected_query = query.trim(); assert!( last_log_line.contains(expected_query), - "{} should have contained {}", - last_log_line, - expected_query, + r#"{last_log_line} should have contained {expected_query}"#, ); // Piggybacking assertion reproducing https://github.com/prisma/prisma/issues/14378 let expected_duration_field = "duration_ms"; assert!( last_log_line.contains(expected_duration_field), - "{} should have contained {}", - last_log_line, - expected_duration_field + r#"{last_log_line} should have contained {expected_duration_field}"# ); Ok(()) diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/avg.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/avg.rs index bb62606b0cfb..a155090c7d56 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/avg.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/avg.rs @@ -75,7 +75,7 @@ mod aggregation_avg { async fn create_row(runner: &Runner, data: &str) -> TestResult<()> { runner - .query(format!("mutation {{ createOneTestModel(data: {}) {{ id }} }}", data)) + .query(format!("mutation {{ createOneTestModel(data: {data}) {{ id }} }}")) .await? .assert_success(); Ok(()) @@ -168,7 +168,7 @@ mod decimal_aggregation_avg { async fn create_row(runner: &Runner, data: &str) -> TestResult<()> { runner - .query(format!("mutation {{ createOneTestModel(data: {}) {{ id }} }}", data)) + .query(format!("mutation {{ createOneTestModel(data: {data}) {{ id }} }}")) .await? 
.assert_success(); Ok(()) diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/combination_spec.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/combination_spec.rs index cef994206bcf..46bdd77ddb58 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/combination_spec.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/combination_spec.rs @@ -283,7 +283,7 @@ mod combinations { async fn create_row(runner: &Runner, data: &str) -> TestResult<()> { runner - .query(format!("mutation {{ createOneItem(data: {}) {{ id }} }}", data)) + .query(format!("mutation {{ createOneItem(data: {data}) {{ id }} }}")) .await? .assert_success(); @@ -519,7 +519,7 @@ mod decimal_combinations { async fn create_row(runner: &Runner, data: &str) -> TestResult<()> { runner - .query(format!("mutation {{ createOneItem(data: {}) {{ id }} }}", data)) + .query(format!("mutation {{ createOneItem(data: {data}) {{ id }} }}")) .await? .assert_success(); diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/count.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/count.rs index 7c52161d7951..3d5572650c13 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/count.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/count.rs @@ -89,7 +89,7 @@ mod aggregation_count { async fn create_row(runner: &Runner, data: &str) -> TestResult<()> { runner - .query(format!("mutation {{ createOneTestModel(data: {}) {{ id }} }}", data)) + .query(format!("mutation {{ createOneTestModel(data: {data}) {{ id }} }}")) .await? .assert_success(); Ok(()) diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/group_by.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/group_by.rs index 8e153c2661b4..fd99fb4385dc 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/group_by.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/group_by.rs @@ -567,7 +567,7 @@ mod aggregation_group_by { async fn create_row(runner: &Runner, data: &str) -> TestResult<()> { runner - .query(format!("mutation {{ createOneA(data: {}) {{ id }} }}", data)) + .query(format!("mutation {{ createOneA(data: {data}) {{ id }} }}")) .await? .assert_success(); Ok(()) diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/group_by_having.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/group_by_having.rs index e04ebd5869e0..15d11967178e 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/group_by_having.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/group_by_having.rs @@ -412,7 +412,7 @@ mod aggr_group_by_having { async fn create_row(runner: &Runner, data: &str) -> TestResult<()> { runner - .query(format!("mutation {{ createOneTestModel(data: {}) {{ id }} }}", data)) + .query(format!("mutation {{ createOneTestModel(data: {data}) {{ id }} }}")) .await? 
.assert_success(); Ok(()) @@ -657,7 +657,7 @@ mod decimal_aggregation_group_by_having { async fn create_row(runner: &Runner, data: &str) -> TestResult<()> { runner - .query(format!("mutation {{ createOneTestModel(data: {}) {{ id }} }}", data)) + .query(format!("mutation {{ createOneTestModel(data: {data}) {{ id }} }}")) .await? .assert_success(); Ok(()) diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/many_count_relation.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/many_count_relation.rs index 077cbb5325d9..54dbccefe7cc 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/many_count_relation.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/many_count_relation.rs @@ -646,7 +646,7 @@ mod many_count_rel { async fn create_row(runner: &Runner, data: &str) -> TestResult<()> { runner - .query(format!("mutation {{ createOnePost(data: {}) {{ id }} }}", data)) + .query(format!("mutation {{ createOnePost(data: {data}) {{ id }} }}")) .await? .assert_success(); Ok(()) diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/max.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/max.rs index b27f18eda38b..d4ef72ee3cf6 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/max.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/max.rs @@ -72,7 +72,7 @@ mod aggregation_max { async fn create_row(runner: &Runner, data: &str) -> TestResult<()> { runner - .query(format!("mutation {{ createOneTestModel(data: {}) {{ id }} }}", data)) + .query(format!("mutation {{ createOneTestModel(data: {data}) {{ id }} }}")) .await? .assert_success(); Ok(()) @@ -162,7 +162,7 @@ mod decimal_aggregation_max { async fn create_row(runner: &Runner, data: &str) -> TestResult<()> { runner - .query(format!("mutation {{ createOneTestModel(data: {}) {{ id }} }}", data)) + .query(format!("mutation {{ createOneTestModel(data: {data}) {{ id }} }}")) .await? .assert_success(); Ok(()) diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/min.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/min.rs index 273d2bcd2282..1927beba7ea5 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/min.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/min.rs @@ -72,7 +72,7 @@ mod aggregation_min { async fn create_row(runner: &Runner, data: &str) -> TestResult<()> { runner - .query(format!("mutation {{ createOneTestModel(data: {}) {{ id }} }}", data)) + .query(format!("mutation {{ createOneTestModel(data: {data}) {{ id }} }}")) .await? .assert_success(); Ok(()) @@ -162,7 +162,7 @@ mod decimal_aggregation_min { async fn create_row(runner: &Runner, data: &str) -> TestResult<()> { runner - .query(format!("mutation {{ createOneTestModel(data: {}) {{ id }} }}", data)) + .query(format!("mutation {{ createOneTestModel(data: {data}) {{ id }} }}")) .await? 
.assert_success(); Ok(()) diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/sum.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/sum.rs index dd6db334f444..59a89cdff930 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/sum.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/sum.rs @@ -72,7 +72,7 @@ mod aggregation_sum { async fn create_row(runner: &Runner, data: &str) -> TestResult<()> { runner - .query(format!("mutation {{ createOneTestModel(data: {}) {{ id }} }}", data)) + .query(format!("mutation {{ createOneTestModel(data: {data}) {{ id }} }}")) .await? .assert_success(); Ok(()) @@ -162,7 +162,7 @@ mod decimal_aggregation_sum { async fn create_row(runner: &Runner, data: &str) -> TestResult<()> { runner - .query(format!("mutation {{ createOneTestModel(data: {}) {{ id }} }}", data)) + .query(format!("mutation {{ createOneTestModel(data: {data}) {{ id }} }}")) .await? .assert_success(); Ok(()) diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/uniq_count_relation.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/uniq_count_relation.rs index 2d3decd52122..4d21189bf125 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/uniq_count_relation.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/uniq_count_relation.rs @@ -216,7 +216,7 @@ mod uniq_count_rel { async fn create_row(runner: &Runner, data: &str) -> TestResult<()> { runner - .query(format!("mutation {{ createOnePost(data: {}) {{ id }} }}", data)) + .query(format!("mutation {{ createOnePost(data: {data}) {{ id }} }}")) .await? .assert_success(); Ok(()) diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/batch/in_selection_batching.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/batch/in_selection_batching.rs index 36c14f275773..e2b21fc215ed 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/batch/in_selection_batching.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/batch/in_selection_batching.rs @@ -146,7 +146,7 @@ mod isb { async fn create_a(runner: &Runner, data: &str) -> TestResult<()> { runner - .query(format!("mutation {{ createOneA(data: {}) {{ id }} }}", data)) + .query(format!("mutation {{ createOneA(data: {data}) {{ id }} }}")) .await? 
.assert_success(); diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/batch/select_one_singular.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/batch/select_one_singular.rs index 80eaf43f7668..f29aba4edeea 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/batch/select_one_singular.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/batch/select_one_singular.rs @@ -276,7 +276,7 @@ mod singular_batch { res.to_string(), @r###"{"batchResult":[{"data":{"findUniqueTestModelOrThrow":{"id":1}}},{"data":{"findUniqueTestModelOrThrow":{"id":2}}}]}"### ); - assert_eq!(compact_doc.is_compact(), false); + assert!(!compact_doc.is_compact()); // Failing case let (res, compact_doc) = compact_batch( @@ -291,7 +291,7 @@ mod singular_batch { res.to_string(), @r###"{"batchResult":[{"data":{"findUniqueTestModelOrThrow":{"id":2}}},{"errors":[{"error":"Error occurred during query execution:\nConnectorError(ConnectorError { user_facing_error: Some(KnownError { message: \"An operation failed because it depends on one or more records that were required but not found. Expected a record, found none.\", meta: Object {\"cause\": String(\"Expected a record, found none.\")}, error_code: \"P2025\" }), kind: RecordDoesNotExist, transient: false })","user_facing_error":{"is_panic":false,"message":"An operation failed because it depends on one or more records that were required but not found. Expected a record, found none.","meta":{"cause":"Expected a record, found none."},"error_code":"P2025"}}]}]}"### ); - assert_eq!(compact_doc.is_compact(), false); + assert!(!compact_doc.is_compact()); // Mix of findUnique & findUniqueOrThrow let (res, compact_doc) = compact_batch( @@ -306,7 +306,7 @@ mod singular_batch { res.to_string(), @r###"{"batchResult":[{"data":{"findUniqueTestModel":null}},{"data":{"findUniqueTestModelOrThrow":{"id":2}}}]}"### ); - assert_eq!(compact_doc.is_compact(), false); + assert!(!compact_doc.is_compact()); // Mix of findUnique & findUniqueOrThrow let (res, compact_doc) = compact_batch( @@ -321,7 +321,7 @@ mod singular_batch { res.to_string(), @r###"{"batchResult":[{"data":{"findUniqueTestModel":{"id":2}}},{"errors":[{"error":"Error occurred during query execution:\nConnectorError(ConnectorError { user_facing_error: Some(KnownError { message: \"An operation failed because it depends on one or more records that were required but not found. Expected a record, found none.\", meta: Object {\"cause\": String(\"Expected a record, found none.\")}, error_code: \"P2025\" }), kind: RecordDoesNotExist, transient: false })","user_facing_error":{"is_panic":false,"message":"An operation failed because it depends on one or more records that were required but not found. 
Expected a record, found none.","meta":{"cause":"Expected a record, found none."},"error_code":"P2025"}}]}]}"### ); - assert_eq!(compact_doc.is_compact(), false); + assert!(!compact_doc.is_compact()); // Mix of findUnique & findUniqueOrThrow let (res, compact_doc) = compact_batch( @@ -336,7 +336,7 @@ mod singular_batch { res.to_string(), @r###"{"batchResult":[{"data":{"findUniqueTestModelOrThrow":{"id":2}}},{"data":{"findUniqueTestModel":null}}]}"### ); - assert_eq!(compact_doc.is_compact(), false); + assert!(!compact_doc.is_compact()); Ok(()) } diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/distinct.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/distinct.rs index 83b565c55fdd..14179dec72e5 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/distinct.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/distinct.rs @@ -226,7 +226,7 @@ mod distinct { async fn test_user(runner: &Runner, data: &str) -> TestResult<()> { runner - .query(format!("mutation {{ createOneUser(data: {}) {{ id }} }}", data)) + .query(format!("mutation {{ createOneUser(data: {data}) {{ id }} }}")) .await? .assert_success(); diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/composite/mod.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/composite/mod.rs index 3e42d302625c..9d596d4c5622 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/composite/mod.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/composite/mod.rs @@ -479,7 +479,7 @@ async fn create_relation_combination_test_data(runner: &Runner) -> TestResult<() async fn create_row(runner: &Runner, data: &str) -> TestResult<()> { runner - .query(format!("mutation {{ createOneTestModel(data: {}) {{ id }} }}", data)) + .query(format!("mutation {{ createOneTestModel(data: {data}) {{ id }} }}")) .await? .assert_success(); diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/field_reference/having_filter.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/field_reference/having_filter.rs index 9ce817211b14..a38a4d2b665f 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/field_reference/having_filter.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/field_reference/having_filter.rs @@ -79,7 +79,7 @@ mod having_filter { async fn create_row(runner: &Runner, data: &str) -> TestResult<()> { runner - .query(format!("mutation {{ createOneTestModel(data: {}) {{ id }} }}", data)) + .query(format!("mutation {{ createOneTestModel(data: {data}) {{ id }} }}")) .await? 
.assert_success(); Ok(()) diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/field_reference/json_filter.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/field_reference/json_filter.rs index 4116d4f77ab3..b0ab7da42a7e 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/field_reference/json_filter.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/field_reference/json_filter.rs @@ -457,7 +457,7 @@ mod json_filter { } fn jsonq(filter: String) -> String { - format!(r#"query {{ findManyTestModel(where: {{ {} }} ) {{ id }} }}"#, filter) + format!(r#"query {{ findManyTestModel(where: {{ {filter} }} ) {{ id }} }}"#) } fn json_path(runner: &Runner) -> &'static str { diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/field_reference/relation_filter.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/field_reference/relation_filter.rs index 3f2f8e4f98da..4581ea0bf6e2 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/field_reference/relation_filter.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/field_reference/relation_filter.rs @@ -371,7 +371,7 @@ mod relation_filter { async fn create_row(runner: &Runner, data: &str) -> TestResult<()> { runner - .query(format!("mutation {{ createOneTestModel(data: {}) {{ id }} }}", data)) + .query(format!("mutation {{ createOneTestModel(data: {data}) {{ id }} }}")) .await? .assert_success(); Ok(()) diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/filter_regression.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/filter_regression.rs index d3adaab1ed8d..150e1d34cddf 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/filter_regression.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/filter_regression.rs @@ -217,7 +217,7 @@ mod fr_m_to_n { async fn test_location(runner: &Runner, data: &str) -> TestResult<()> { runner - .query(format!("mutation {{ createOneLocation(data: {}) {{ id }} }}", data)) + .query(format!("mutation {{ createOneLocation(data: {data}) {{ id }} }}")) .await? .assert_success(); @@ -226,7 +226,7 @@ async fn test_location(runner: &Runner, data: &str) -> TestResult<()> { async fn test_company(runner: &Runner, data: &str) -> TestResult<()> { runner - .query(format!("mutation {{ createOneCompany(data: {}) {{ id }} }}", data)) + .query(format!("mutation {{ createOneCompany(data: {data}) {{ id }} }}")) .await? 
.assert_success(); diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/filters.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/filters.rs index 8630b48ab6d3..90c55e62999e 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/filters.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/filters.rs @@ -356,7 +356,7 @@ mod filter_spec { } fn user_uniques_query(filter: &str) -> String { - format!(r#"query {{ findManyUser{} {{ unique }} }}"#, filter) + format!(r#"query {{ findManyUser{filter} {{ unique }} }}"#) } async fn user_uniques(runner: &Runner, filter: &str) -> TestResult { @@ -368,7 +368,7 @@ mod filter_spec { async fn vehicle_uniques(runner: &Runner, filter: &str) -> TestResult { let result = runner - .query(format!(r#"query {{ findManyVehicle{} {{ unique }} }}"#, filter)) + .query(format!(r#"query {{ findManyVehicle{filter} {{ unique }} }}"#)) .await?; result.assert_success(); @@ -377,7 +377,7 @@ mod filter_spec { async fn lot_uniques(runner: &Runner, filter: &str) -> TestResult { let result = runner - .query(format!(r#"query {{ findManyParkingLot{} {{ unique }} }}"#, filter)) + .query(format!(r#"query {{ findManyParkingLot{filter} {{ unique }} }}"#)) .await?; result.assert_success(); diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/insensitive_filters.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/insensitive_filters.rs index d7e11b8b4f7b..84a55910dffb 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/insensitive_filters.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/insensitive_filters.rs @@ -185,8 +185,7 @@ mod insensitive { async fn create_row(runner: &Runner, s: &str) -> TestResult<()> { runner .query(format!( - r#"mutation {{ createOneTestModel(data: {{ str: "{}" }}) {{ id }} }}"#, - s + r#"mutation {{ createOneTestModel(data: {{ str: "{s}" }}) {{ id }} }}"# )) .await? .assert_success(); diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/json.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/json.rs index 3ea8cede3d3e..22378dd7d20b 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/json.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/json.rs @@ -235,7 +235,7 @@ mod json { runner .query(jNull!( caps, - format!("mutation {{ createOneTestModel(data: {}) {{ id }} }}", data) + format!("mutation {{ createOneTestModel(data: {data}) {{ id }} }}") )) .await? 
.assert_success(); diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/json_filters.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/json_filters.rs index 701f7bf99544..4b5573f67170 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/json_filters.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/json_filters.rs @@ -871,14 +871,11 @@ mod json_filters { async fn create_row(runner: &Runner, id: u32, data: &str, nested: bool) -> TestResult<()> { let json = if nested { - format!(r#"{{ \"a\": {{ \"b\": {} }} }}"#, data) + format!(r#"{{ \"a\": {{ \"b\": {data} }} }}"#) } else { data.to_owned() }; - let q = format!( - r#"mutation {{ createOneTestModel(data: {{ id: {}, json: "{}" }}) {{ id }} }}"#, - id, json - ); + let q = format!(r#"mutation {{ createOneTestModel(data: {{ id: {id}, json: "{json}" }}) {{ id }} }}"#); runner.query(q).await?.assert_success(); Ok(()) @@ -887,19 +884,13 @@ mod json_filters { fn jsonq(runner: &Runner, filter: &str, path: Option<&str>) -> String { let path = path.unwrap_or_else(|| json_path(runner)); - format!( - r#"query {{ findManyTestModel(where: {{ json: {{ {}, {} }} }} ) {{ id }} }}"#, - filter, path - ) + format!(r#"query {{ findManyTestModel(where: {{ json: {{ {filter}, {path} }} }} ) {{ id }} }}"#) } fn not_jsonq(runner: &Runner, filter: &str, path: Option<&str>) -> String { let path = path.unwrap_or_else(|| json_path(runner)); - format!( - r#"query {{ findManyTestModel(where: {{ NOT: {{ json: {{ {}, {} }} }} }} ) {{ id }} }}"#, - filter, path - ) + format!(r#"query {{ findManyTestModel(where: {{ NOT: {{ json: {{ {filter}, {path} }} }} }} ) {{ id }} }}"#) } fn json_path(runner: &Runner) -> &'static str { diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/list_filters.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/list_filters.rs index e759fc28bf0c..16b9a0ab0437 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/list_filters.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/list_filters.rs @@ -1257,13 +1257,12 @@ async fn list_query(runner: &Runner, field: &str, operation: &str, comparator: & format!( r#"query {{ findManyTestModel(where: {{ - {}: {{ {}: {} }} + {field}: {{ {operation}: {comparator} }} }}) {{ id }} }} - "#, - field, operation, comparator + "# ) ); @@ -1277,13 +1276,12 @@ async fn not_list_query(runner: &Runner, field: &str, operation: &str, comparato r#" query {{ findManyTestModel(where: {{ - NOT: {{ {}: {{ {}: {} }} }} + NOT: {{ {field}: {{ {operation}: {comparator} }} }} }}) {{ id }} }} - "#, - field, operation, comparator + "# ) ); diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/ported_filters.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/ported_filters.rs index 589fa239d3fb..67e5d8ff7bcf 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/ported_filters.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/ported_filters.rs @@ -1333,7 +1333,7 @@ mod ported { datetime: &str, ) -> TestResult<()> { let string = match string { - Some(string) => format!(r#""{}""#, string), + Some(string) => format!(r#""{string}""#), None => String::from("null"), }; @@ -1342,18 +1342,17 @@ mod ported { r#" mutation {{ createOneModelA(data: {{ - 
idTest: "{}", - optString: {}, - optInt: {}, - optFloat: {}, - optBoolean: {}, - optEnum: {}, - optDateTime: "{}" + idTest: "{id}", + optString: {string}, + optInt: {int}, + optFloat: {float}, + optBoolean: {boolean}, + optEnum: {enum_}, + optDateTime: "{datetime}" b: {{ connect: {{ int: 1 }} }} }}) {{ id }} }} - "#, - id, string, int, float, boolean, enum_, datetime + "# ); runner.query(query).await?.assert_success(); diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/search_filter.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/search_filter.rs index 2cd590cf197b..218ecb7eb877 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/search_filter.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/search_filter.rs @@ -124,7 +124,7 @@ async fn create_test_data(runner: &Runner) -> TestResult<()> { async fn create_row(runner: &Runner, data: &str) -> TestResult<()> { runner - .query(format!("mutation {{ createOneTestModel(data: {}) {{ id }} }}", data)) + .query(format!("mutation {{ createOneTestModel(data: {data}) {{ id }} }}")) .await? .assert_success(); Ok(()) diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/order_and_pagination/nested_multi_order_pagination.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/order_and_pagination/nested_multi_order_pagination.rs index b123333e8ec7..cf14f3e8bb45 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/order_and_pagination/nested_multi_order_pagination.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/order_and_pagination/nested_multi_order_pagination.rs @@ -133,7 +133,7 @@ mod paging_one2m_stable_order { async fn create_row(runner: &Runner, data: &str) -> TestResult<()> { runner - .query(format!("mutation {{ createOneTestModel(data: {}) {{ id }} }}", data)) + .query(format!("mutation {{ createOneTestModel(data: {data}) {{ id }} }}")) .await? .assert_success(); Ok(()) @@ -276,7 +276,7 @@ mod paging_one2m_unstable_order { async fn create_row(runner: &Runner, data: &str) -> TestResult<()> { runner - .query(format!("mutation {{ createOneTestModel(data: {}) {{ id }} }}", data)) + .query(format!("mutation {{ createOneTestModel(data: {data}) {{ id }} }}")) .await? .assert_success(); Ok(()) diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/order_and_pagination/nested_pagination.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/order_and_pagination/nested_pagination.rs index 67430a2ee58d..60567aae1f15 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/order_and_pagination/nested_pagination.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/order_and_pagination/nested_pagination.rs @@ -838,7 +838,7 @@ mod nested_pagination { async fn create_row(runner: &Runner, data: &str) -> TestResult<()> { runner - .query(format!("mutation {{ createOneTop(data: {}) {{ id }} }}", data)) + .query(format!("mutation {{ createOneTop(data: {data}) {{ id }} }}")) .await? 
.assert_success(); Ok(()) diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/order_and_pagination/order_by.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/order_and_pagination/order_by.rs index f7a55609873d..b6f42a55a014 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/order_and_pagination/order_by.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/order_and_pagination/order_by.rs @@ -188,7 +188,7 @@ mod basic_order_by { async fn create_row(runner: &Runner, data: &str) -> TestResult<()> { runner - .query(format!("mutation {{ createOneOrderTest(data: {}) {{ id }} }}", data)) + .query(format!("mutation {{ createOneOrderTest(data: {data}) {{ id }} }}")) .await? .assert_success(); diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/order_and_pagination/order_by_aggregation.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/order_and_pagination/order_by_aggregation.rs index 0116cb8ff995..c21517823309 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/order_and_pagination/order_by_aggregation.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/order_and_pagination/order_by_aggregation.rs @@ -846,7 +846,7 @@ mod order_by_aggr { async fn create_row(runner: &Runner, data: &str) -> TestResult<()> { runner - .query(format!("mutation {{ createOneUser(data: {}) {{ id }} }}", data)) + .query(format!("mutation {{ createOneUser(data: {data}) {{ id }} }}")) .await? .assert_success(); Ok(()) diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/order_and_pagination/order_by_composite.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/order_and_pagination/order_by_composite.rs index f324ce85ae86..9a4907b8ba30 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/order_and_pagination/order_by_composite.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/order_and_pagination/order_by_composite.rs @@ -819,7 +819,7 @@ mod mixed { async fn create_row(runner: &Runner, data: &str) -> TestResult<()> { runner - .query(format!("mutation {{ createOneTestModel(data: {}) {{ id }} }}", data)) + .query(format!("mutation {{ createOneTestModel(data: {data}) {{ id }} }}")) .await? 
.assert_success(); diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/order_and_pagination/order_by_dependent.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/order_and_pagination/order_by_dependent.rs index d2bc15b337ce..93dcc263e025 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/order_and_pagination/order_by_dependent.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/order_and_pagination/order_by_dependent.rs @@ -413,7 +413,7 @@ mod order_by_dependent { c_to_a: Option<u32>, ) -> TestResult<()> { let (follow_up, inline) = match c_to_a { - Some(id) if id != a_id => (None, Some(format!("a: {{ create: {{ id: {} }} }}", id))), + Some(id) if id != a_id => (None, Some(format!("a: {{ create: {{ id: {id} }} }}"))), Some(id) => ( Some(format!( "mutation {{ updateOneModelC(where: {{ id: {} }}, data: {{ a_id: {} }}) {{ id }} }}", @@ -431,14 +431,14 @@ mod order_by_dependent { }; let model_b = match b_id { - Some(id) => format!("b: {{ create: {{ id: {}\n {} }} }}", id, model_c), + Some(id) => format!("b: {{ create: {{ id: {id}\n {model_c} }} }}"), None => "".to_string(), }; - let model_a = format!("{{ id: {} \n {} }}", a_id, model_b); + let model_a = format!("{{ id: {a_id} \n {model_b} }}"); runner - .query(format!("mutation {{ createOneModelA(data: {}) {{ id }} }}", model_a)) + .query(format!("mutation {{ createOneModelA(data: {model_a}) {{ id }} }}")) .await? .assert_success(); diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/order_and_pagination/order_by_dependent_pagination.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/order_and_pagination/order_by_dependent_pagination.rs index 6b26c261cc00..8823ae42ad26 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/order_and_pagination/order_by_dependent_pagination.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/order_and_pagination/order_by_dependent_pagination.rs @@ -349,7 +349,7 @@ mod order_by_dependent_pag { c_to_a: Option<u32>, ) -> TestResult<()> { let (follow_up, inline) = match c_to_a { - Some(id) if id != a_id => (None, Some(format!("a: {{ create: {{ id: {} }} }}", id))), + Some(id) if id != a_id => (None, Some(format!("a: {{ create: {{ id: {id} }} }}"))), Some(id) => ( Some(format!( "mutation {{ updateOneModelC(where: {{ id: {} }}, data: {{ a_id: {} }}) {{ id }} }}", @@ -367,14 +367,14 @@ mod order_by_dependent_pag { }; let model_b = match b_id { - Some(id) => format!("b: {{ create: {{ id: {}\n {} }} }}", id, model_c), + Some(id) => format!("b: {{ create: {{ id: {id}\n {model_c} }} }}"), None => "".to_string(), }; - let model_a = format!("{{ id: {} \n {} }}", a_id, model_b); + let model_a = format!("{{ id: {a_id} \n {model_b} }}"); runner - .query(format!("mutation {{ createOneModelA(data: {}) {{ id }} }}", model_a)) + .query(format!("mutation {{ createOneModelA(data: {model_a}) {{ id }} }}")) .await? 
.assert_success(); diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/order_and_pagination/order_by_nulls.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/order_and_pagination/order_by_nulls.rs index 5e184eefed73..318446ffe071 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/order_and_pagination/order_by_nulls.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/order_and_pagination/order_by_nulls.rs @@ -447,7 +447,7 @@ mod order_by_nulls { async fn create_row(runner: &Runner, data: &str) -> TestResult<()> { runner - .query(format!("mutation {{ createOneTestModel(data: {}) {{ id }} }}", data)) + .query(format!("mutation {{ createOneTestModel(data: {data}) {{ id }} }}")) .await? .assert_success(); diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/order_and_pagination/order_by_relevance.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/order_and_pagination/order_by_relevance.rs index 7525179762eb..3048fbf9a0f3 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/order_and_pagination/order_by_relevance.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/order_and_pagination/order_by_relevance.rs @@ -398,7 +398,7 @@ async fn create_test_data(runner: &Runner) -> TestResult<()> { async fn create_row(runner: &Runner, data: &str) -> TestResult<()> { runner - .query(format!("mutation {{ createOneTestModel(data: {}) {{ id }} }}", data)) + .query(format!("mutation {{ createOneTestModel(data: {data}) {{ id }} }}")) .await? .assert_success(); Ok(()) diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/order_and_pagination/pagination.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/order_and_pagination/pagination.rs index 5279de815200..f0874cae02c8 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/order_and_pagination/pagination.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/order_and_pagination/pagination.rs @@ -874,7 +874,7 @@ mod pagination { async fn create_row(runner: &Runner, data: &str) -> TestResult<()> { runner - .query(format!("mutation {{ createOneTestModel(data: {}) {{ id }} }}", data)) + .query(format!("mutation {{ createOneTestModel(data: {data}) {{ id }} }}")) .await? .assert_success(); Ok(()) diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/regressions/pagination_regression.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/regressions/pagination_regression.rs index b7dc5ce5e919..6324b0d7bbb9 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/regressions/pagination_regression.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/regressions/pagination_regression.rs @@ -251,7 +251,7 @@ mod pagination_regr { async fn create_row(runner: &Runner, data: &str) -> TestResult<()> { runner - .query(format!("mutation {{ createOneTestModel(data: {}) {{ id }} }}", data)) + .query(format!("mutation {{ createOneTestModel(data: {data}) {{ id }} }}")) .await? 
.assert_success(); Ok(()) diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/regressions/prisma_4088.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/regressions/prisma_4088.rs index 6dfaca4a8222..b7597a578762 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/regressions/prisma_4088.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/regressions/prisma_4088.rs @@ -200,7 +200,7 @@ mod prisma_4088 { async fn create_row(runner: &Runner, data: &str) -> TestResult<()> { runner - .query(format!("mutation {{ createOneTestModel(data: {}) {{ id }} }}", data)) + .query(format!("mutation {{ createOneTestModel(data: {data}) {{ id }} }}")) .await? .assert_success(); Ok(()) diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/regressions/prisma_8389.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/regressions/prisma_8389.rs index 9c37e003a4cf..1e6f3ce236b3 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/regressions/prisma_8389.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/regressions/prisma_8389.rs @@ -42,14 +42,13 @@ mod prisma_8389 { async fn create_test_data(runner: &Runner) -> TestResult<()> { let data = (1..104) - .map(|n| format!("{{ id: {} }}", n)) + .map(|n| format!("{{ id: {n} }}")) .collect::<Vec<_>>() .join(", "); runner .query(format!( - "mutation {{ createManyTestModel(data: [{}]) {{ count }} }}", - data + "mutation {{ createManyTestModel(data: [{data}]) {{ count }} }}" )) .await? .assert_success(); diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/regressions/prisma_933.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/regressions/prisma_933.rs index 8598344a5986..ab8d4bd1b1ec 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/regressions/prisma_933.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/regressions/prisma_933.rs @@ -57,7 +57,7 @@ mod prisma_933_spec { async fn create_row(runner: &Runner, data: &str) -> TestResult<()> { runner - .query(format!("mutation {{ createOneBuyer(data: {}) {{ buyer_id }} }}", data)) + .query(format!("mutation {{ createOneBuyer(data: {data}) {{ buyer_id }} }}")) .await? .assert_success(); Ok(()) diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/relations/inline_relation.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/relations/inline_relation.rs index b78cde454f29..397b7529b4b1 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/relations/inline_relation.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/relations/inline_relation.rs @@ -47,7 +47,7 @@ mod inline_rel { async fn create_row(runner: &Runner, data: &str) -> TestResult<()> { runner - .query(format!("mutation {{ createOneModelA(data: {}) {{ id }} }}", data)) + .query(format!("mutation {{ createOneModelA(data: {data}) {{ id }} }}")) .await?
.assert_success(); Ok(()) diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/simple/composite_default_value.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/simple/composite_default_value.rs index 7ce0be09b886..57c196b84a55 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/simple/composite_default_value.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/simple/composite_default_value.rs @@ -62,6 +62,6 @@ mod default_value { fn run_command_raw(command: serde_json::Value) -> String { let command = command.to_string().replace('\"', "\\\""); - format!(r#"mutation {{ runCommandRaw(command: "{}") }}"#, command) + format!(r#"mutation {{ runCommandRaw(command: "{command}") }}"#) } } diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/simple/find_first.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/simple/find_first.rs index b04091071d8a..64d46d102561 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/simple/find_first.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/simple/find_first.rs @@ -60,7 +60,7 @@ mod find_first_query { async fn test_row(runner: &Runner, data: &str) -> TestResult<()> { runner - .query(format!("mutation {{ createOneTestModel(data: {}) {{ id }} }}", data)) + .query(format!("mutation {{ createOneTestModel(data: {data}) {{ id }} }}")) .await? .assert_success(); Ok(()) diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/simple/find_first_or_throw.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/simple/find_first_or_throw.rs index 00fb131bb747..518fc0ddfad4 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/simple/find_first_or_throw.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/simple/find_first_or_throw.rs @@ -61,7 +61,7 @@ mod find_first_or_throw_query { async fn test_row(runner: &Runner, data: &str) -> TestResult<()> { runner - .query(format!("mutation {{ createOneTestModel(data: {}) {{ id }} }}", data)) + .query(format!("mutation {{ createOneTestModel(data: {data}) {{ id }} }}")) .await? .assert_success(); Ok(()) diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/simple/find_many.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/simple/find_many.rs index d0cff449ea31..a27f05a28c38 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/simple/find_many.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/simple/find_many.rs @@ -41,7 +41,7 @@ mod find_many { async fn test_row(runner: &Runner, data: &str) -> TestResult<()> { runner - .query(format!("mutation {{ createOneTestModel(data: {}) {{ id }} }}", data)) + .query(format!("mutation {{ createOneTestModel(data: {data}) {{ id }} }}")) .await? 
.assert_success(); Ok(()) diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/simple/mongo_incorrect_fields.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/simple/mongo_incorrect_fields.rs index 55e9b65add9e..12048d3a0660 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/simple/mongo_incorrect_fields.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/simple/mongo_incorrect_fields.rs @@ -60,6 +60,6 @@ mod mongo_incorrect_fields { fn run_command_raw(command: serde_json::Value) -> String { let command = command.to_string().replace('\"', "\\\""); - format!(r#"mutation {{ runCommandRaw(command: "{}") }}"#, command) + format!(r#"mutation {{ runCommandRaw(command: "{command}") }}"#) } } diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/simple/multi_field_unique.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/simple/multi_field_unique.rs index 626c2ee414ee..568190504a18 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/simple/multi_field_unique.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/simple/multi_field_unique.rs @@ -267,7 +267,7 @@ mod multi_field_unique { async fn create_user(runner: &Runner, data: &str) -> TestResult<()> { runner - .query(format!("mutation {{ createOneUser(data: {}) {{ id }} }}", data)) + .query(format!("mutation {{ createOneUser(data: {data}) {{ id }} }}")) .await? .assert_success(); diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/simple/raw_mongo.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/simple/raw_mongo.rs index 98af28faf570..e0a76ae5ace9 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/simple/raw_mongo.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/simple/raw_mongo.rs @@ -272,6 +272,6 @@ mod raw_mongo { fn run_command_raw(command: serde_json::Value) -> String { let command = command.to_string().replace('\"', "\\\""); - format!(r#"mutation {{ runCommandRaw(command: "{}") }}"#, command) + format!(r#"mutation {{ runCommandRaw(command: "{command}") }}"#) } } diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/views.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/views.rs index 895499fc3a9b..cb4d567e3989 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/views.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/views.rs @@ -114,7 +114,7 @@ mod views { } async fn create_test_data(runner: &Runner, schema_name: &str) -> TestResult<()> { - migrate_view(&runner, schema_name).await?; + migrate_view(runner, schema_name).await?; create_test_model(runner, r#"{ id: 1, firstName: "John", lastName: "Doe" }"#).await?; create_test_model(runner, r#"{ id: 2, firstName: "Jane", lastName: "Doe" }"#).await?; @@ -158,7 +158,7 @@ mod views { async fn create_test_model(runner: &Runner, data: &str) -> TestResult<()> { runner - .query(format!("mutation {{ createOneTestModel(data: {}) {{ id }} }}", data)) + .query(format!("mutation {{ createOneTestModel(data: {data}) {{ id }} }}")) .await? 
.assert_success(); Ok(()) @@ -166,7 +166,7 @@ mod views { async fn create_child(runner: &Runner, data: &str) -> TestResult<()> { runner - .query(format!("mutation {{ createOneChild(data: {}) {{ id }} }}", data)) + .query(format!("mutation {{ createOneChild(data: {data}) {{ id }} }}")) .await? .assert_success(); Ok(()) diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/raw/sql/typed_output.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/raw/sql/typed_output.rs index bc11be656f4d..c3687ddd9f3e 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/raw/sql/typed_output.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/raw/sql/typed_output.rs @@ -600,7 +600,7 @@ mod typed_output { async fn create_row(runner: &Runner, data: &str) -> TestResult<()> { runner - .query(format!("mutation {{ createOneTestModel(data: {}) {{ id }} }}", data)) + .query(format!("mutation {{ createOneTestModel(data: {data}) {{ id }} }}")) .await? .assert_success(); Ok(()) diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/composites/list.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/composites/list.rs index 5a1590926f1d..ddb1a4fa02aa 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/composites/list.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/composites/list.rs @@ -1689,7 +1689,7 @@ mod update { async fn create_row(runner: &Runner, data: &str) -> TestResult<()> { runner - .query(format!("mutation {{ createOneTestModel(data: {}) {{ id }} }}", data)) + .query(format!("mutation {{ createOneTestModel(data: {data}) {{ id }} }}")) .await? .assert_success(); Ok(()) diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/composites/single.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/composites/single.rs index ae9d8816e1f8..25fb3858093d 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/composites/single.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/composites/single.rs @@ -1011,7 +1011,7 @@ mod update { async fn create_row(runner: &Runner, data: &str) -> TestResult<()> { runner - .query(format!("mutation {{ createOneTestModel(data: {}) {{ id }} }}", data)) + .query(format!("mutation {{ createOneTestModel(data: {data}) {{ id }} }}")) .await? .assert_success(); Ok(()) diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/data_types/datetime/datetime.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/data_types/datetime/datetime.rs index b9e9ea105b42..f6cb38098fc5 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/data_types/datetime/datetime.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/data_types/datetime/datetime.rs @@ -72,7 +72,7 @@ mod datetime { async fn create_row(runner: &Runner, data: &str) -> TestResult<()> { runner - .query(format!("mutation {{ createOnePerson(data: {}) {{ id }} }}", data)) + .query(format!("mutation {{ createOnePerson(data: {data}) {{ id }} }}")) .await? 
.assert_success(); Ok(()) diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/data_types/datetime/where_and_datetime.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/data_types/datetime/where_and_datetime.rs index d2949f15c69b..50f867228a58 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/data_types/datetime/where_and_datetime.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/data_types/datetime/where_and_datetime.rs @@ -38,18 +38,17 @@ mod where_and_datetime { createOneNote( data: {{ outerString: "Outer String" - outerDateTime: "{}" + outerDateTime: "{outer_where}" todos: {{ create: [ - {{ innerString: "Inner String", innerDateTime: "{}" }} + {{ innerString: "Inner String", innerDateTime: "{inner_where}" }} ] }} }} ){{ id }} - }}"#, - outer_where, inner_where + }}"# ) ); @@ -58,12 +57,12 @@ mod where_and_datetime { format!( r#"mutation {{ updateOneNote( - where: {{ outerDateTime: "{}" }} + where: {{ outerDateTime: "{outer_where}" }} data: {{ outerString: {{ set: "Changed Outer String" }} todos: {{ update: [{{ - where: {{ innerDateTime: "{}" }}, + where: {{ innerDateTime: "{inner_where}" }}, data:{{ innerString: {{ set: "Changed Inner String" }} }} }}] }} @@ -71,18 +70,17 @@ mod where_and_datetime { ){{ id }} - }}"#, - outer_where, inner_where + }}"# ) ); insta::assert_snapshot!( - run_query!(&runner, format!(r#"query{{findUniqueNote(where:{{outerDateTime: "{}" }}){{outerString, outerDateTime}} }}"#, outer_where)), + run_query!(&runner, format!(r#"query{{findUniqueNote(where:{{outerDateTime: "{outer_where}" }}){{outerString, outerDateTime}} }}"#)), @r###"{"data":{"findUniqueNote":{"outerString":"Changed Outer String","outerDateTime":"2018-12-05T12:34:23.000Z"}}}"### ); insta::assert_snapshot!( - run_query!(&runner, format!(r#"query{{findUniqueTodo(where:{{innerDateTime: "{}" }}){{innerString, innerDateTime}} }}"#, inner_where)), + run_query!(&runner, format!(r#"query{{findUniqueTodo(where:{{innerDateTime: "{inner_where}" }}){{innerString, innerDateTime}} }}"#)), @r###"{"data":{"findUniqueTodo":{"innerString":"Changed Inner String","innerDateTime":"2019-12-05T12:34:23.000Z"}}}"### ); @@ -102,18 +100,17 @@ mod where_and_datetime { createOneNote( data: {{ outerString: "Outer String" - outerDateTime: "{}" + outerDateTime: "{outer_where}" todos: {{ create: [ - {{ innerString: "Inner String", innerDateTime: "{}" }} + {{ innerString: "Inner String", innerDateTime: "{inner_where}" }} ] }} }} ){{ id }} - }}"#, - outer_where, inner_where + }}"# ) ); @@ -122,12 +119,12 @@ mod where_and_datetime { format!( r#"mutation {{ updateOneNote( - where: {{ outerDateTime: "{}" }} + where: {{ outerDateTime: "{outer_where}" }} data: {{ outerString: {{ set: "Changed Outer String" }} todos: {{ update: [{{ - where: {{ innerDateTime: "{}" }}, + where: {{ innerDateTime: "{inner_where}" }}, data:{{ innerString: {{ set: "Changed Inner String" }} }} }}] }} @@ -135,18 +132,17 @@ mod where_and_datetime { ){{ id }} - }}"#, - outer_where, inner_where + }}"# ) ); insta::assert_snapshot!( - run_query!(&runner, format!(r#"query{{findUniqueNote(where:{{outerDateTime: "{}" }}){{outerString, outerDateTime}} }}"#, outer_where)), + run_query!(&runner, format!(r#"query{{findUniqueNote(where:{{outerDateTime: "{outer_where}" }}){{outerString, outerDateTime}} }}"#)), @r###"{"data":{"findUniqueNote":{"outerString":"Changed Outer String","outerDateTime":"2018-01-03T11:27:38.000Z"}}}"### ); insta::assert_snapshot!( - 
run_query!(&runner, format!(r#"query{{findUniqueTodo(where:{{innerDateTime: "{}" }}){{innerString, innerDateTime}} }}"#, inner_where)), + run_query!(&runner, format!(r#"query{{findUniqueTodo(where:{{innerDateTime: "{inner_where}" }}){{innerString, innerDateTime}} }}"#)), @r###"{"data":{"findUniqueTodo":{"innerString":"Changed Inner String","innerDateTime":"2018-01-03T11:27:38.000Z"}}}"### ); diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/data_types/datetime/where_and_update.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/data_types/datetime/where_and_update.rs index a47fbf55f117..8b8e86361f9f 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/data_types/datetime/where_and_update.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/data_types/datetime/where_and_update.rs @@ -37,7 +37,7 @@ mod where_and_update { async fn create_row(runner: &Runner, data: &str) -> TestResult<()> { runner - .query(format!("mutation {{ createOneTest(data: {}) {{ unique }} }}", data)) + .query(format!("mutation {{ createOneTest(data: {data}) {{ unique }} }}")) .await? .assert_success(); Ok(()) diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/data_types/scalar_list/base.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/data_types/scalar_list/base.rs index 5e25dfc976ae..ad887c1689d2 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/data_types/scalar_list/base.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/data_types/scalar_list/base.rs @@ -34,7 +34,7 @@ mod basic_types { run_query!(&runner, format!(r#"mutation {{ createOneScalarModel(data: {{ id: 1, - strings: {{ set: ["test{}"] }} + strings: {{ set: ["test{TROUBLE_CHARS}"] }} ints: {{ set: [1337, 12] }} floats: {{ set: [1.234, 1.45] }} booleans: {{ set: [true, false] }} @@ -50,7 +50,7 @@ mod basic_types { dateTimes bytes }} - }}"#, TROUBLE_CHARS)), + }}"#)), @r###"{"data":{"createOneScalarModel":{"strings":["test¥฿😀😁😂😃😄😅😆😇😈😉😊😋😌😍😎😏😐😑😒😓😔😕😖😗😘😙😚😛😜😝😞😟😠😡😢😣😤😥😦😧😨😩😪😫😬😭😮😯😰😱😲😳😴😵😶😷😸😹😺😻😼😽😾😿🙀🙁🙂🙃🙄🙅🙆🙇🙈🙉🙊🙋🙌🙍🙎🙏ऀँंःऄअआइईउऊऋऌऍऎएऐऑऒओऔकखगघङचछजझञटठडढणतथदधनऩपफबभमयर€₭₮₯₰₱₲₳₴₵₶₷₸₹₺₻₼₽₾₿⃀"],"ints":[1337,12],"floats":[1.234,1.45],"booleans":[true,false],"enums":["A","A"],"dateTimes":["2016-07-31T23:59:01.000Z","2017-07-31T23:59:01.000Z"],"bytes":["dGVzdA==","dA=="]}}}"### ); @@ -65,7 +65,7 @@ mod basic_types { run_query!(&runner, format!(r#"mutation {{ createOneScalarModel(data: {{ id: 1, - strings: {{ set: ["test{}"] }} + strings: {{ set: ["test{TROUBLE_CHARS}"] }} ints: {{ set: [1337, 12] }} floats: {{ set: [1.234, 1.45] }} booleans: {{ set: [true, false] }} @@ -81,7 +81,7 @@ mod basic_types { dateTimes bytes }} - }}"#, TROUBLE_CHARS)), + }}"#)), @r###"{"data":{"createOneScalarModel":{"strings":["test¥฿😀😁😂😃😄😅😆😇😈😉😊😋😌😍😎😏😐😑😒😓😔😕😖😗😘😙😚😛😜😝😞😟😠😡😢😣😤😥😦😧😨😩😪😫😬😭😮😯😰😱😲😳😴😵😶😷😸😹😺😻😼😽😾😿🙀🙁🙂🙃🙄🙅🙆🙇🙈🙉🙊🙋🙌🙍🙎🙏ऀँंःऄअआइईउऊऋऌऍऎएऐऑऒओऔकखगघङचछजझञटठडढणतथदधनऩपफबभमयर€₭₮₯₰₱₲₳₴₵₶₷₸₹₺₻₼₽₾₿⃀"],"ints":[1337,12],"floats":[1.234,1.45],"booleans":[true,false],"enums":["A","A"],"dateTimes":["2016-07-31T23:59:01.000Z","2017-07-31T23:59:01.000Z"],"bytes":["dGVzdA==","dA=="]}}}"### ); @@ -164,7 +164,7 @@ mod basic_types { run_query!(&runner, format!(r#"mutation {{ createOneScalarModel(data: {{ id: 1 - strings: ["test{}"] + strings: ["test{TROUBLE_CHARS}"] ints: [1337, 12] floats: [1.234, 1.45] booleans: [true, false] @@ -180,7 +180,7 @@ mod basic_types { dateTimes bytes }} - }}"#, TROUBLE_CHARS)), + }}"#)), 
@r###"{"data":{"createOneScalarModel":{"strings":["test¥฿😀😁😂😃😄😅😆😇😈😉😊😋😌😍😎😏😐😑😒😓😔😕😖😗😘😙😚😛😜😝😞😟😠😡😢😣😤😥😦😧😨😩😪😫😬😭😮😯😰😱😲😳😴😵😶😷😸😹😺😻😼😽😾😿🙀🙁🙂🙃🙄🙅🙆🙇🙈🙉🙊🙋🙌🙍🙎🙏ऀँंःऄअआइईउऊऋऌऍऎएऐऑऒओऔकखगघङचछजझञटठडढणतथदधनऩपफबभमयर€₭₮₯₰₱₲₳₴₵₶₷₸₹₺₻₼₽₾₿⃀"],"ints":[1337,12],"floats":[1.234,1.45],"booleans":[true,false],"enums":["A","A"],"dateTimes":["2016-07-31T23:59:01.000Z","2017-07-31T23:59:01.000Z"],"bytes":["dGVzdA==","dA=="]}}}"### ); @@ -326,7 +326,7 @@ mod basic_types { async fn create_row(runner: &Runner, data: &str) -> TestResult<()> { runner - .query(format!("mutation {{ createOneScalarModel(data: {}) {{ id }} }}", data)) + .query(format!("mutation {{ createOneScalarModel(data: {data}) {{ id }} }}")) .await? .assert_success(); Ok(()) diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/data_types/scalar_list/decimal.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/data_types/scalar_list/decimal.rs index 4ae4ab0fe3a9..4b04b0de1180 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/data_types/scalar_list/decimal.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/data_types/scalar_list/decimal.rs @@ -138,7 +138,7 @@ mod decimal { async fn create_row(runner: &Runner, data: &str) -> TestResult<()> { runner - .query(format!("mutation {{ createOneScalarModel(data: {}) {{ id }} }}", data)) + .query(format!("mutation {{ createOneScalarModel(data: {data}) {{ id }} }}")) .await? .assert_success(); Ok(()) diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/data_types/scalar_list/json.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/data_types/scalar_list/json.rs index 34e109cdf8e9..bcf30139f35c 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/data_types/scalar_list/json.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/data_types/scalar_list/json.rs @@ -136,7 +136,7 @@ mod json { async fn create_row(runner: &Runner, data: &str) -> TestResult<()> { runner - .query(format!("mutation {{ createOneScalarModel(data: {}) {{ id }} }}", data)) + .query(format!("mutation {{ createOneScalarModel(data: {data}) {{ id }} }}")) .await? .assert_success(); Ok(()) diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/filters/delete_many_rel_filter.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/filters/delete_many_rel_filter.rs index 9f2e9a184b46..155144d27051 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/filters/delete_many_rel_filter.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/filters/delete_many_rel_filter.rs @@ -145,7 +145,7 @@ mod delete_many_rel_filter { async fn create_row(runner: &Runner, data: &str) -> TestResult<()> { runner - .query(format!("mutation {{ createOneTop(data: {}) {{ id }} }}", data)) + .query(format!("mutation {{ createOneTop(data: {data}) {{ id }} }}")) .await? 
.assert_success(); Ok(()) diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/filters/update_many_rel_filter.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/filters/update_many_rel_filter.rs index cc579718d504..d3c7e794f997 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/filters/update_many_rel_filter.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/filters/update_many_rel_filter.rs @@ -150,7 +150,7 @@ mod update_many_rel_filter { async fn create_row(runner: &Runner, data: &str) -> TestResult<()> { runner - .query(format!("mutation {{ createOneTop(data: {}) {{ id }} }}", data)) + .query(format!("mutation {{ createOneTop(data: {data}) {{ id }} }}")) .await? .assert_success(); Ok(()) diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/ids/byoid.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/ids/byoid.rs index 2f2b39027040..69fed7fc3c76 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/ids/byoid.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/ids/byoid.rs @@ -66,7 +66,7 @@ mod byoid { createOneParent(data: {p: "Parent2", id: "Own Id"}){p, id} }"#, 2002, - format!("Unique constraint failed on the {}", error_target) + format!("Unique constraint failed on the {error_target}") ); Ok(()) @@ -94,7 +94,7 @@ mod byoid { createOneParent(data: {p: "Parent2", id: "Own Id"}){p, id} }"#, 2002, - format!("Unique constraint failed on the {}", error_target) + format!("Unique constraint failed on the {error_target}") ); Ok(()) @@ -152,7 +152,7 @@ mod byoid { createOneParent(data: {p: "Parent 2", id: "Own Id 2", childOpt:{create:{c:"Child 2", id: "Own Child Id"}}}){p, id, childOpt { c, id} } }"#, 2002, - format!("Unique constraint failed on the {}", error_target) + format!("Unique constraint failed on the {error_target}") ); Ok(()) @@ -180,7 +180,7 @@ mod byoid { createOneParent(data: {p: "Parent 2", id: "Own Id 2", childOpt:{create:{c:"Child 2", id: "Own Child Id"}}}){p, id, childOpt { c, id} } }"#, 2002, - format!("Unique constraint failed on the {}", error_target) + format!("Unique constraint failed on the {error_target}") ); Ok(()) diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_connect_inside_create.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_connect_inside_create.rs index 1dc38e5c78a7..0fdc90e8376b 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_connect_inside_create.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_connect_inside_create.rs @@ -35,13 +35,13 @@ mod connect_inside_create { run_query!(runner, format!(r#"mutation {{ createOneParent(data:{{ p: "p2", p_1:"p", p_2: "2", - childOpt: {{ connect: {child} }} + childOpt: {{ connect: {child_1} }} }}){{ childOpt {{ c }} }} - }}"#, child = child_1)), + }}"#)), @r###"{"data":{"createOneParent":{"childOpt":{"c":"c1"}}}}"### ); @@ -71,13 +71,13 @@ mod connect_inside_create { run_query!(runner, format!(r#"mutation {{ createOneParent(data:{{ p: "p1", p_1:"p", p_2: "1", - childOpt: {{ connect: {connect} }} + childOpt: {{ connect: {child_1} }} }}){{ childOpt {{ c }} }} - }}"#, connect = child_1)), + }}"#)), 
@r###"{"data":{"createOneParent":{"childOpt":{"c":"c1"}}}}"### ); @@ -108,13 +108,13 @@ mod connect_inside_create { run_query!(runner, format!(r#"mutation {{ createOneParent(data:{{ p: "p1", p_1:"p", p_2: "1", - childOpt: {{ connect: {connect} }} + childOpt: {{ connect: {child_1} }} }}){{ childOpt {{ c }} }} - }}"#, connect = child_1)), + }}"#)), @r###"{"data":{"createOneParent":{"childOpt":{"c":"c1"}}}}"### ); @@ -154,8 +154,7 @@ mod connect_inside_create { c }} }} - }}"#, - child = child + }}"# ), 2025, "An operation failed because it depends on one or more records that were required but not found." @@ -199,7 +198,7 @@ mod connect_inside_create { c }} }} - }}"#, child = child)), + }}"#)), @r###"{"data":{"createOneParent":{"childrenOpt":[{"c":"c1"}]}}}"### ); @@ -241,7 +240,7 @@ mod connect_inside_create { c }} }} - }}"#, child = child)), + }}"#)), @r###"{"data":{"createOneParent":{"childOpt":{"c":"c1"}}}}"### ); @@ -284,7 +283,7 @@ mod connect_inside_create { c }} }} - }}"#, child = child)), + }}"#)), @r###"{"data":{"createOneParent":{"childOpt":{"c":"c1"}}}}"### ); @@ -402,7 +401,7 @@ mod connect_inside_create { c }} }} - }}"#, child_id = child_id)), + }}"#)), @r###"{"data":{"createOneParent":{"childrenOpt":[{"c":"c1"}]}}}"### ); @@ -443,8 +442,7 @@ mod connect_inside_create { c }} }} - }}"#, - child = child + }}"# ), 2018, "The required connected records were not found. Expected 2 records to be connected after connect operation on one-to-many relation 'ChildToParent', found 1." @@ -486,8 +484,7 @@ mod connect_inside_create { c }} }} - }}"#, - child = child + }}"# ), 2018, "The required connected records were not found. Expected 1 records to be connected after connect operation on one-to-many relation 'ChildToParent', found 0." @@ -791,8 +788,7 @@ mod connect_inside_create { c }} }} - }}"#, - child = child + }}"# ), 2025, "An operation failed because it depends on one or more records that were required but not found. Expected 1 records to be connected, found only 0." 
diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_connect_inside_update.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_connect_inside_update.rs index 61bb59667621..563c0ee3bf9b 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_connect_inside_update.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_connect_inside_update.rs @@ -91,7 +91,7 @@ mod connect_inside_update { c }} }} - }}"#, parent_3 = parent_3, child_3 = child_3)), + }}"#)), @r###"{"data":{"updateOneParent":{"childOpt":{"c":"c3"}}}}"### ); @@ -102,7 +102,7 @@ mod connect_inside_update { c }} }} - }}"#, other_parent_with_child = other_parent_with_child)), + }}"#)), @r###"{"data":{"findUniqueParent":{"childOpt":{"c":"otherChild"}}}}"### ); @@ -111,7 +111,7 @@ mod connect_inside_update { findUniqueChild(where: {loose_child}){{ c }} - }}"#, loose_child = loose_child)), + }}"#)), @r###"{"data":{"findUniqueChild":{"c":"looseChild"}}}"### ); @@ -158,7 +158,7 @@ mod connect_inside_update { c }} }} - }}"#, parent_1 = parent_1, child_1 = child_1)), + }}"#)), @r###"{"data":{"updateOneParent":{"childOpt":{"c":"c1"}}}}"### ); @@ -210,7 +210,7 @@ mod connect_inside_update { c }} }} - }}"#, parent = parent, child = child)), + }}"#)), @r###"{"data":{"updateOneParent":{"childOpt":{"c":"c1"}}}}"### ); @@ -264,7 +264,7 @@ mod connect_inside_update { c }} }} - }}"#, parent = parent, child_id = child_id)), + }}"#)), @r###"{"data":{"updateOneParent":{"childOpt":{"c":"c1"}}}}"### ); @@ -318,7 +318,7 @@ mod connect_inside_update { c }} }} - }}"#, parent = parent, child = child)), + }}"#)), @r###"{"data":{"updateOneParent":{"childrenOpt":[{"c":"c1"}]}}}"### ); @@ -329,7 +329,7 @@ mod connect_inside_update { c }} }} - }}"#, parent = parent, child = child)), + }}"#)), @r###"{"data":{"updateOneParent":{"childrenOpt":[{"c":"c1"}]}}}"### ); @@ -414,7 +414,7 @@ mod connect_inside_update { c }} }} - }}"#, child = child)), + }}"#)), @r###"{"data":{"updateOneParent":{"childrenOpt":[{"c":"c2"},{"c":"c3"}]}}}"### ); @@ -428,8 +428,7 @@ mod connect_inside_update { c }} }} - }}"#, - other_parent_with_child = other_parent_with_child + }}"# ), &["data", "findUniqueParent", "childrenOpt", "[0]", "c"] ) @@ -496,7 +495,7 @@ mod connect_inside_update { c }} }} - }}"#, parent = parent, child = child), + }}"#), 2014, "The change you are trying to make would violate the required relation 'ChildToParent' between the `Child` and `Parent` models." 
); @@ -538,7 +537,7 @@ mod connect_inside_update { c }} }} - }}"#, parent = parent)), + }}"#)), @r###"{"data":{"updateOneParent":{"childOpt":{"c":"c2"}}}}"### ); @@ -579,7 +578,7 @@ mod connect_inside_update { c }} }} - }}"#, parent = parent)), + }}"#)), @r###"{"data":{"updateOneParent":{"childOpt":{"c":"c2"}}}}"### ); @@ -638,7 +637,7 @@ mod connect_inside_update { c }} }} - }}"#, parent = parent, child = child)), + }}"#)), @r###"{"data":{"updateOneParent":{"childOpt":{"c":"c1"}}}}"### ); @@ -690,7 +689,7 @@ mod connect_inside_update { c }} }} - }}"#, parent = parent)), + }}"#)), @r###"{"data":{"updateOneParent":{"childrenOpt":[{"c":"c1"},{"c":"c2"}]}}}"### ); @@ -745,7 +744,7 @@ mod connect_inside_update { c }} }} - }}"#, parent = parent, child = child)), + }}"#)), @r###"{"data":{"updateOneParent":{"childrenOpt":[{"c":"c1"}]}}}"### ); @@ -807,7 +806,7 @@ mod connect_inside_update { c }} }} - }}"#, parent = parent, child = child)), + }}"#)), @r###"{"data":{"updateOneParent":{"childReq":{"c":"c1"}}}}"### ); @@ -867,7 +866,7 @@ mod connect_inside_update { c }} }} - }}"#, parent = parent, child = child)), + }}"#)), @r###"{"data":{"updateOneParent":{"childReq":{"c":"c1"}}}}"### ); @@ -929,7 +928,7 @@ mod connect_inside_update { c }} }} - }}"#, parent = parent, child = child)), + }}"#)), @r###"{"data":{"updateOneParent":{"childOpt":{"c":"c1"}}}}"### ); @@ -984,7 +983,7 @@ mod connect_inside_update { c }} }} - }}"#, parent = parent, child = child)), + }}"#)), @r###"{"data":{"updateOneParent":{"childOpt":{"c":"c1"}}}}"### ); @@ -1051,7 +1050,7 @@ mod connect_inside_update { c }} }} - }}"#, parent = parent, children = children)), + }}"#)), @r###"{"data":{"updateOneParent":{"childrenOpt":[{"c":"c1"},{"c":"c2"},{"c":"c3"},{"c":"c4"}]}}}"### ); @@ -1106,7 +1105,7 @@ mod connect_inside_update { c }} }} - }}"#, parent = parent, child = child)), + }}"#)), @r###"{"data":{"updateOneParent":{"childrenOpt":[{"c":"c1"}]}}}"### ); diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_create_inside_update.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_create_inside_update.rs index 86c71d152b33..a1e8b43c83af 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_create_inside_update.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_create_inside_update.rs @@ -48,7 +48,7 @@ mod create_inside_update { c }} }} - }}"#, parent_id = parent_id)), + }}"#)), @r###"{"data":{"updateOneParent":{"childOpt":{"c":"SomeC"}}}}"### ); @@ -85,7 +85,7 @@ mod create_inside_update { c }} }} - }}"#, parent = parent)), + }}"#)), @r###"{"data":{"updateOneParent":{"childOpt":{"c":"SomeC"}}}}"### ); @@ -129,7 +129,7 @@ mod create_inside_update { c }} }} - }}"#, parent = parent)), + }}"#)), @r###"{"data":{"updateOneParent":{"childrenOpt":[{"c":"c1"},{"c":"c2"}]}}}"### ); @@ -174,7 +174,7 @@ mod create_inside_update { c }} }} - }}"#, parent = parent), + }}"#), 2014, "The change you are trying to make would violate the required relation 'ChildToParent' between the `Child` and `Parent` models." 
); @@ -218,7 +218,7 @@ mod create_inside_update { c }} }} - }}"#, parent = parent)), + }}"#)), @r###"{"data":{"updateOneParent":{"childOpt":{"c":"c1"}}}}"### ); @@ -262,7 +262,7 @@ mod create_inside_update { c }} }} - }}"#, parent = parent)), + }}"#)), @r###"{"data":{"updateOneParent":{"childrenOpt":[{"c":"c1"},{"c":"c2"},{"c":"c3"}]}}}"### ); @@ -314,7 +314,7 @@ mod create_inside_update { c }} }} - }}"#, parent = parent)), + }}"#)), @r###"{"data":{"updateOneParent":{"childReq":{"c":"c2"}}}}"### ); @@ -366,7 +366,7 @@ mod create_inside_update { c }} }} - }}"#, parent = parent)), + }}"#)), @r###"{"data":{"updateOneParent":{"childOpt":{"c":"c2"}}}}"### ); @@ -415,7 +415,7 @@ mod create_inside_update { c }} }} - }}"#, parent = parent)), + }}"#)), @r###"{"data":{"updateOneParent":{"childrenOpt":[{"c":"c1"},{"c":"c2"},{"c":"c3"}]}}}"### ); diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_delete_inside_update.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_delete_inside_update.rs index 9ca170c073c7..8e74d354f4ef 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_delete_inside_update.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_delete_inside_update.rs @@ -54,7 +54,7 @@ mod delete_inside_update { insta::assert_snapshot!( run_query!(runner, format!(r#"mutation {{ updateOneParent( - where: {parent} + where: {parent_2} data:{{ childOpt: {{delete: true}} }}){{ @@ -62,7 +62,7 @@ mod delete_inside_update { c }} }} - }}"#, parent = parent_2)), + }}"#)), @r###"{"data":{"updateOneParent":{"childOpt":null}}}"### ); @@ -75,7 +75,7 @@ mod delete_inside_update { c }} }} - }}"#, parent = parent)), + }}"#)), @r###"{"data":{"findUniqueParent":{"childOpt":{"c":"existingChild"}}}}"### ); @@ -128,7 +128,7 @@ mod delete_inside_update { insta::assert_snapshot!( run_query!(runner, format!(r#"mutation {{ updateOneParent( - where: {parent} + where: {parent_2} data:{{ childOpt: {{ delete: {{ non_unique: "0" }} }} }}){{ @@ -136,7 +136,7 @@ mod delete_inside_update { c }} }} - }}"#, parent = parent_2)), + }}"#)), @r###"{"data":{"updateOneParent":{"childOpt":null}}}"### ); @@ -148,7 +148,7 @@ mod delete_inside_update { c }} }} - }}"#, parent = parent)), + }}"#)), @r###"{"data":{"findUniqueParent":{"childOpt":{"c":"existingChild"}}}}"### ); @@ -185,7 +185,7 @@ mod delete_inside_update { c }} }} - }}"#, parent = parent), + }}"#), 2025, "An operation failed because it depends on one or more records that were required but not found. No 'Child' record was found for a nested delete on relation 'ChildToParent'." ); @@ -227,7 +227,7 @@ mod delete_inside_update { c }} }} - }}"#, parent = parent), + }}"#), 2025, "An operation failed because it depends on one or more records that were required but not found. No 'Child' record was found for a nested delete on relation 'ChildToParent'." 
); @@ -292,7 +292,7 @@ mod delete_inside_update { c }} }} - }}"#, parent = parent, child = child)), + }}"#)), @r###"{"data":{"updateOneParent":{"childrenOpt":[{"c":"c2"}]}}}"### ); @@ -353,7 +353,7 @@ mod delete_inside_update { c }} }} - }}"#, parent = parent, child = child)), + }}"#)), @r###"{"data":{"updateOneParent":{"childrenOpt":[{"c":"c2"}]}}}"### ); @@ -461,7 +461,7 @@ mod delete_inside_update { c }} }} - }}"#, parent = parent)), + }}"#)), @r###"{"data":{"updateOneParent":{"childOpt":null}}}"### ); @@ -502,7 +502,7 @@ mod delete_inside_update { c }} }} - }}"#, parent = parent)), + }}"#)), @r###"{"data":{"updateOneParent":{"childOpt":null}}}"### ); @@ -543,7 +543,7 @@ mod delete_inside_update { c }} }} - }}"#, parent = parent), + }}"#), 2025, "An operation failed because it depends on one or more records that were required but not found. No 'Child' record was found for a nested delete on relation 'ChildToParent'." ); @@ -603,7 +603,7 @@ mod delete_inside_update { c }} }} - }}"#, parent = parent, child = child)), + }}"#)), @r###"{"data":{"updateOneParent":{"childrenOpt":[{"c":"c2"}]}}}"### ); @@ -659,7 +659,7 @@ mod delete_inside_update { c }} }} - }}"#, parent = parent, child = child)), + }}"#)), @r###"{"data":{"updateOneParent":{"childrenOpt":[{"c":"c2"}]}}}"### ); @@ -768,7 +768,7 @@ mod delete_inside_update { c }} }} - }}"#, parent = parent), + }}"#), 2009, "`Mutation.updateOneParent.data.ParentUpdateInput.childReq.ChildUpdateOneRequiredWithoutParentsOptNestedInput.delete`: Field does not exist on enclosing type." ); @@ -813,7 +813,7 @@ mod delete_inside_update { c }} }} - }}"#, parent = parent)), + }}"#)), @r###"{"data":{"updateOneParent":{"childOpt":null}}}"### ); @@ -859,7 +859,7 @@ mod delete_inside_update { c }} }} - }}"#, parent = parent)), + }}"#)), @r###"{"data":{"updateOneParent":{"childOpt":null}}}"### ); @@ -901,7 +901,7 @@ mod delete_inside_update { c }} }} - }}"#, parent = parent), + }}"#), 2025, "An operation failed because it depends on one or more records that were required but not found. No 'Child' record was found for a nested delete on relation 'ChildToParent'." ); @@ -943,7 +943,7 @@ mod delete_inside_update { c }} }} - }}"#, parent = parent), + }}"#), 2025, "An operation failed because it depends on one or more records that were required but not found. No 'Child' record was found for a nested delete on relation 'ChildToParent'." ); @@ -1020,10 +1020,7 @@ mod delete_inside_update { c }} }} - }}"#, - parent = parent, - child_1 = child_1, - child_2 = child_2 + }}"# ), 2017, "The records for relation `ChildToParent` between the `Parent` and `Child` models are not connected." 
@@ -1040,7 +1037,7 @@ mod delete_inside_update { c }} }} - }}"#, parent = parent, child_2 = child_2)), + }}"#)), @r###"{"data":{"updateOneParent":{"childrenOpt":[{"c":"c3"},{"c":"c4"}]}}}"### ); @@ -1177,7 +1174,7 @@ mod delete_inside_update { c }} }} - }}"#, parent = parent, child_2 = child_2)), + }}"#)), @r###"{"data":{"updateOneParent":{"childrenOpt":[{"c":"c3"},{"c":"c4"}]}}}"### ); @@ -1259,10 +1256,7 @@ mod delete_inside_update { c }} }} - }}"#, - parent = parent, - child_1 = child_1, - child_2 = child_2 + }}"# ), 2017, "The records for relation `ChildToParent` between the `Parent` and `Child` models are not connected" @@ -1342,7 +1336,7 @@ mod delete_inside_update { c }} }} - }}"#, parent = parent, child_2 = child_2, child_3 = child_3)), + }}"#)), @r###"{"data":{"updateOneParent":{"childrenOpt":[{"c":"c4"}]}}}"### ); diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_delete_inside_upsert.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_delete_inside_upsert.rs index 6754b52e40c5..563a1eec5964 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_delete_inside_upsert.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_delete_inside_upsert.rs @@ -52,7 +52,7 @@ mod delete_inside_upsert { c }} }} - }}"#, parent = parent)), + }}"#)), @r###"{"data":{"upsertOneParent":{"childOpt":null}}}"### ); Ok(()) @@ -98,7 +98,7 @@ mod delete_inside_upsert { c }} }} - }}"#, parent = parent)), + }}"#)), @r###"{"data":{"upsertOneParent":{"childOpt":null}}}"### ); Ok(()) @@ -137,7 +137,7 @@ mod delete_inside_upsert { c }} }} - }}"#, parent = parent), + }}"#), 2025, "An operation failed because it depends on one or more records that were required but not found. No 'Child' record was found for a nested delete on relation 'ChildToParent'." ); @@ -187,7 +187,7 @@ mod delete_inside_upsert { c }} }} - }}"#, parent = parent), + }}"#), 2025, "An operation failed because it depends on one or more records that were required but not found. No 'Child' record was found for a nested delete on relation 'ChildToParent'." ); @@ -238,7 +238,7 @@ mod delete_inside_upsert { c }} }} - }}"#, parent = parent)), + }}"#)), @r###"{"data":{"upsertOneParent":{"childrenOpt":[]}}}"### ); Ok(()) @@ -283,7 +283,7 @@ mod delete_inside_upsert { c }} }} - }}"#, parent = parent)), + }}"#)), @r###"{"data":{"upsertOneParent":{"childrenOpt":[]}}}"### ); Ok(()) @@ -323,8 +323,7 @@ mod delete_inside_upsert { c }} }} - }}"#, - parent = parent + }}"# ), 2017, "The records for relation `ChildToParent` between the `Parent` and `Child` models are not connected." @@ -375,8 +374,7 @@ mod delete_inside_upsert { c }} }} - }}"#, - parent = parent + }}"# ), 2017, "The records for relation `ChildToParent` between the `Parent` and `Child` models are not connected." @@ -428,7 +426,7 @@ mod delete_inside_upsert { c }} }} - }}"#, parent = parent)), + }}"#)), @r###"{"data":{"upsertOneParent":{"childOpt":null}}}"### ); @@ -474,7 +472,7 @@ mod delete_inside_upsert { c }} }} - }}"#, parent = parent)), + }}"#)), @r###"{"data":{"upsertOneParent":{"childOpt":null}}}"### ); @@ -514,7 +512,7 @@ mod delete_inside_upsert { c }} }} - }}"#, parent = parent), + }}"#), 2025, "An operation failed because it depends on one or more records that were required but not found. 
No 'Child' record was found for a nested delete on relation 'ChildToParent'." ); @@ -564,7 +562,7 @@ mod delete_inside_upsert { c }} }} - }}"#, parent = parent), + }}"#), 2025, "An operation failed because it depends on one or more records that were required but not found. No 'Child' record was found for a nested delete on relation 'ChildToParent'." ); @@ -615,7 +613,7 @@ mod delete_inside_upsert { c }} }} - }}"#, parent = parent)), + }}"#)), @r###"{"data":{"upsertOneParent":{"childrenOpt":[{"c":"c1"}]}}}"### ); @@ -661,7 +659,7 @@ mod delete_inside_upsert { c }} }} - }}"#, parent = parent)), + }}"#)), @r###"{"data":{"upsertOneParent":{"childrenOpt":[{"c":"c1"}]}}}"### ); @@ -702,8 +700,7 @@ mod delete_inside_upsert { c }} }} - }}"#, - parent = parent + }}"# ), 2017, "The records for relation `ChildToParent` between the `Parent` and `Child` models are not connected." @@ -754,8 +751,7 @@ mod delete_inside_upsert { c }} }} - }}"#, - parent = parent + }}"# ), 2017, "The records for relation `ChildToParent` between the `Parent` and `Child` models are not connected." @@ -812,7 +808,7 @@ mod delete_inside_upsert { c }} }} - }}"#, parent = parent), + }}"#), 2009, "`Mutation.upsertOneParent.update.ParentUpdateInput.childReq.ChildUpdateOneRequiredWithoutParentsOptNestedInput.delete`: Field does not exist on enclosing type." ); @@ -865,7 +861,7 @@ mod delete_inside_upsert { c }} }} - }}"#, parent = parent), + }}"#), 2009, "`Mutation.upsertOneParent.update.ParentUpdateInput.childReq.ChildUpdateOneRequiredWithoutParentsOptNestedInput.delete`: Field does not exist on enclosing type." ); @@ -920,7 +916,7 @@ mod delete_inside_upsert { c }} }} - }}"#, parent = parent)), + }}"#)), @r###"{"data":{"upsertOneParent":{"childOpt":null}}}"### ); @@ -976,7 +972,7 @@ mod delete_inside_upsert { c }} }} - }}"#, parent = parent)), + }}"#)), @r###"{"data":{"upsertOneParent":{"childOpt":null}}}"### ); @@ -1031,7 +1027,7 @@ mod delete_inside_upsert { c }} }} - }}"#, parent = parent)), + }}"#)), @r###"{"data":{"upsertOneParent":{"childrenOpt":[]}}}"### ); @@ -1082,7 +1078,7 @@ mod delete_inside_upsert { c }} }} - }}"#, parent = parent)), + }}"#)), @r###"{"data":{"upsertOneParent":{"childrenOpt":[]}}}"### ); @@ -1128,8 +1124,7 @@ mod delete_inside_upsert { c }} }} - }}"#, - parent = parent + }}"# ), 2017, "The records for relation `ChildToParent` between the `Parent` and `Child` models are not connected." @@ -1180,8 +1175,7 @@ mod delete_inside_upsert { c }} }} - }}"#, - parent = parent + }}"# ), 2017, "The records for relation `ChildToParent` between the `Parent` and `Child` models are not connected." diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_delete_many_inside_update.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_delete_many_inside_update.rs index 77bd2a5f6346..2d9286519a18 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_delete_many_inside_update.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_delete_many_inside_update.rs @@ -36,7 +36,7 @@ mod delete_many_inside_update { c }} }} - }}"#, parent = parent), + }}"#), 2009, "`Mutation.updateOneParent.data.ParentUpdateInput.childOpt.ChildUpdateOneWithoutParentOptNestedInput.deleteMany`: Field does not exist on enclosing type." 
); @@ -64,8 +64,7 @@ mod delete_many_inside_update { }} }} - }}"#, - parent_1 = parent_1 + }}"# ) ); @@ -87,7 +86,7 @@ mod delete_many_inside_update { format!( r#"mutation {{ updateOneParent( - where: {parent} + where: {parent_1} data:{{ childrenOpt: {{deleteMany: {{ c: {{ contains:"c" }} @@ -99,8 +98,7 @@ mod delete_many_inside_update { }} }} - }}"#, - parent = parent_1 + }}"# ) ); @@ -122,7 +120,7 @@ mod delete_many_inside_update { format!( r#"mutation {{ updateOneParent( - where: {parent} + where: {parent_1} data:{{ childrenOpt: {{deleteMany: [ {{ c: {{ contains:"1" }} }}, @@ -133,8 +131,7 @@ mod delete_many_inside_update { c }} }} - }}"#, - parent = parent_1 + }}"# ) ); @@ -156,7 +153,7 @@ mod delete_many_inside_update { format!( r#"mutation {{ updateOneParent( - where: {parent} + where: {parent_1} data:{{ childrenOpt: {{ deleteMany: [{{}}] }} }}){{ @@ -164,8 +161,7 @@ mod delete_many_inside_update { c }} }} - }}"#, - parent = parent_1 + }}"# ) ); @@ -187,7 +183,7 @@ mod delete_many_inside_update { format!( r#"mutation {{ updateOneParent( - where: {parent} + where: {parent_1} data:{{ childrenOpt: {{deleteMany: [ {{ c: {{ contains:"3" }} }}, @@ -198,8 +194,7 @@ mod delete_many_inside_update { c }} }} - }}"#, - parent = parent_1 + }}"# ) ); diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_disconnect_inside_update.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_disconnect_inside_update.rs index 12eb2ef59bce..0c04ffb67955 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_disconnect_inside_update.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_disconnect_inside_update.rs @@ -44,7 +44,7 @@ mod disconnect_inside_update { c }} }} - }}"#, parent = parent)), + }}"#)), @r###"{"data":{"updateOneParent":{"childOpt":null}}}"### ); @@ -91,7 +91,7 @@ mod disconnect_inside_update { c }} }} - }}"#, parent = parent)), + }}"#)), @r###"{"data":{"updateOneParent":{"childOpt":null}}}"### ); @@ -139,7 +139,7 @@ mod disconnect_inside_update { c }} }} - }}"#, parent = parent)), + }}"#)), @r###"{"data":{"updateOneParent":{"childOpt":{"c":"c1"}}}}"### ); @@ -192,7 +192,7 @@ mod disconnect_inside_update { c }} }} - }}"#, parent = parent)), + }}"#)), @r###"{"data":{"updateOneParent":{"childOpt":null}}}"### ); @@ -287,7 +287,7 @@ mod disconnect_inside_update { c }} }} - }}"#, parent = parent), + }}"#), 2014, "The change you are trying to make would violate the required relation 'ChildToParent' between the `Child` and `Parent` models." 
); @@ -330,13 +330,13 @@ mod disconnect_inside_update { updateOneParent( where: {parent} data:{{ - childrenOpt: {{disconnect: [{child}]}} + childrenOpt: {{disconnect: [{second_child}]}} }}){{ childrenOpt {{ c }} }} - }}"#, parent = parent, child = second_child)), + }}"#)), @r###"{"data":{"updateOneParent":{"childrenOpt":[{"c":"c1"}]}}}"### ); Ok(()) @@ -382,7 +382,7 @@ mod disconnect_inside_update { c }} }} - }}"#, parent = parent)), + }}"#)), @r###"{"data":{"updateOneParent":{"childrenOpt":[{"c":"c1"}]}}}"### ); @@ -398,7 +398,7 @@ mod disconnect_inside_update { c }} }} - }}"#, parent = parent)), + }}"#)), @r###"{"data":{"updateOneParent":{"childrenOpt":[{"c":"c1"}]}}}"### ); @@ -447,7 +447,7 @@ mod disconnect_inside_update { c }} }} - }}"#, parent = parent)), + }}"#)), @r###"{"data":{"updateOneParent":{"childOpt":null}}}"### ); @@ -531,7 +531,7 @@ mod disconnect_inside_update { c }} }} - }}"#, parent = parent)), + }}"#)), @r###"{"data":{"updateOneParent":{"childrenOpt":[{"c":"c1"},{"c":"c2"}]}}}"### ); @@ -548,10 +548,7 @@ mod disconnect_inside_update { c }} }} - }}"#, - parent = parent, - first_child = first_child, - other_child = other_child + }}"# ) ); @@ -636,16 +633,13 @@ mod disconnect_inside_update { updateOneParent( where: {parent} data:{{ - childrenOpt: {{disconnect: [{child_1}, {child_2}]}} + childrenOpt: {{disconnect: [{child_1}, {other_child}]}} }}){{ childrenOpt{{ c }} }} - }}"#, - parent = parent, - child_1 = child_1, - child_2 = other_child + }}"# ) ); @@ -707,13 +701,13 @@ mod disconnect_inside_update { updateOneParent( where: {parent} data:{{ - childrenOpt: {{disconnect: [{child}]}} + childrenOpt: {{disconnect: [{child_1}]}} }}){{ childrenOpt{{ c }} }} - }}"#, parent = parent, child = child_1)), + }}"#)), @r###"{"data":{"updateOneParent":{"childrenOpt":[{"c":"c2"}]}}}"### ); diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_disconnect_inside_upsert.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_disconnect_inside_upsert.rs index 2f6e8f582bcf..064e4833b7b4 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_disconnect_inside_upsert.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_disconnect_inside_upsert.rs @@ -50,7 +50,7 @@ mod disconnect_inside_upsert { c }} }} - }}"#, parent = parent)), + }}"#)), @r###"{"data":{"upsertOneParent":{"childOpt":null}}}"### ); @@ -103,7 +103,7 @@ mod disconnect_inside_upsert { c }} }} - }}"#, parent = parent)), + }}"#)), @r###"{"data":{"upsertOneParent":{"childOpt":null}}}"### ); @@ -157,7 +157,7 @@ mod disconnect_inside_upsert { c }} }} - }}"#, parent = parent)), + }}"#)), @r###"{"data":{"upsertOneParent":{"childOpt":{"c":"c1"}}}}"### ); @@ -212,7 +212,7 @@ mod disconnect_inside_upsert { c }} }} - }}"#, parent = parent)), + }}"#)), @r###"{"data":{"upsertOneParent":{"childOpt":null}}}"### ); Ok(()) @@ -258,7 +258,7 @@ mod disconnect_inside_upsert { c }} }} - }}"#, parent = parent), + }}"#), 2014, "The change you are trying to make would violate the required relation 'ChildToParent' between the `Child` and `Parent` models." ); @@ -305,7 +305,7 @@ mod disconnect_inside_upsert { c }} }} - }}"#, parent = parent), + }}"#), 2014, "The change you are trying to make would violate the required relation 'ChildToParent' between the `Child` and `Parent` models." 
); @@ -351,7 +351,7 @@ mod disconnect_inside_upsert { c }} }} - }}"#, parent = parent)), + }}"#)), @r###"{"data":{"upsertOneParent":{"childrenOpt":[{"c":"c1"}]}}}"### ); @@ -401,7 +401,7 @@ mod disconnect_inside_upsert { c }} }} - }}"#, parent = parent)), + }}"#)), @r###"{"data":{"upsertOneParent":{"childOpt":null}}}"### ); @@ -452,7 +452,7 @@ mod disconnect_inside_upsert { c }} }} - }}"#, parent = parent)), + }}"#)), @r###"{"data":{"upsertOneParent":{"childrenOpt":[{"c":"c2"}]}}}"### ); diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_set_inside_update.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_set_inside_update.rs index 84834295fd5c..153681922274 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_set_inside_update.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_set_inside_update.rs @@ -54,7 +54,7 @@ mod set_inside_update { c }} }} - }}"#, parent = parent)), + }}"#)), @r###"{"data":{"updateOneParent":{"childrenOpt":[{"c":"c1"},{"c":"c2"}]}}}"### ); @@ -110,7 +110,7 @@ mod set_inside_update { c }} }} - }}"#, parent = parent)), + }}"#)), @r###"{"data":{"updateOneParent":{"childrenOpt":[{"c":"c1"},{"c":"c2"}]}}}"### ); @@ -165,7 +165,7 @@ mod set_inside_update { c }} }} - }}"#, parent = parent)), + }}"#)), @r###"{"data":{"updateOneParent":{"childrenOpt":[]}}}"### ); @@ -215,7 +215,7 @@ mod set_inside_update { c }} }} - }}"#, parent = parent, child = child)), + }}"#)), @r###"{"data":{"updateOneParent":{"childrenOpt":[{"c":"c1"}]}}}"### ); @@ -285,7 +285,7 @@ mod set_inside_update { c }} }} - }}"#, parent = parent)), + }}"#)), @r###"{"data":{"updateOneParent":{"childrenOpt":[{"c":"c1"},{"c":"c2"}]}}}"### ); @@ -342,7 +342,7 @@ mod set_inside_update { c }} }} - }}"#, parent = parent, child = child)), + }}"#)), @r###"{"data":{"updateOneParent":{"childrenOpt":[{"c":"c1"}]}}}"### ); @@ -413,7 +413,7 @@ mod set_inside_update { c }} }} - }}"#, parent = parent)), + }}"#)), @r###"{"data":{"updateOneParent":{"childrenOpt":[]}}}"### ); diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_update_many_inside_update.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_update_many_inside_update.rs index c4e56120487b..331449fbceae 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_update_many_inside_update.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_update_many_inside_update.rs @@ -72,8 +72,7 @@ mod um_inside_update { non_unique }} }} - }}"#, - parent = parent + }}"# ) ); @@ -107,8 +106,7 @@ mod um_inside_update { non_unique }} }} - }}"#, - parent = parent + }}"# ) ); @@ -142,8 +140,7 @@ mod um_inside_update { non_unique }} }} - }}"#, - parent = parent + }}"# ) ); @@ -183,8 +180,7 @@ mod um_inside_update { non_unique }} }} - }}"#, - parent = parent + }}"# ) ); @@ -220,8 +216,7 @@ mod um_inside_update { non_unique }} }} - }}"#, - parent = parent + }}"# ) ); @@ -261,8 +256,7 @@ mod um_inside_update { non_unique }} }} - }}"#, - parent = parent + }}"# ) ); @@ -304,8 +298,7 @@ mod um_inside_update { non_unique }} }} - }}"#, - parent = parent + }}"# ) ); diff --git 
a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_upsert_inside_update.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_upsert_inside_update.rs index fb7213229154..b71142d71b6d 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_upsert_inside_update.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_upsert_inside_update.rs @@ -46,7 +46,7 @@ mod upsert_inside_update { c }} }} - }}"#, parent = parent)), + }}"#)), @r###"{"data":{"updateOneParent":{"childrenOpt":[{"c":"updated C"}]}}}"### ); @@ -94,7 +94,7 @@ mod upsert_inside_update { c }} }} - }}"#, parent = parent)), + }}"#)), @r###"{"data":{"updateOneParent":{"childrenOpt":[{"c":"c1"},{"c":"c2"},{"c":"new C"}]}}}"### ); @@ -142,7 +142,7 @@ mod upsert_inside_update { c }} }} - }}"#, parent = parent)), + }}"#)), @r###"{"data":{"updateOneParent":{"childrenOpt":[{"c":"c2"},{"c":"updated C"}]}}}"### ); @@ -192,7 +192,7 @@ mod upsert_inside_update { c }} }} - }}"#, parent = parent)), + }}"#)), @r###"{"data":{"updateOneParent":{"childrenOpt":[{"c":"c1"},{"c":"updated C"}]}}}"### ); @@ -245,7 +245,7 @@ mod upsert_inside_update { c }} }} - }}"#, parent = parent)), + }}"#)), @r###"{"data":{"updateOneParent":{"childrenOpt":[{"c":"c1"},{"c":"c2"},{"c":"updated C"}]}}}"### ); diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/nested_atomic_number_ops.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/nested_atomic_number_ops.rs index 5910a1c14979..c325fccb6d64 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/nested_atomic_number_ops.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/nested_atomic_number_ops.rs @@ -479,18 +479,15 @@ mod atomic_number_ops { rel: {{ create: {{ id: {id} - optInt: {int} - optFloat: {float} + optInt: {i} + optFloat: {f} }} }} }} ) {{ id }} - }}"#, - id = id, - int = i, - float = f + }}"# ) ); @@ -516,11 +513,7 @@ mod atomic_number_ops { {field} }} }} - }}"#, - id = id, - field = field, - op = op, - value = value + }}"# ), &["data", "updateOneTestModel", "rel"] ); diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/not_using_schema_base/nested_connect_inside_upsert.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/not_using_schema_base/nested_connect_inside_upsert.rs index 94cc607d9219..23aecbe1ab2b 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/not_using_schema_base/nested_connect_inside_upsert.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/not_using_schema_base/nested_connect_inside_upsert.rs @@ -38,7 +38,7 @@ mod connect_inside_upsert { run_query!(&runner, format!(r#"mutation{{upsertOneParent(where: {{id: "5beea4aa6183dd734b2dbd9b"}}, create: {{p: "p1", childOpt:{{connect:{{id:{child_id}}}}}}}, update: {{p: {{ set: "p-new" }}}}) {{ childOpt{{ c }} }} - }}"#, child_id = child_id)), + }}"#)), @r###"{"data":{"upsertOneParent":{"childOpt":{"c":"c1"}}}}"### ); @@ -60,10 +60,10 @@ mod connect_inside_upsert { ); insta::assert_snapshot!( - run_query!(&runner, format!(r#"mutation{{upsertOneParent(where: {{id: {parent}}}, 
create: {{p: "p new"}}, update: {{p: {{ set: "p updated" }},childOpt:{{connect:{{id: {child}}}}}}}) {{ + run_query!(&runner, format!(r#"mutation{{upsertOneParent(where: {{id: {parent_id}}}, create: {{p: "p new"}}, update: {{p: {{ set: "p updated" }},childOpt:{{connect:{{id: {child_id}}}}}}}) {{ childOpt{{c}} }} - }}"#, parent = parent_id, child = child_id)), + }}"#)), @r###"{"data":{"upsertOneParent":{"childOpt":{"c":"c1"}}}}"### ); diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/not_using_schema_base/nested_create_many.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/not_using_schema_base/nested_create_many.rs index f62ab073f934..966aeb6d0df9 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/not_using_schema_base/nested_create_many.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/not_using_schema_base/nested_create_many.rs @@ -118,9 +118,7 @@ mod nested_create_many { // "Nested createMany" should "allow creating a large number of records (horizontal partitioning check)" #[connector_test(exclude(Sqlite))] async fn allow_create_large_number_records(runner: Runner) -> TestResult<()> { - let records: Vec<_> = (1..=1000) - .map(|i| format!(r#"{{ id: {}, str1: "{}" }}"#, i, i)) - .collect(); + let records: Vec<_> = (1..=1000).map(|i| format!(r#"{{ id: {i}, str1: "{i}" }}"#)).collect(); run_query!( runner, diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/not_using_schema_base/nested_update_inside_update.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/not_using_schema_base/nested_update_inside_update.rs index 43e654c44708..b8990df38191 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/not_using_schema_base/nested_update_inside_update.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/not_using_schema_base/nested_update_inside_update.rs @@ -51,7 +51,7 @@ mod update_inside_update { non_unique }} }} - }}"#, parent = parent)), + }}"#)), @r###"{"data":{"updateOneParent":{"childOpt":{"non_unique":"updated"}}}}"### ); @@ -99,7 +99,7 @@ mod update_inside_update { non_unique }} }} - }}"#, parent = parent)), + }}"#)), @r###"{"data":{"updateOneParent":{"childOpt":{"non_unique":"updated"}}}}"### ); @@ -136,7 +136,7 @@ mod update_inside_update { c }} }} - }}"#, parent = parent), + }}"#), 2025, "An operation failed because it depends on one or more records that were required but not found. No 'Child' record was found for a nested update on relation 'ChildToParent'." ); @@ -178,7 +178,7 @@ mod update_inside_update { c }} }} - }}"#, parent = parent), + }}"#), 2025, "An operation failed because it depends on one or more records that were required but not found. No 'Child' record was found for a nested update on relation 'ChildToParent'." 
); @@ -234,7 +234,7 @@ mod update_inside_update { non_unique }} }} - }}"#, parent = parent, child = child)), + }}"#)), @r###"{"data":{"updateOneParent":{"childrenOpt":[{"non_unique":"updated"},{"non_unique":null}]}}}"### ); @@ -282,7 +282,7 @@ mod update_inside_update { non_unique }} }} - }}"#, parent = parent)), + }}"#)), @r###"{"data":{"updateOneParent":{"childrenOpt":[{"non_unique":"updated"},{"non_unique":"1"}]}}}"### ); @@ -324,7 +324,7 @@ mod update_inside_update { c }} }} - }}"#, parent = parent), + }}"#), 2025, "An operation failed because it depends on one or more records that were required but not found. No 'Child' record was found for a nested update on relation 'ChildToParent'." ); @@ -371,7 +371,7 @@ mod update_inside_update { c }} }} - }}"#, parent = parent), + }}"#), 2025, "An operation failed because it depends on one or more records that were required but not found. No 'Child' record was found for a nested update on relation 'ChildToParent'." ); @@ -426,7 +426,7 @@ mod update_inside_update { non_unique }} }} - }}"#, parent = parent, child = child)), + }}"#)), @r###"{"data":{"updateOneParent":{"childrenOpt":[{"non_unique":"updated"},{"non_unique":null}]}}}"### ); @@ -473,7 +473,7 @@ mod update_inside_update { non_unique }} }} - }}"#, parent = parent)), + }}"#)), @r###"{"data":{"updateOneParent":{"childrenOpt":[{"non_unique":"updated"},{"non_unique":"1"}]}}}"### ); @@ -515,7 +515,7 @@ mod update_inside_update { c }} }} - }}"#, parent = parent), + }}"#), 2025, "An operation failed because it depends on one or more records that were required but not found. No 'Child' record was found for a nested update on relation 'ChildToParent'." ); @@ -562,7 +562,7 @@ mod update_inside_update { c }} }} - }}"#, parent = parent), + }}"#), 2025, "An operation failed because it depends on one or more records that were required but not found. No 'Child' record was found for a nested update on relation 'ChildToParent'." ); @@ -631,19 +631,19 @@ mod update_inside_update { ){{ text }} - }}"#, note_id = note_id), + }}"#), 2025, "An operation failed because it depends on one or more records that were required but not found. No 'Todo' record was found for a nested update on relation 'NoteToTodo'." // No Node for the model Todo with value DOES NOT EXIST for id found. ); insta::assert_snapshot!( - run_query!(&runner, format!(r#"query{{findUniqueNote(where:{{id: {note_id}}}){{text}}}}"#, note_id = note_id)), + run_query!(&runner, format!(r#"query{{findUniqueNote(where:{{id: {note_id}}}){{text}}}}"#)), @r###"{"data":{"findUniqueNote":{"text":"Some Text"}}}"### ); insta::assert_snapshot!( - run_query!(&runner, format!(r#"query{{findUniqueTodo(where:{{id: {todo_id}}}){{title}}}}"#, todo_id = todo_id)), + run_query!(&runner, format!(r#"query{{findUniqueTodo(where:{{id: {todo_id}}}){{title}}}}"#)), @r###"{"data":{"findUniqueTodo":{"title":"the title"}}}"### ); @@ -690,7 +690,7 @@ mod update_inside_update { ){{ text }} - }}"#, note_id = note_id), + }}"#), 2025, "An operation failed because it depends on one or more records that were required but not found. No 'Todo' record was found for a nested update on relation 'NoteToTodo'." 
); diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/relations/rel_defaults.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/relations/rel_defaults.rs index 1b7ee72cfb71..775f0d46f70b 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/relations/rel_defaults.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/relations/rel_defaults.rs @@ -215,7 +215,7 @@ mod rel_defaults { } async fn count_items(runner: &Runner, name: &str) -> TestResult { - let res = run_query_json!(runner, format!(r#"query {{ {} {{ id }} }}"#, name)); + let res = run_query_json!(runner, format!(r#"query {{ {name} {{ id }} }}"#)); let data = &res["data"][name]; match data { @@ -226,7 +226,7 @@ mod rel_defaults { async fn create_row(runner: &Runner, data: &str) -> TestResult<()> { runner - .query(format!("mutation {{ createOneList(data: {}) {{ id }} }}", data)) + .query(format!("mutation {{ createOneList(data: {data}) {{ id }} }}")) .await? .assert_success(); diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/relations/rel_design.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/relations/rel_design.rs index f1496114db47..11c83c5c2771 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/relations/rel_design.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/relations/rel_design.rs @@ -76,7 +76,7 @@ mod rel_design { } async fn count_items(runner: &Runner, name: &str) -> TestResult { - let res = run_query_json!(runner, format!(r#"query {{ {} {{ id }} }}"#, name)); + let res = run_query_json!(runner, format!(r#"query {{ {name} {{ id }} }}"#)); let data = &res["data"][name]; match data { @@ -87,7 +87,7 @@ mod rel_design { async fn create_row(runner: &Runner, data: &str) -> TestResult<()> { runner - .query(format!("mutation {{ createOneList(data: {}) {{ id }} }}", data)) + .query(format!("mutation {{ createOneList(data: {data}) {{ id }} }}")) .await? .assert_success(); Ok(()) diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/relations/rel_graphql.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/relations/rel_graphql.rs index acd13599599c..5979c1e45f9d 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/relations/rel_graphql.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/relations/rel_graphql.rs @@ -87,15 +87,13 @@ mod rel_graphql { match model_name { "Cat" => runner .query(format!( - "mutation {{ createOneCat(data: {{ catName: \"{}\" }}) {{ id }} }}", - name + "mutation {{ createOneCat(data: {{ catName: \"{name}\" }}) {{ id }} }}" )) .await? .assert_success(), "Owner" => runner .query(format!( - "mutation {{ createOneOwner(data: {{ ownerName: \"{}\" }}) {{ id }} }}", - name + "mutation {{ createOneOwner(data: {{ ownerName: \"{name}\" }}) {{ id }} }}" )) .await? 
.assert_success(), diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/relations/same_model_self_rel_without_back_rel.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/relations/same_model_self_rel_without_back_rel.rs index cebf7ea12288..314bba37f2ed 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/relations/same_model_self_rel_without_back_rel.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/relations/same_model_self_rel_without_back_rel.rs @@ -110,7 +110,7 @@ mod self_rel_no_back_rel { async fn create_row(runner: &Runner, data: &str) -> TestResult<()> { runner - .query(format!("mutation {{ createOnePost(data: {}) {{ id }} }}", data)) + .query(format!("mutation {{ createOnePost(data: {data}) {{ id }} }}")) .await? .assert_success(); Ok(()) diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/create.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/create.rs index 7e76c929d652..81b642719afa 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/create.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/create.rs @@ -46,7 +46,7 @@ mod create { r#"mutation {{ createOneScalarModel(data: {{ id: "1", - optString: "lala{}", + optString: "lala{TROUBLE_CHARS}", optInt: 1337, optFloat: 1.234, optBoolean: true, @@ -55,8 +55,7 @@ mod create { }}) {{ id, optString, optInt, optFloat, optBoolean, optEnum, optDateTime }} - }}"#, - TROUBLE_CHARS + }}"# )) }, 5 diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/create_many.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/create_many.rs index dedacef37ee5..17aabae8b9db 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/create_many.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/create_many.rs @@ -198,7 +198,7 @@ mod create_many { let mut records: Vec = vec![]; for i in 1..=1000 { - records.push(format!("{{ id: {} }}", i)); + records.push(format!("{{ id: {i} }}")); } insta::assert_snapshot!( @@ -236,7 +236,7 @@ mod create_many { let mut records: Vec = vec![]; for i in 1..=2000 { - records.push(format!("{{ id: {}, a: {}, b: {}, c: {} }}", i, i, i, i)); + records.push(format!("{{ id: {i}, a: {i}, b: {i}, c: {i} }}")); } insta::assert_snapshot!( diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/delete.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/delete.rs index 2939ff533794..030beced7d91 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/delete.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/delete.rs @@ -134,7 +134,7 @@ mod delete { async fn create_row(runner: &Runner, data: &str) -> TestResult<()> { runner - .query(format!("mutation {{ createOneScalarModel(data: {}) {{ id }} }}", data)) + .query(format!("mutation {{ createOneScalarModel(data: {data}) {{ id }} }}")) .await? 
.assert_success(); Ok(()) diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/delete_many.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/delete_many.rs index 2065714b6473..4011011c895f 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/delete_many.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/delete_many.rs @@ -248,7 +248,7 @@ mod delete_many { async fn create_row(runner: &Runner, data: &str) -> TestResult<()> { runner - .query(format!("mutation {{ createOneTodo(data: {}) {{ id }} }}", data)) + .query(format!("mutation {{ createOneTodo(data: {data}) {{ id }} }}")) .await? .assert_success(); Ok(()) diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/insert_null_in_required_field.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/insert_null_in_required_field.rs index 43edac1e4797..f8c9954424be 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/insert_null_in_required_field.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/insert_null_in_required_field.rs @@ -150,7 +150,7 @@ mod insert_null { async fn create_row(runner: &Runner, data: &str) -> TestResult<()> { runner - .query(format!("mutation {{ createOneA(data: {}) {{ id }} }}", data)) + .query(format!("mutation {{ createOneA(data: {data}) {{ id }} }}")) .await? .assert_success(); Ok(()) diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/non_embedded_upsert.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/non_embedded_upsert.rs index 45865ae1ef07..39dd6aa4e787 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/non_embedded_upsert.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/non_embedded_upsert.rs @@ -270,7 +270,7 @@ mod non_embedded_upsert { } async fn count_items(runner: &Runner, name: &str) -> TestResult { - let res = run_query_json!(runner, format!("query {{ {} {{ id }} }}", name)); + let res = run_query_json!(runner, format!("query {{ {name} {{ id }} }}")); let count = &res["data"][name]; match count { diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/update.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/update.rs index 16c3dd60b5f0..b33fa940be95 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/update.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/update.rs @@ -161,7 +161,7 @@ mod update { updateOneTestModel( where: {{ id: 1 }} data: {{ - optString: {{ set: "test{}" }} + optString: {{ set: "test{TROUBLE_CHARS}" }} optInt: {{ set: 1337 }} optFloat: {{ set: 1.234 }} optBoolean: {{ set: true }} @@ -174,7 +174,7 @@ mod update { optBoolean optDateTime }} - }}"#, TROUBLE_CHARS)), + }}"#)), @r###"{"data":{"updateOneTestModel":{"optString":"test¥฿😀😁😂😃😄😅😆😇😈😉😊😋😌😍😎😏😐😑😒😓😔😕😖😗😘😙😚😛😜😝😞😟😠😡😢😣😤😥😦😧😨😩😪😫😬😭😮😯😰😱😲😳😴😵😶😷😸😹😺😻😼😽😾😿🙀🙁🙂🙃🙄🙅🙆🙇🙈🙉🙊🙋🙌🙍🙎🙏ऀँंःऄअआइईउऊऋऌऍऎएऐऑऒओऔकखगघङचछजझञटठडढणतथदधनऩपफबभमयर€₭₮₯₰₱₲₳₴₵₶₷₸₹₺₻₼₽₾₿⃀","optInt":1337,"optFloat":1.234,"optBoolean":true,"optDateTime":"2016-07-31T23:59:01.000Z"}}}"### ); 
@@ -196,7 +196,7 @@ mod update { updateOneTestModel( where: {{ id: 1 }} data: {{ - optString: "test{}", + optString: "test{TROUBLE_CHARS}", optInt: 1337, optFloat: 1.234, optBoolean: true, @@ -209,7 +209,7 @@ mod update { optBoolean optDateTime }} - }}"#, TROUBLE_CHARS)), + }}"#)), @r###"{"data":{"updateOneTestModel":{"optString":"test¥฿😀😁😂😃😄😅😆😇😈😉😊😋😌😍😎😏😐😑😒😓😔😕😖😗😘😙😚😛😜😝😞😟😠😡😢😣😤😥😦😧😨😩😪😫😬😭😮😯😰😱😲😳😴😵😶😷😸😹😺😻😼😽😾😿🙀🙁🙂🙃🙄🙅🙆🙇🙈🙉🙊🙋🙌🙍🙎🙏ऀँंःऄअआइईउऊऋऌऍऎएऐऑऒओऔकखगघङचछजझञटठडढणतथदधनऩपफबभमयर€₭₮₯₰₱₲₳₴₵₶₷₸₹₺₻₼₽₾₿⃀","optInt":1337,"optFloat":1.234,"optBoolean":true,"optDateTime":"2016-07-31T23:59:01.000Z"}}}"### ); @@ -642,13 +642,12 @@ mod update { format!( r#"mutation {{ updateOneTestModel( - where: {{ id: {} }} - data: {{ {}: {{ {}: {} }} }} + where: {{ id: {id} }} + data: {{ {field}: {{ {op}: {value} }} }} ){{ - {} + {field} }} - }}"#, - id, field, op, value, field + }}"# ) ); @@ -678,7 +677,7 @@ mod update { async fn create_row(runner: &Runner, data: &str) -> TestResult<()> { runner - .query(format!("mutation {{ createOneTestModel(data: {}) {{ id }} }}", data)) + .query(format!("mutation {{ createOneTestModel(data: {data}) {{ id }} }}")) .await? .assert_success(); Ok(()) diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/update_many.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/update_many.rs index af48a96c7cbb..2d77728e795e 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/update_many.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/update_many.rs @@ -286,12 +286,11 @@ mod update_many { r#"mutation {{ updateManyTestModel( where: {{}} - data: {{ {}: {{ {}: {} }} }} + data: {{ {field}: {{ {op}: {value} }} }} ){{ count }} - }}"#, - field, op, value + }}"# ) ); @@ -302,13 +301,13 @@ mod update_many { assert_eq!(count, 3); } - let res = run_query!(runner, format!(r#"{{ findManyTestModel {{ {} }} }}"#, field)); + let res = run_query!(runner, format!(r#"{{ findManyTestModel {{ {field} }} }}"#)); Ok(res) } async fn create_row(runner: &Runner, data: &str) -> TestResult<()> { runner - .query(format!("mutation {{ createOneTestModel(data: {}) {{ id }} }}", data)) + .query(format!("mutation {{ createOneTestModel(data: {data}) {{ id }} }}")) .await? .assert_success(); diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/upsert.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/upsert.rs index 5a675df03630..3f931352fa46 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/upsert.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/upsert.rs @@ -717,14 +717,13 @@ mod upsert { format!( r#"mutation {{ upsertOneTestModel( - where: {{ id: {} }} - create: {{ id: {} }} - update: {{ {}: {{ {}: {} }} }} + where: {{ id: {id} }} + create: {{ id: {id} }} + update: {{ {field}: {{ {op}: {value} }} }} ){{ - {} + {field} }} - }}"#, - id, id, field, op, value, field + }}"# ) ); @@ -733,7 +732,7 @@ mod upsert { async fn create_row(runner: &Runner, data: &str) -> TestResult<()> { runner - .query(format!("mutation {{ createOneTestModel(data: {}) {{ id }} }}", data)) + .query(format!("mutation {{ createOneTestModel(data: {data}) {{ id }} }}")) .await? 
.assert_success(); Ok(()) diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/uniques_and_node_selectors/multi_field_uniq_mutation.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/uniques_and_node_selectors/multi_field_uniq_mutation.rs index 050e3c4040a8..9a052dcadebf 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/uniques_and_node_selectors/multi_field_uniq_mutation.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/uniques_and_node_selectors/multi_field_uniq_mutation.rs @@ -515,7 +515,7 @@ mod multi_field_uniq_mut { async fn create_user(runner: &Runner, data: &str) -> TestResult<()> { runner - .query(format!("mutation {{ createOneUser(data: {}) {{ id }} }}", data)) + .query(format!("mutation {{ createOneUser(data: {data}) {{ id }} }}")) .await? .assert_success(); Ok(()) @@ -523,7 +523,7 @@ mod multi_field_uniq_mut { async fn create_blog(runner: &Runner, data: &str) -> TestResult<()> { runner - .query(format!("mutation {{ createOneBlog(data: {}) {{ id }} }}", data)) + .query(format!("mutation {{ createOneBlog(data: {data}) {{ id }} }}")) .await? .assert_success(); Ok(()) diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/uniques_and_node_selectors/non_embedded_setting_node_selector_to_null.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/uniques_and_node_selectors/non_embedded_setting_node_selector_to_null.rs index 958a38b1a267..333c25547ff4 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/uniques_and_node_selectors/non_embedded_setting_node_selector_to_null.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/uniques_and_node_selectors/non_embedded_setting_node_selector_to_null.rs @@ -73,7 +73,7 @@ mod non_embedded_node_sel_to_null { async fn create_row(runner: &Runner, data: &str) -> TestResult<()> { runner - .query(format!("mutation {{ createOneA(data: {}) {{ id }} }}", data)) + .query(format!("mutation {{ createOneA(data: {data}) {{ id }} }}")) .await? 
.assert_success(); Ok(()) diff --git a/query-engine/connector-test-kit-rs/query-test-macros/src/args/connector_test.rs b/query-engine/connector-test-kit-rs/query-test-macros/src/args/connector_test.rs index 3a6ff369ec18..50d475193678 100644 --- a/query-engine/connector-test-kit-rs/query-test-macros/src/args/connector_test.rs +++ b/query-engine/connector-test-kit-rs/query-test-macros/src/args/connector_test.rs @@ -70,7 +70,7 @@ impl darling::FromMeta for RelationMode { match value.to_lowercase().as_str() { "prisma" => Ok(Self::Prisma), "foreignkeys" => Ok(Self::ForeignKeys), - _ => Err(darling::Error::custom(format!("Invalid value: {}", value))), + _ => Err(darling::Error::custom(format!("Invalid value: {value}"))), } } } @@ -181,7 +181,7 @@ impl darling::FromMeta for ExcludeFeatures { } fn strings_to_list(name: &str, items: &[syn::NestedMeta]) -> Result, darling::Error> { - let error = format!("{} can only be string literals.", name); + let error = format!("{name} can only be string literals."); items .iter() .map(|i| match i { diff --git a/query-engine/connector-test-kit-rs/query-test-macros/src/connector_test.rs b/query-engine/connector-test-kit-rs/query-test-macros/src/connector_test.rs index 233fa445fbc5..66aa11aa3237 100644 --- a/query-engine/connector-test-kit-rs/query-test-macros/src/connector_test.rs +++ b/query-engine/connector-test-kit-rs/query-test-macros/src/connector_test.rs @@ -47,14 +47,14 @@ pub fn connector_test_impl(attr: TokenStream, input: TokenStream) -> TokenStream let test_fn_ident = test_function.sig.ident.clone(); // Rename original test function to run_. - let runner_fn_ident = Ident::new(&format!("run_{}", test_fn_ident), Span::call_site()); + let runner_fn_ident = Ident::new(&format!("run_{test_fn_ident}"), Span::call_site()); test_function.sig.ident = runner_fn_ident.clone(); // The test database name is the name used as the database for data source rendering. // Combination of test name and test mod name. 
let test_name = test_fn_ident.to_string(); let suite_name = args.suite.expect("A test must have a test suite."); - let test_database_name = format!("{}_{}", suite_name, test_name); + let test_database_name = format!("{suite_name}_{test_name}"); let capabilities = args.capabilities.idents; let referential_override = match args.relation_mode.or(args.referential_integrity) { diff --git a/query-engine/connector-test-kit-rs/query-test-macros/src/lib.rs b/query-engine/connector-test-kit-rs/query-test-macros/src/lib.rs index 34b1a04dd932..8a2bd4df5eef 100644 --- a/query-engine/connector-test-kit-rs/query-test-macros/src/lib.rs +++ b/query-engine/connector-test-kit-rs/query-test-macros/src/lib.rs @@ -37,9 +37,9 @@ trait IntoDarlingError { impl IntoDarlingError for std::result::Result { fn into_darling_error(self, span: &Span) -> std::result::Result { self.map_err(|err| match err { - TestError::ParseError(msg) => darling::Error::custom(format!("Parsing error: {}.", msg)).with_span(span), + TestError::ParseError(msg) => darling::Error::custom(format!("Parsing error: {msg}.")).with_span(span), TestError::ConfigError(msg) => { - darling::Error::custom(format!("Configuration error: {}.", msg)).with_span(span) + darling::Error::custom(format!("Configuration error: {msg}.")).with_span(span) } err => unimplemented!("{:?} not yet handled for test setup compilation", err), }) diff --git a/query-engine/connector-test-kit-rs/query-test-macros/src/relation_link_test.rs b/query-engine/connector-test-kit-rs/query-test-macros/src/relation_link_test.rs index 3ad2057d34ae..073c38f2ae57 100644 --- a/query-engine/connector-test-kit-rs/query-test-macros/src/relation_link_test.rs +++ b/query-engine/connector-test-kit-rs/query-test-macros/src/relation_link_test.rs @@ -79,7 +79,7 @@ pub fn relation_link_test_impl(attr: TokenStream, input: TokenStream) -> TokenSt let test_fn_ident = test_function.sig.ident.clone(); // Rename original test function to run_. - let runner_fn_ident = Ident::new(&format!("run_{}", test_fn_ident), Span::call_site()); + let runner_fn_ident = Ident::new(&format!("run_{test_fn_ident}"), Span::call_site()); test_function.sig.ident = runner_fn_ident.clone(); // The test database name is the name used as the database for data source rendering. @@ -110,12 +110,12 @@ pub fn relation_link_test_impl(attr: TokenStream, input: TokenStream) -> TokenSt let test_shells = datamodels.into_iter().enumerate().map(|(i, dm)| { // The shell function retains the name of the original test definition. 
- let test_fn_ident = Ident::new(&format!("{}_{}", test_fn_ident, i), Span::call_site()); + let test_fn_ident = Ident::new(&format!("{test_fn_ident}_{i}"), Span::call_site()); let datamodel: proc_macro2::TokenStream = format!(r#""{}""#, dm.datamodel()) .parse() .expect("Could not parse the datamodel"); let dm_with_params: String = dm.try_into().expect("Could not serialize json"); - let test_database = format!("{}_{}_{}", suite_name, test_name, i); + let test_database = format!("{suite_name}_{test_name}_{i}"); let required_capabilities = required_capabilities .get(i) .expect("Could not find some required capabilities") diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/cockroachdb.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/cockroachdb.rs index eb4ade02d2c5..ed07c2469ac6 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/cockroachdb.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/cockroachdb.rs @@ -25,9 +25,9 @@ impl ConnectorTagInterface for CockroachDbConnectorTag { // Use the same database and schema name for CockroachDB - unfortunately CockroachDB // can't handle 1 schema per test in a database well at this point in time. if is_ci { - format!("postgresql://prisma@test-db-cockroachdb:26257/{0}?schema={0}", database) + format!("postgresql://prisma@test-db-cockroachdb:26257/{database}?schema={database}") } else { - format!("postgresql://prisma@127.0.0.1:26257/{0}?schema={0}", database) + format!("postgresql://prisma@127.0.0.1:26257/{database}?schema={database}") } } diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/mod.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/mod.rs index e1c1df61174d..b7d99905a346 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/mod.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/mod.rs @@ -108,7 +108,7 @@ impl fmt::Display for ConnectorTag { Self::Cockroach(_) => "CockroachDB", }; - write!(f, "{}", printable) + write!(f, "{printable}") } } @@ -133,13 +133,13 @@ impl fmt::Display for ConnectorVersion { }, Self::Sqlite => "SQLite".to_string(), Self::Vitess(v) => match v { - Some(v) => format!("Vitess ({})", v), + Some(v) => format!("Vitess ({v})"), None => "Vitess (unknown)".to_string(), }, Self::CockroachDb => "CockroachDB".to_string(), }; - write!(f, "{}", printable) + write!(f, "{printable}") } } @@ -167,7 +167,7 @@ impl ConnectorTag { ) -> bool { let current_connector = config.test_connector_tag().unwrap(); if !enabled.contains(&current_connector) { - println!("Skipping test '{}', current test connector is not enabled.", test_name); + println!("Skipping test '{test_name}', current test connector is not enabled."); return false; } @@ -176,8 +176,7 @@ impl ConnectorTag { .any(|cap| !current_connector.capabilities().contains(cap)) { println!( - "Skipping test '{}', current test connector doesn't offer one or more capabilities that are required.", - test_name + "Skipping test '{test_name}', current test connector doesn't offer one or more capabilities that are required."
); return false; } @@ -208,7 +207,7 @@ impl TryFrom<(&str, Option<&str>)> for ConnectorTag { "mysql" => Self::MySql(MySqlConnectorTag::new(version)?), "mongodb" => Self::MongoDb(MongoDbConnectorTag::new(version)?), "vitess" => Self::Vitess(VitessConnectorTag::new(version)?), - _ => return Err(TestError::parse_error(format!("Unknown connector tag `{}`", connector))), + _ => return Err(TestError::parse_error(format!("Unknown connector tag `{connector}`"))), }; Ok(tag) diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/mongodb.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/mongodb.rs index e21d05e71943..d2c5692fb24f 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/mongodb.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/mongodb.rs @@ -26,34 +26,22 @@ impl ConnectorTagInterface for MongoDbConnectorTag { ) -> String { match self.version { Some(MongoDbVersion::V4_2) if is_ci => format!( - "mongodb://prisma:prisma@test-db-mongodb-4-2:27016/{}?authSource=admin&retryWrites=true", - database + "mongodb://prisma:prisma@test-db-mongodb-4-2:27016/{database}?authSource=admin&retryWrites=true" ), Some(MongoDbVersion::V4_2) => { - format!( - "mongodb://prisma:prisma@127.0.0.1:27016/{}?authSource=admin&retryWrites=true", - database - ) + format!("mongodb://prisma:prisma@127.0.0.1:27016/{database}?authSource=admin&retryWrites=true") } Some(MongoDbVersion::V4_4) if is_ci => format!( - "mongodb://prisma:prisma@test-db-mongodb-4-4:27017/{}?authSource=admin&retryWrites=true", - database + "mongodb://prisma:prisma@test-db-mongodb-4-4:27017/{database}?authSource=admin&retryWrites=true" ), Some(MongoDbVersion::V4_4) => { - format!( - "mongodb://prisma:prisma@127.0.0.1:27017/{}?authSource=admin&retryWrites=true", - database - ) + format!("mongodb://prisma:prisma@127.0.0.1:27017/{database}?authSource=admin&retryWrites=true") + } + Some(MongoDbVersion::V5) if is_ci => { + format!("mongodb://prisma:prisma@test-db-mongodb-5:27018/{database}?authSource=admin&retryWrites=true") } - Some(MongoDbVersion::V5) if is_ci => format!( - "mongodb://prisma:prisma@test-db-mongodb-5:27018/{}?authSource=admin&retryWrites=true", - database - ), Some(MongoDbVersion::V5) => { - format!( - "mongodb://prisma:prisma@127.0.0.1:27018/{}?authSource=admin&retryWrites=true", - database - ) + format!("mongodb://prisma:prisma@127.0.0.1:27018/{database}?authSource=admin&retryWrites=true") } None => unreachable!("A versioned connector must have a concrete version to run."), } @@ -138,7 +126,7 @@ impl TryFrom<&str> for MongoDbVersion { "4.4" => Self::V4_4, "4.2" => Self::V4_2, "5" => Self::V5, - _ => return Err(TestError::parse_error(format!("Unknown MongoDB version `{}`", s))), + _ => return Err(TestError::parse_error(format!("Unknown MongoDB version `{s}`"))), }; Ok(version) diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/mysql.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/mysql.rs index f4dffa1d2c1c..4297642dd1e4 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/mysql.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/mysql.rs @@ -31,17 +31,17 @@ impl ConnectorTagInterface for MySqlConnectorTag { _: Option<&'static str>, ) -> String { match self.version { - Some(MySqlVersion::V5_6) if is_ci => format!("mysql://root:prisma@test-db-mysql-5-6:3306/{}", database), - Some(MySqlVersion::V5_7) if 
is_ci => format!("mysql://root:prisma@test-db-mysql-5-7:3306/{}", database), - Some(MySqlVersion::V8) if is_ci => format!("mysql://root:prisma@test-db-mysql-8:3306/{}", database), + Some(MySqlVersion::V5_6) if is_ci => format!("mysql://root:prisma@test-db-mysql-5-6:3306/{database}"), + Some(MySqlVersion::V5_7) if is_ci => format!("mysql://root:prisma@test-db-mysql-5-7:3306/{database}"), + Some(MySqlVersion::V8) if is_ci => format!("mysql://root:prisma@test-db-mysql-8:3306/{database}"), Some(MySqlVersion::MariaDb) if is_ci => { - format!("mysql://root:prisma@test-db-mysql-mariadb:3306/{}", database) + format!("mysql://root:prisma@test-db-mysql-mariadb:3306/{database}") } - Some(MySqlVersion::V5_6) => format!("mysql://root:prisma@127.0.0.1:3309/{}", database), - Some(MySqlVersion::V5_7) => format!("mysql://root:prisma@127.0.0.1:3306/{}", database), - Some(MySqlVersion::V8) => format!("mysql://root:prisma@127.0.0.1:3307/{}", database), + Some(MySqlVersion::V5_6) => format!("mysql://root:prisma@127.0.0.1:3309/{database}"), + Some(MySqlVersion::V5_7) => format!("mysql://root:prisma@127.0.0.1:3306/{database}"), + Some(MySqlVersion::V8) => format!("mysql://root:prisma@127.0.0.1:3307/{database}"), Some(MySqlVersion::MariaDb) => { - format!("mysql://root:prisma@127.0.0.1:3308/{}", database) + format!("mysql://root:prisma@127.0.0.1:3308/{database}") } None => unreachable!("A versioned connector must have a concrete version to run."), @@ -126,7 +126,7 @@ impl TryFrom<&str> for MySqlVersion { "5.7" => Self::V5_7, "8" => Self::V8, "mariadb" => Self::MariaDb, - _ => return Err(TestError::parse_error(format!("Unknown MySQL version `{}`", s))), + _ => return Err(TestError::parse_error(format!("Unknown MySQL version `{s}`"))), }; Ok(version) diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/postgres.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/postgres.rs index 55c7578d606d..0030b2c3085d 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/postgres.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/postgres.rs @@ -20,47 +20,45 @@ impl ConnectorTagInterface for PostgresConnectorTag { let database = if is_multi_schema { database.to_string() } else { - format!("db?schema={}", database) + format!("db?schema={database}") }; match self.version { Some(PostgresVersion::V9) if is_ci => { - format!("postgresql://postgres:prisma@test-db-postgres-9:5432/{}", database) + format!("postgresql://postgres:prisma@test-db-postgres-9:5432/{database}") } Some(PostgresVersion::V10) if is_ci => { - format!("postgresql://postgres:prisma@test-db-postgres-10:5432/{}", database) + format!("postgresql://postgres:prisma@test-db-postgres-10:5432/{database}") } Some(PostgresVersion::V11) if is_ci => { - format!("postgresql://postgres:prisma@test-db-postgres-11:5432/{}", database) + format!("postgresql://postgres:prisma@test-db-postgres-11:5432/{database}") } Some(PostgresVersion::V12) if is_ci => { - format!("postgresql://postgres:prisma@test-db-postgres-12:5432/{}", database) + format!("postgresql://postgres:prisma@test-db-postgres-12:5432/{database}") } Some(PostgresVersion::V13) if is_ci => { - format!("postgresql://postgres:prisma@test-db-postgres-13:5432/{}", database) + format!("postgresql://postgres:prisma@test-db-postgres-13:5432/{database}") } Some(PostgresVersion::V14) if is_ci => { - format!("postgresql://postgres:prisma@test-db-postgres-14:5432/{}", database) + 
format!("postgresql://postgres:prisma@test-db-postgres-14:5432/{database}") } Some(PostgresVersion::V15) if is_ci => { - format!("postgresql://postgres:prisma@test-db-postgres-15:5432/{}", database) + format!("postgresql://postgres:prisma@test-db-postgres-15:5432/{database}") + } + Some(PostgresVersion::PgBouncer) if is_ci => { + format!("postgresql://postgres:prisma@test-db-pgbouncer:6432/{database}&pgbouncer=true") + } + + Some(PostgresVersion::V9) => format!("postgresql://postgres:prisma@127.0.0.1:5431/{database}"), + Some(PostgresVersion::V10) => format!("postgresql://postgres:prisma@127.0.0.1:5432/{database}"), + Some(PostgresVersion::V11) => format!("postgresql://postgres:prisma@127.0.0.1:5433/{database}"), + Some(PostgresVersion::V12) => format!("postgresql://postgres:prisma@127.0.0.1:5434/{database}"), + Some(PostgresVersion::V13) => format!("postgresql://postgres:prisma@127.0.0.1:5435/{database}"), + Some(PostgresVersion::V14) => format!("postgresql://postgres:prisma@127.0.0.1:5437/{database}"), + Some(PostgresVersion::V15) => format!("postgresql://postgres:prisma@127.0.0.1:5438/{database}"), + Some(PostgresVersion::PgBouncer) => { + format!("postgresql://postgres:prisma@127.0.0.1:6432/db?{database}&pgbouncer=true") } - Some(PostgresVersion::PgBouncer) if is_ci => format!( - "postgresql://postgres:prisma@test-db-pgbouncer:6432/{}&pgbouncer=true", - database - ), - - Some(PostgresVersion::V9) => format!("postgresql://postgres:prisma@127.0.0.1:5431/{}", database), - Some(PostgresVersion::V10) => format!("postgresql://postgres:prisma@127.0.0.1:5432/{}", database), - Some(PostgresVersion::V11) => format!("postgresql://postgres:prisma@127.0.0.1:5433/{}", database), - Some(PostgresVersion::V12) => format!("postgresql://postgres:prisma@127.0.0.1:5434/{}", database), - Some(PostgresVersion::V13) => format!("postgresql://postgres:prisma@127.0.0.1:5435/{}", database), - Some(PostgresVersion::V14) => format!("postgresql://postgres:prisma@127.0.0.1:5437/{}", database), - Some(PostgresVersion::V15) => format!("postgresql://postgres:prisma@127.0.0.1:5438/{}", database), - Some(PostgresVersion::PgBouncer) => format!( - "postgresql://postgres:prisma@127.0.0.1:6432/db?{}&pgbouncer=true", - database - ), None => unreachable!("A versioned connector must have a concrete version to run."), } @@ -172,7 +170,7 @@ impl TryFrom<&str> for PostgresVersion { "14" => Self::V14, "15" => Self::V15, "pgbouncer" => Self::PgBouncer, - _ => return Err(TestError::parse_error(format!("Unknown Postgres version `{}`", s))), + _ => return Err(TestError::parse_error(format!("Unknown Postgres version `{s}`"))), }; Ok(version) diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/sql_server.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/sql_server.rs index fa106ec3e45f..379dae80fb2a 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/sql_server.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/sql_server.rs @@ -25,22 +25,22 @@ impl ConnectorTagInterface for SqlServerConnectorTag { isolation_level: Option<&'static str>, ) -> String { let database = if is_multi_schema { - format!("database={};schema=dbo", database) + format!("database={database};schema=dbo") } else { - format!("database=master;schema={}", database) + format!("database=master;schema={database}") }; let isolation_level = isolation_level.unwrap_or("READ UNCOMMITTED"); match self.version { - Some(SqlServerVersion::V2017) if is_ci => 
format!("sqlserver://test-db-sqlserver-2017:1433;{};user=SA;password=;trustServerCertificate=true;isolationLevel={isolation_level}", database), - Some(SqlServerVersion::V2017) => format!("sqlserver://127.0.0.1:1434;{};user=SA;password=;trustServerCertificate=true;isolationLevel={isolation_level}", database), + Some(SqlServerVersion::V2017) if is_ci => format!("sqlserver://test-db-sqlserver-2017:1433;{database};user=SA;password=;trustServerCertificate=true;isolationLevel={isolation_level}"), + Some(SqlServerVersion::V2017) => format!("sqlserver://127.0.0.1:1434;{database};user=SA;password=;trustServerCertificate=true;isolationLevel={isolation_level}"), - Some(SqlServerVersion::V2019) if is_ci => format!("sqlserver://test-db-sqlserver-2019:1433;{};user=SA;password=;trustServerCertificate=true;isolationLevel={isolation_level}", database), - Some(SqlServerVersion::V2019) => format!("sqlserver://127.0.0.1:1433;{};user=SA;password=;trustServerCertificate=true;isolationLevel={isolation_level}", database), + Some(SqlServerVersion::V2019) if is_ci => format!("sqlserver://test-db-sqlserver-2019:1433;{database};user=SA;password=;trustServerCertificate=true;isolationLevel={isolation_level}"), + Some(SqlServerVersion::V2019) => format!("sqlserver://127.0.0.1:1433;{database};user=SA;password=;trustServerCertificate=true;isolationLevel={isolation_level}"), - Some(SqlServerVersion::V2022) if is_ci => format!("sqlserver://test-db-sqlserver-2022:1433;{};user=SA;password=;trustServerCertificate=true;isolationLevel={isolation_level}", database), - Some(SqlServerVersion::V2022) => format!("sqlserver://127.0.0.1:1435;{};user=SA;password=;trustServerCertificate=true;isolationLevel={isolation_level}", database), + Some(SqlServerVersion::V2022) if is_ci => format!("sqlserver://test-db-sqlserver-2022:1433;{database};user=SA;password=;trustServerCertificate=true;isolationLevel={isolation_level}"), + Some(SqlServerVersion::V2022) => format!("sqlserver://127.0.0.1:1435;{database};user=SA;password=;trustServerCertificate=true;isolationLevel={isolation_level}"), None => unreachable!("A versioned connector must have a concrete version to run."), } @@ -123,7 +123,7 @@ impl TryFrom<&str> for SqlServerVersion { "2017" => Self::V2017, "2019" => Self::V2019, "2022" => Self::V2022, - _ => return Err(TestError::parse_error(format!("Unknown SqlServer version `{}`", s))), + _ => return Err(TestError::parse_error(format!("Unknown SqlServer version `{s}`"))), }; Ok(version) diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/sqlite.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/sqlite.rs index 70ef316364cc..f9a3ad54c2b8 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/sqlite.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/sqlite.rs @@ -27,7 +27,7 @@ impl ConnectorTagInterface for SqliteConnectorTag { .trim_end_matches('/') .to_owned(); - format!("file://{}/db/{}.db", workspace_root, database) + format!("file://{workspace_root}/db/{database}.db") } fn capabilities(&self) -> &[ConnectorCapability] { diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/vitess.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/vitess.rs index 8e3b9faaee54..4ba42f9b827c 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/vitess.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/connector_tag/vitess.rs @@ -106,7 +106,7 @@ impl 
FromStr for VitessVersion { let version = match s { "5.7" => Self::V5_7, "8.0" => Self::V8_0, - _ => return Err(TestError::parse_error(format!("Unknown Vitess version `{}`", s))), + _ => return Err(TestError::parse_error(format!("Unknown Vitess version `{s}`"))), }; Ok(version) diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/datamodel_rendering/mod.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/datamodel_rendering/mod.rs index 93293d174858..f31161e7a002 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/datamodel_rendering/mod.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/datamodel_rendering/mod.rs @@ -45,7 +45,7 @@ pub fn render_test_datamodel( let is_multi_schema = !db_schemas.is_empty(); let schema_def = if is_multi_schema { - format!("schemas = {:?}", db_schemas) + format!("schemas = {db_schemas:?}") } else { String::default() }; @@ -74,7 +74,7 @@ pub fn render_test_datamodel( let renderer = tag.datamodel_renderer(); let models = process_template(template, renderer); - format!("{}\n\n{}", datasource_with_generator, models) + format!("{datasource_with_generator}\n\n{models}") } fn process_template(template: String, renderer: Box) -> String { @@ -93,13 +93,13 @@ fn process_template(template: String, renderer: Box) -> S } fn render_preview_features(excluded_features: &[&str]) -> String { - let excluded_features: Vec<_> = excluded_features.iter().map(|f| format!(r#""{}""#, f)).collect(); + let excluded_features: Vec<_> = excluded_features.iter().map(|f| format!(r#""{f}""#)).collect(); ALL_PREVIEW_FEATURES .active_features() .iter() .chain(ALL_PREVIEW_FEATURES.hidden_features()) - .map(|f| format!(r#""{}""#, f)) + .map(|f| format!(r#""{f}""#)) .filter(|f| !excluded_features.contains(f)) .join(", ") } diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/datamodel_rendering/sql_renderer.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/datamodel_rendering/sql_renderer.rs index 1de515ee707b..967c258cd4aa 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/datamodel_rendering/sql_renderer.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/datamodel_rendering/sql_renderer.rs @@ -16,7 +16,7 @@ impl DatamodelRenderer for SqlDatamodelRenderer { fn render_m2m(&self, m2m: M2mFragment) -> String { let relation_directive = match m2m.relation_name { - Some(name) => format!(r#"@relation(name: "{}")"#, name), + Some(name) => format!(r#"@relation(name: "{name}")"#), None => "".to_owned(), }; diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/logging.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/logging.rs index 1447506a9128..79d92affe426 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/logging.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/logging.rs @@ -74,7 +74,7 @@ impl std::io::Write for PrintWriter { let plain_log = std::str::from_utf8(&plain_bytes).unwrap_or(""); let _ = self.tx.send(plain_log.to_string()); } - eprint!("{}", log); + eprint!("{log}"); Ok(buf.len()) } diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/query_result.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/query_result.rs index 12d4cec9a0cc..83855fde1c59 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/query_result.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/query_result.rs @@ -39,7 +39,7 @@ impl QueryResult { return; } - let err_code = 
err_code.map(|code| format!("P{}", code)); + let err_code = err_code.map(|code| format!("P{code}")); let err_exists = self.errors().into_iter().any(|err| { let code_matches = err.code() == err_code.as_deref(); let msg_matches = match msg_contains.as_ref() { diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/runner/binary.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/runner/binary.rs index faad759a589a..c11f08e5a0b9 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/runner/binary.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/runner/binary.rs @@ -148,7 +148,7 @@ impl RunnerInterface for BinaryRunner { } async fn commit_tx(&self, tx_id: TxId) -> TestResult { - let uri = format!("/transaction/{}/commit", tx_id); + let uri = format!("/transaction/{tx_id}/commit"); let req = Request::builder() .uri(uri.as_str()) @@ -170,7 +170,7 @@ impl RunnerInterface for BinaryRunner { } async fn rollback_tx(&self, tx_id: TxId) -> TestResult { - let uri = format!("/transaction/{}/rollback", tx_id); + let uri = format!("/transaction/{tx_id}/rollback"); let req = Request::builder() .uri(uri.as_str()) diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/runner/mod.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/runner/mod.rs index f57c7c23b677..ca3a200116a5 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/runner/mod.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/runner/mod.rs @@ -89,7 +89,7 @@ impl Runner { "direct" => Self::direct(datamodel, connector_tag, metrics).await, "node-api" => Ok(RunnerType::NodeApi(NodeApiRunner {})), "binary" => Self::binary(datamodel, connector_tag, metrics).await, - unknown => Err(TestError::parse_error(format!("Unknown test runner '{}'", unknown))), + unknown => Err(TestError::parse_error(format!("Unknown test runner '{unknown}'"))), }?; Ok(Self { log_capture, inner }) diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/schema_gen/identifiers.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/schema_gen/identifiers.rs index 0c11490c9146..ef39b14ce31b 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/schema_gen/identifiers.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/schema_gen/identifiers.rs @@ -17,6 +17,6 @@ impl std::fmt::Display for Identifier { Identifier::None => "", }; - write!(f, "{}", name) + write!(f, "{name}") } } diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/schema_gen/parse.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/schema_gen/parse.rs index e1686169ab40..509fc188883f 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/schema_gen/parse.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/schema_gen/parse.rs @@ -19,8 +19,7 @@ pub fn walk_json<'a>(json: &'a serde_json::Value, path: &[&str]) -> Result<&'a s match key { Some(val) => Ok(val), None => Err(TestError::parse_error(format!( - "Could not walk the JSON value `{}`. The key `{}` does not exist", - json, p + "Could not walk the JSON value `{json}`. 
The key `{p}` does not exist" ))), } }) @@ -61,7 +60,7 @@ pub fn parse_compound_id( let arguments = fields .iter() .zip(field_values.iter()) - .map(|(name, value)| format!("{}: {}", name, value)) + .map(|(name, value)| format!("{name}: {value}")) .join(","); Ok(format!( @@ -70,9 +69,7 @@ pub fn parse_compound_id( {arguments} }}, {meta} - }}", - arg_name = arg_name, - arguments = arguments + }}" )) } @@ -93,8 +90,7 @@ pub fn parse_many_compound_ids( Ok(compound_ids) } x => Err(TestError::parse_error(format!( - "An array was expected but we found: `{}` instead", - x + "An array was expected but we found: `{x}` instead" ))), } } @@ -111,8 +107,7 @@ pub fn parse_many_ids(field: &str, json: &serde_json::Value, path: &[&str]) -> R Ok(ids) } x => Err(TestError::parse_error(format!( - "An array was expected but we found: `{}` instead", - x + "An array was expected but we found: `{x}` instead" ))), } } diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/schema_gen/relation_field.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/schema_gen/relation_field.rs index 0f17d61cb99e..afddb110d865 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/schema_gen/relation_field.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/schema_gen/relation_field.rs @@ -86,8 +86,7 @@ impl TryFrom<(&str, bool)> for RelationField { "ToMany" => RelationField::ToMany { child }, _ => { return Err(TestError::parse_error(format!( - "Unknown relation field `{}`. Valid names are: ToOneOpt, ToOneReq and ToMany", - name + "Unknown relation field `{name}`. Valid names are: ToOneOpt, ToOneReq and ToMany" ))) } }; diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/templating/parse_models.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/templating/parse_models.rs index fb9821c0eefe..71279c800ddd 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/templating/parse_models.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/templating/parse_models.rs @@ -123,7 +123,7 @@ impl FragmentArgument { FragmentArgument::Value(s) => Ok(s), FragmentArgument::Directive(_) => Err(TemplatingError::argument_error( "unknown", - format!("Expected Value argument, got: {:?}", self), + format!("Expected Value argument, got: {self:?}"), )), } } @@ -132,7 +132,7 @@ impl FragmentArgument { match self { FragmentArgument::Value(_) => Err(TemplatingError::argument_error( "unknown", - format!("Expected Directive argument, got: {:?}", self), + format!("Expected Directive argument, got: {self:?}"), )), FragmentArgument::Directive(dir) => Ok(dir), } diff --git a/query-engine/connectors/mongodb-query-connector/src/cursor.rs b/query-engine/connectors/mongodb-query-connector/src/cursor.rs index 8e772b6774d3..9adbf8c1966b 100644 --- a/query-engine/connectors/mongodb-query-connector/src/cursor.rs +++ b/query-engine/connectors/mongodb-query-connector/src/cursor.rs @@ -62,7 +62,7 @@ impl CursorBuilder { let (left_bind_field_name, right_binding_field_name) = order_data.binding_names(); // For: `"let": { fieldName: "$fieldName" }` bindings for the outer pipeline. 
- bindings.insert(left_bind_field_name, format!("${}", right_binding_field_name)); + bindings.insert(left_bind_field_name, format!("${right_binding_field_name}")); } let cursor_condition = cursor_conditions(self.order_data, self.reverse); diff --git a/query-engine/connectors/mongodb-query-connector/src/error.rs b/query-engine/connectors/mongodb-query-connector/src/error.rs index a46525e4fb78..fe6d64c17879 100644 --- a/query-engine/connectors/mongodb-query-connector/src/error.rs +++ b/query-engine/connectors/mongodb-query-connector/src/error.rs @@ -95,12 +95,11 @@ impl MongoError { ConnectorError::from_kind(ErrorKind::ConversionError(err.into())) } MongoError::MissingRequiredArgumentError { argument } => ConnectorError::from_kind(ErrorKind::RawApiError( - format!("Missing required argument: '{}'.", argument), + format!("Missing required argument: '{argument}'."), )), MongoError::ArgumentTypeMismatchError { argument, have, want } => { ConnectorError::from_kind(ErrorKind::RawApiError(format!( - "Argument type mismatch for '{}'. Have: {}, want: {}.", - argument, have, want + "Argument type mismatch for '{argument}'. Have: {have}, want: {want}." ))) } @@ -209,17 +208,17 @@ fn driver_error_to_connector_error(err: DriverError) -> ConnectorError { } } - mongodb::error::ErrorKind::BsonDeserialization(err) => ConnectorError::from_kind( - ErrorKind::InternalConversionError(format!("BSON decode error: {}", err)), - ), + mongodb::error::ErrorKind::BsonDeserialization(err) => { + ConnectorError::from_kind(ErrorKind::InternalConversionError(format!("BSON decode error: {err}"))) + } - mongodb::error::ErrorKind::BsonSerialization(err) => ConnectorError::from_kind( - ErrorKind::InternalConversionError(format!("BSON encode error: {}", err)), - ), + mongodb::error::ErrorKind::BsonSerialization(err) => { + ConnectorError::from_kind(ErrorKind::InternalConversionError(format!("BSON encode error: {err}"))) + } _ => ConnectorError::from_kind(ErrorKind::RawDatabaseError { code: "unknown".to_owned(), - message: format!("{}", err), + message: format!("{err}"), }), } } @@ -244,7 +243,7 @@ fn parse_unique_index_violation(message: &str) -> Option { impl From for MongoError { fn from(err: mongodb::bson::oid::Error) -> Self { - MongoError::MalformedObjectId(format!("{}", err)) + MongoError::MalformedObjectId(format!("{err}")) } } diff --git a/query-engine/connectors/mongodb-query-connector/src/filter.rs b/query-engine/connectors/mongodb-query-connector/src/filter.rs index 36ae2d5a61f2..44ca06cf875b 100644 --- a/query-engine/connectors/mongodb-query-connector/src/filter.rs +++ b/query-engine/connectors/mongodb-query-connector/src/filter.rs @@ -1073,7 +1073,7 @@ impl FilterPrefix { } if self.parts.is_empty() { - format!("${}", target) + format!("${target}") } else { format!("${}.{}", self.render(), target) } diff --git a/query-engine/connectors/mongodb-query-connector/src/join.rs b/query-engine/connectors/mongodb-query-connector/src/join.rs index 4856f96ba844..49107e543849 100644 --- a/query-engine/connectors/mongodb-query-connector/src/join.rs +++ b/query-engine/connectors/mongodb-query-connector/src/join.rs @@ -100,7 +100,7 @@ impl JoinStage { .enumerate() .map(|(idx, right_field)| { let right_ref = format!("${}", right_field.db_name()); - let left_var = format!("$$left_{}", idx); + let left_var = format!("$$left_{idx}"); match relation.is_many_to_many() { true if right_field.is_list() => doc! 
{ "$in": [left_var, right_ref] }, @@ -118,7 +118,7 @@ impl JoinStage { // Go through every right field to place in the $addFields operator for right_field in right_scalars.iter() { let right_name = right_field.db_name(); - let right_ref = format!("${}", right_name); + let right_ref = format!("${right_name}"); add_fields.insert( right_name, @@ -153,7 +153,7 @@ impl JoinStage { // If the field is a to-one, add an unwind stage. let unwind_stage = if !from_field.is_list() { Some(doc! { - "$unwind": { "path": format!("${}", as_name), "preserveNullAndEmptyArrays": true } + "$unwind": { "path": format!("${as_name}"), "preserveNullAndEmptyArrays": true } }) } else { None @@ -166,7 +166,7 @@ impl JoinStage { // With the left side, we need to introduce the variable `left_x` pointing to the correct field for (idx, left_field) in left_scalars.iter().enumerate() { - let left_var = format!("left_{}", idx); + let left_var = format!("left_{idx}"); let_vars.insert(left_var, format!("${}", left_field.db_name())); } diff --git a/query-engine/connectors/mongodb-query-connector/src/lib.rs b/query-engine/connectors/mongodb-query-connector/src/lib.rs index 584c4eef4403..c5c023d580aa 100644 --- a/query-engine/connectors/mongodb-query-connector/src/lib.rs +++ b/query-engine/connectors/mongodb-query-connector/src/lib.rs @@ -38,7 +38,7 @@ impl BsonTransform for Bson { Ok(doc) } else { Err(MongoError::ConversionError { - from: format!("{:?}", self), + from: format!("{self:?}"), to: "Bson::Document".to_string(), }) } diff --git a/query-engine/connectors/mongodb-query-connector/src/orderby.rs b/query-engine/connectors/mongodb-query-connector/src/orderby.rs index 72748d6ed921..b92fe30bf4ec 100644 --- a/query-engine/connectors/mongodb-query-connector/src/orderby.rs +++ b/query-engine/connectors/mongodb-query-connector/src/orderby.rs @@ -269,7 +269,7 @@ impl OrderByBuilder { &data, )); - order_aggregate_proj_doc.push(doc! { "$addFields": { field_name.clone(): { "$size": { "$ifNull": [format!("${}", field_name), []] } } } }); + order_aggregate_proj_doc.push(doc! { "$addFields": { field_name.clone(): { "$size": { "$ifNull": [format!("${field_name}"), []] } } } }); } } _ => unimplemented!("Order by aggregate only supports COUNT"), @@ -324,13 +324,12 @@ fn unwind_aggregate_joins( if let Some(next_part) = data.prefix.as_ref().and_then(|prefix| prefix.parts.get(i + 1)) { additional_stages.push(doc! { "$unwind": { - "path": format!("${}", join_name), + "path": format!("${join_name}"), "preserveNullAndEmptyArrays": true } }); - additional_stages - .push(doc! { "$addFields": { join_name: format!("${}.{}", join_name, next_part) } }); + additional_stages.push(doc! { "$addFields": { join_name: format!("${join_name}.{next_part}") } }); } Some(additional_stages) diff --git a/query-engine/connectors/mongodb-query-connector/src/query_builder/group_by_builder.rs b/query-engine/connectors/mongodb-query-connector/src/query_builder/group_by_builder.rs index 09914aec5270..b7667a45825f 100644 --- a/query-engine/connectors/mongodb-query-connector/src/query_builder/group_by_builder.rs +++ b/query-engine/connectors/mongodb-query-connector/src/query_builder/group_by_builder.rs @@ -210,7 +210,7 @@ impl GroupByBuilder { fn aggregation_pair(op: &str, field: &ScalarFieldRef) -> (String, Bson) { ( format!("{}_{}", op, field.db_name()), - doc! { format!("${}", op): format!("${}", field.db_name()) }.into(), + doc! 
{ format!("${op}"): format!("${}", field.db_name()) }.into(), ) } diff --git a/query-engine/connectors/mongodb-query-connector/src/query_strings.rs b/query-engine/connectors/mongodb-query-connector/src/query_strings.rs index d2ca1ee6e3b2..b51adc663f1a 100644 --- a/query-engine/connectors/mongodb-query-connector/src/query_strings.rs +++ b/query-engine/connectors/mongodb-query-connector/src/query_strings.rs @@ -292,7 +292,7 @@ fn fmt_val(buffer: &mut String, val: &Bson, depth: usize) -> std::fmt::Result { match val { Bson::Array(ary) => fmt_list(buffer, ary, depth + 1), Bson::Document(doc) => fmt_doc(buffer, doc, depth + 1), - val => write!(buffer, "{}", val), + val => write!(buffer, "{val}"), } } diff --git a/query-engine/connectors/mongodb-query-connector/src/root_queries/raw.rs b/query-engine/connectors/mongodb-query-connector/src/root_queries/raw.rs index 2a8934443d92..f9923716173e 100644 --- a/query-engine/connectors/mongodb-query-connector/src/root_queries/raw.rs +++ b/query-engine/connectors/mongodb-query-connector/src/root_queries/raw.rs @@ -100,7 +100,7 @@ impl QueryRawParsingExtension for HashMap { .into_iter() .map(|stage| { stage.into_document().map_err(|_| { - MongoError::argument_type_mismatch(key, format!("{:?}", pv), "Json::Array") + MongoError::argument_type_mismatch(key, format!("{pv:?}"), "Json::Array") }) }) .try_collect()?; @@ -134,7 +134,7 @@ impl QueryRawConversionExtension for &PrismaValue { Ok(Bson::Array(bson)) } - x => Err(MongoError::argument_type_mismatch(arg_name, format!("{:?}", x), "Json")), + x => Err(MongoError::argument_type_mismatch(arg_name, format!("{x:?}"), "Json")), } } @@ -145,7 +145,7 @@ impl QueryRawConversionExtension for &PrismaValue { Bson::Document(doc) => Ok(doc), bson => Err(MongoError::argument_type_mismatch( arg_name, - format!("{:?}", bson), + format!("{bson:?}"), "Json::Object", )), } @@ -158,7 +158,7 @@ impl QueryRawConversionExtension for &PrismaValue { Bson::Array(doc) => Ok(doc), bson => Err(MongoError::argument_type_mismatch( arg_name, - format!("{:?}", bson), + format!("{bson:?}"), "Json::Array", )), } diff --git a/query-engine/connectors/mongodb-query-connector/src/value.rs b/query-engine/connectors/mongodb-query-connector/src/value.rs index 52991aaa9cad..6677e353f6a2 100644 --- a/query-engine/connectors/mongodb-query-connector/src/value.rs +++ b/query-engine/connectors/mongodb-query-connector/src/value.rs @@ -189,8 +189,8 @@ impl IntoBson for (&MongoDbType, PrismaValue) { // Unhandled conversions (mdb_type, p_val) => { return Err(MongoError::ConversionError { - from: format!("{:?}", p_val), - to: format!("{:?}", mdb_type), + from: format!("{p_val:?}"), + to: format!("{mdb_type:?}"), }) } }) @@ -275,8 +275,7 @@ impl IntoBson for (&TypeIdentifier, PrismaValue) { (ident, val) => { return Err(MongoError::Unsupported(format!( - "Unhandled and unsupported value mapping for MongoDB: {} as {:?}.", - val, ident, + "Unhandled and unsupported value mapping for MongoDB: {val} as {ident:?}.", ))) } }) @@ -305,8 +304,8 @@ fn read_scalar_value(bson: Bson, meta: &ScalarOutputMeta) -> crate::Result { return Err(MongoError::ConversionError { - from: format!("{}", bson), - to: format!("List of {:?}", type_identifier), + from: format!("{bson}"), + to: format!("List of {type_identifier:?}"), }); } }, @@ -378,7 +377,7 @@ fn read_scalar_value(bson: Bson, meta: &ScalarOutputMeta) -> crate::Result { return Err(MongoError::ConversionError { from: bson.to_string(), - to: format!("{:?}", ident), + to: format!("{ident:?}"), }) } }; @@ -400,7 +399,7 @@ fn 
read_composite_value(bson: Bson, meta: &CompositeOutputMeta) -> crate::Result _ => { return Err(MongoError::ConversionError { - from: format!("{}", bson), + from: format!("{bson}"), to: "List".to_owned(), }); } @@ -437,7 +436,7 @@ fn read_composite_value(bson: Bson, meta: &CompositeOutputMeta) -> crate::Result } bson => { return Err(MongoError::ConversionError { - from: format!("{:?}", bson), + from: format!("{bson:?}"), to: "Document".to_owned(), }) } @@ -468,7 +467,7 @@ where fn format_opt(opt: Option) -> String { match opt { - Some(t) => format!("{}", t), + Some(t) => format!("{t}"), None => "None".to_owned(), } } diff --git a/query-engine/connectors/query-connector/src/error.rs b/query-engine/connectors/query-connector/src/error.rs index 7337e1a7225b..e279b64a73f7 100644 --- a/query-engine/connectors/query-connector/src/error.rs +++ b/query-engine/connectors/query-connector/src/error.rs @@ -45,7 +45,7 @@ impl ConnectorError { ErrorKind::ForeignKeyConstraintViolation { constraint } => { let field_name = match constraint { DatabaseConstraint::Fields(fields) => fields.join(","), - DatabaseConstraint::Index(index) => format!("{} (index)", index), + DatabaseConstraint::Index(index) => format!("{index} (index)"), DatabaseConstraint::ForeignKey => "foreign key".to_string(), DatabaseConstraint::CannotParse => "(not available)".to_string(), }; @@ -56,7 +56,7 @@ impl ConnectorError { } ErrorKind::ConversionError(message) => Some(KnownError::new( user_facing_errors::query_engine::InconsistentColumnData { - message: format!("{}", message), + message: format!("{message}"), }, )), ErrorKind::QueryInvalidInput(message) => Some(KnownError::new( @@ -70,7 +70,7 @@ impl ConnectorError { })) } ErrorKind::MultiError(merror) => Some(KnownError::new(user_facing_errors::query_engine::MultiError { - errors: format!("{}", merror), + errors: format!("{merror}"), })), ErrorKind::UniqueConstraintViolation { constraint } => { Some(KnownError::new(user_facing_errors::query_engine::UniqueKeyViolation { diff --git a/query-engine/connectors/query-connector/src/write_args.rs b/query-engine/connectors/query-connector/src/write_args.rs index 51c6366806a6..48636aacf789 100644 --- a/query-engine/connectors/query-connector/src/write_args.rs +++ b/query-engine/connectors/query-connector/src/write_args.rs @@ -240,7 +240,7 @@ impl FieldPath { if self.path.is_empty() { alias.to_owned() } else { - format!("${}.{}", alias, rendered_path) + format!("${alias}.{rendered_path}") } } else { rendered_path @@ -258,7 +258,7 @@ impl FieldPath { if self.path.is_empty() { alias.to_owned() } else { - format!("{}_{}", alias, rendered_path) + format!("{alias}_{rendered_path}") } } else { rendered_path @@ -345,8 +345,7 @@ impl TryInto for WriteOperation { WriteOperation::Scalar(ScalarWriteOperation::Set(pv)) => Ok(pv), WriteOperation::Composite(CompositeWriteOperation::Set(pv)) => Ok(pv), x => Err(ConnectorError::from_kind(ErrorKind::InternalConversionError(format!( - "Unable to convert write expression {:?} into prisma value.", - x + "Unable to convert write expression {x:?} into prisma value." 
)))), } } diff --git a/query-engine/connectors/sql-query-connector/src/database/sqlite.rs b/query-engine/connectors/sql-query-connector/src/database/sqlite.rs index baeecad820a1..263bdbd150f6 100644 --- a/query-engine/connectors/sql-query-connector/src/database/sqlite.rs +++ b/query-engine/connectors/sql-query-connector/src/database/sqlite.rs @@ -72,8 +72,7 @@ impl FromSource for Sqlite { fn invalid_file_path_error(file_path: &str, connection_info: &ConnectionInfo) -> ConnectorError { SqlError::ConnectionError(QuaintKind::DatabaseUrlIsInvalid(format!( - "\"{}\" is not a valid sqlite file path", - file_path + "\"{file_path}\" is not a valid sqlite file path" ))) .into_connector_error(&connection_info) } diff --git a/query-engine/connectors/sql-query-connector/src/error.rs b/query-engine/connectors/sql-query-connector/src/error.rs index 7f254559f06b..aca07a46897d 100644 --- a/query-engine/connectors/sql-query-connector/src/error.rs +++ b/query-engine/connectors/sql-query-connector/src/error.rs @@ -31,8 +31,7 @@ impl From for SqlError { RawError::UnsupportedColumnType { column_type } => Self::RawError { code: String::from("N/A"), message: format!( - r#"Failed to deserialize column of type '{}'. If you're using $queryRaw and this column is explicitly marked as `Unsupported` in your Prisma schema, try casting this column to any supported Prisma type such as `String`."#, - column_type + r#"Failed to deserialize column of type '{column_type}'. If you're using $queryRaw and this column is explicitly marked as `Unsupported` in your Prisma schema, try casting this column to any supported Prisma type such as `String`."# ), }, RawError::ConnectionClosed => Self::ConnectionClosed, @@ -286,14 +285,14 @@ impl From for SqlError { QuaintKind::MissingFullTextSearchIndex => Self::MissingFullTextSearchIndex, e @ QuaintKind::ConnectionError(_) => Self::ConnectionError(e), QuaintKind::ColumnReadFailure(e) => Self::ColumnReadFailure(e), - QuaintKind::ColumnNotFound { column } => SqlError::ColumnDoesNotExist(format!("{}", column)), - QuaintKind::TableDoesNotExist { table } => SqlError::TableDoesNotExist(format!("{}", table)), + QuaintKind::ColumnNotFound { column } => SqlError::ColumnDoesNotExist(format!("{column}")), + QuaintKind::TableDoesNotExist { table } => SqlError::TableDoesNotExist(format!("{table}")), QuaintKind::ConnectionClosed => SqlError::ConnectionClosed, QuaintKind::InvalidIsolationLevel(msg) => Self::InvalidIsolationLevel(msg), QuaintKind::TransactionWriteConflict => Self::TransactionWriteConflict, QuaintKind::RollbackWithoutBegin => Self::RollbackWithoutBegin, e @ QuaintKind::UnsupportedColumnType { .. } => SqlError::ConversionError(e.into()), - e @ QuaintKind::TransactionAlreadyClosed(_) => SqlError::TransactionAlreadyClosed(format!("{}", e)), + e @ QuaintKind::TransactionAlreadyClosed(_) => SqlError::TransactionAlreadyClosed(format!("{e}")), e @ QuaintKind::IncorrectNumberOfParameters { .. } => SqlError::QueryError(e.into()), e @ QuaintKind::ConversionError(_) => SqlError::ConversionError(e.into()), e @ QuaintKind::ResultIndexOutOfBounds { .. 
} => SqlError::QueryError(e.into()), diff --git a/query-engine/connectors/sql-query-connector/src/filter_conversion.rs b/query-engine/connectors/sql-query-connector/src/filter_conversion.rs index dc3e12e0eb17..769db6d70e38 100644 --- a/query-engine/connectors/sql-query-connector/src/filter_conversion.rs +++ b/query-engine/connectors/sql-query-connector/src/filter_conversion.rs @@ -617,7 +617,7 @@ fn with_json_type_filter( comparable.or(expr_json.json_type_not_equals(JsonType::Number)).into() } serde_json::Value::Number(_) => comparable.and(expr_json.json_type_equals(JsonType::Number)).into(), - v => panic!("JSON target types only accept strings or numbers, found: {}", v), + v => panic!("JSON target types only accept strings or numbers, found: {v}"), } } _ => unreachable!(), @@ -644,7 +644,7 @@ fn default_scalar_filter( ScalarCondition::Equals(value) => comparable.equals(convert_first_value(fields, value, alias, ctx)), ScalarCondition::NotEquals(value) => comparable.not_equals(convert_first_value(fields, value, alias, ctx)), ScalarCondition::Contains(value) => match value { - ConditionValue::Value(value) => comparable.like(format!("%{}%", value)), + ConditionValue::Value(value) => comparable.like(format!("%{value}%")), ConditionValue::FieldRef(field_ref) => comparable.like(quaint::ast::concat::<'_, Expression<'_>>(vec![ Value::text("%").raw().into(), field_ref.aliased_col(alias, ctx).into(), @@ -652,7 +652,7 @@ fn default_scalar_filter( ])), }, ScalarCondition::NotContains(value) => match value { - ConditionValue::Value(value) => comparable.not_like(format!("%{}%", value)), + ConditionValue::Value(value) => comparable.not_like(format!("%{value}%")), ConditionValue::FieldRef(field_ref) => { comparable.not_like(quaint::ast::concat::<'_, Expression<'_>>(vec![ Value::text("%").raw().into(), @@ -662,14 +662,14 @@ fn default_scalar_filter( } }, ScalarCondition::StartsWith(value) => match value { - ConditionValue::Value(value) => comparable.like(format!("{}%", value)), + ConditionValue::Value(value) => comparable.like(format!("{value}%")), ConditionValue::FieldRef(field_ref) => comparable.like(quaint::ast::concat::<'_, Expression<'_>>(vec![ field_ref.aliased_col(alias, ctx).into(), Value::text("%").raw().into(), ])), }, ScalarCondition::NotStartsWith(value) => match value { - ConditionValue::Value(value) => comparable.not_like(format!("{}%", value)), + ConditionValue::Value(value) => comparable.not_like(format!("{value}%")), ConditionValue::FieldRef(field_ref) => { comparable.not_like(quaint::ast::concat::<'_, Expression<'_>>(vec![ field_ref.aliased_col(alias, ctx).into(), @@ -678,14 +678,14 @@ fn default_scalar_filter( } }, ScalarCondition::EndsWith(value) => match value { - ConditionValue::Value(value) => comparable.like(format!("%{}", value)), + ConditionValue::Value(value) => comparable.like(format!("%{value}")), ConditionValue::FieldRef(field_ref) => comparable.like(quaint::ast::concat::<'_, Expression<'_>>(vec![ Value::text("%").raw().into(), field_ref.aliased_col(alias, ctx).into(), ])), }, ScalarCondition::NotEndsWith(value) => match value { - ConditionValue::Value(value) => comparable.not_like(format!("%{}", value)), + ConditionValue::Value(value) => comparable.not_like(format!("%{value}")), ConditionValue::FieldRef(field_ref) => { comparable.not_like(quaint::ast::concat::<'_, Expression<'_>>(vec![ Value::text("%").raw().into(), @@ -773,18 +773,18 @@ fn insensitive_scalar_filter( let condition = match cond { ScalarCondition::Equals(ConditionValue::Value(PrismaValue::Null)) => 
comparable.is_null(), ScalarCondition::Equals(value) => match value { - ConditionValue::Value(value) => comparable.compare_raw("ILIKE", format!("{}", value)), + ConditionValue::Value(value) => comparable.compare_raw("ILIKE", format!("{value}")), ConditionValue::FieldRef(field_ref) => comparable.compare_raw("ILIKE", field_ref.aliased_col(alias, ctx)), }, ScalarCondition::NotEquals(ConditionValue::Value(PrismaValue::Null)) => comparable.is_not_null(), ScalarCondition::NotEquals(value) => match value { - ConditionValue::Value(value) => comparable.compare_raw("NOT ILIKE", format!("{}", value)), + ConditionValue::Value(value) => comparable.compare_raw("NOT ILIKE", format!("{value}")), ConditionValue::FieldRef(field_ref) => { comparable.compare_raw("NOT ILIKE", field_ref.aliased_col(alias, ctx)) } }, ScalarCondition::Contains(value) => match value { - ConditionValue::Value(value) => comparable.compare_raw("ILIKE", format!("%{}%", value)), + ConditionValue::Value(value) => comparable.compare_raw("ILIKE", format!("%{value}%")), ConditionValue::FieldRef(field_ref) => comparable.compare_raw( "ILIKE", concat::<'_, Expression<'_>>(vec![ @@ -795,7 +795,7 @@ fn insensitive_scalar_filter( ), }, ScalarCondition::NotContains(value) => match value { - ConditionValue::Value(value) => comparable.compare_raw("NOT ILIKE", format!("%{}%", value)), + ConditionValue::Value(value) => comparable.compare_raw("NOT ILIKE", format!("%{value}%")), ConditionValue::FieldRef(field_ref) => comparable.compare_raw( "NOT ILIKE", concat::<'_, Expression<'_>>(vec![ @@ -806,28 +806,28 @@ fn insensitive_scalar_filter( ), }, ScalarCondition::StartsWith(value) => match value { - ConditionValue::Value(value) => comparable.compare_raw("ILIKE", format!("{}%", value)), + ConditionValue::Value(value) => comparable.compare_raw("ILIKE", format!("{value}%")), ConditionValue::FieldRef(field_ref) => comparable.compare_raw( "ILIKE", concat::<'_, Expression<'_>>(vec![field_ref.aliased_col(alias, ctx).into(), Value::text("%").into()]), ), }, ScalarCondition::NotStartsWith(value) => match value { - ConditionValue::Value(value) => comparable.compare_raw("NOT ILIKE", format!("{}%", value)), + ConditionValue::Value(value) => comparable.compare_raw("NOT ILIKE", format!("{value}%")), ConditionValue::FieldRef(field_ref) => comparable.compare_raw( "NOT ILIKE", concat::<'_, Expression<'_>>(vec![field_ref.aliased_col(alias, ctx).into(), Value::text("%").into()]), ), }, ScalarCondition::EndsWith(value) => match value { - ConditionValue::Value(value) => comparable.compare_raw("ILIKE", format!("%{}", value)), + ConditionValue::Value(value) => comparable.compare_raw("ILIKE", format!("%{value}")), ConditionValue::FieldRef(field_ref) => comparable.compare_raw( "ILIKE", concat::<'_, Expression<'_>>(vec![Value::text("%").into(), field_ref.aliased_col(alias, ctx).into()]), ), }, ScalarCondition::NotEndsWith(value) => match value { - ConditionValue::Value(value) => comparable.compare_raw("NOT ILIKE", format!("%{}", value)), + ConditionValue::Value(value) => comparable.compare_raw("NOT ILIKE", format!("%{value}")), ConditionValue::FieldRef(field_ref) => comparable.compare_raw( "NOT ILIKE", concat::<'_, Expression<'_>>(vec![Value::text("%").into(), field_ref.aliased_col(alias, ctx).into()]), @@ -1040,7 +1040,7 @@ impl JsonFilterExt for (Expression<'static>, Expression<'static>) { match (value, target_type) { // string_contains (value) (ConditionValue::Value(value), JsonTargetType::String) => { - let contains = expr_string.like(format!("%{}%", value)); + let contains = 
expr_string.like(format!("%{value}%")); if reverse { contains.or(expr_json.json_type_not_equals(JsonType::String)).into() @@ -1098,7 +1098,7 @@ impl JsonFilterExt for (Expression<'static>, Expression<'static>) { match (value, target_type) { // string_starts_with (value) (ConditionValue::Value(value), JsonTargetType::String) => { - let starts_with = expr_string.like(format!("{}%", value)); + let starts_with = expr_string.like(format!("{value}%")); if reverse { starts_with.or(expr_json.json_type_not_equals(JsonType::String)).into() @@ -1158,7 +1158,7 @@ impl JsonFilterExt for (Expression<'static>, Expression<'static>) { match (value, target_type) { // string_ends_with (value) (ConditionValue::Value(value), JsonTargetType::String) => { - let ends_with = expr_string.like(format!("%{}", value)); + let ends_with = expr_string.like(format!("%{value}")); if reverse { ends_with.or(expr_json.json_type_not_equals(JsonType::String)).into() diff --git a/query-engine/connectors/sql-query-connector/src/model_extensions/relation.rs b/query-engine/connectors/sql-query-connector/src/model_extensions/relation.rs index 2d34908ef235..165f2f0007e2 100644 --- a/query-engine/connectors/sql-query-connector/src/model_extensions/relation.rs +++ b/query-engine/connectors/sql-query-connector/src/model_extensions/relation.rs @@ -21,7 +21,7 @@ impl RelationFieldExt for RelationField { if references.len() > 1 { references .iter() - .map(|to_field| format!("{}_{}", prefix, to_field)) + .map(|to_field| format!("{prefix}_{to_field}")) .map(|name| Column::from(name).table(self.as_table(ctx))) .collect() } else { diff --git a/query-engine/connectors/sql-query-connector/src/nested_aggregations.rs b/query-engine/connectors/sql-query-connector/src/nested_aggregations.rs index 7b4f89c84e0d..91236e77024a 100644 --- a/query-engine/connectors/sql-query-connector/src/nested_aggregations.rs +++ b/query-engine/connectors/sql-query-connector/src/nested_aggregations.rs @@ -20,7 +20,7 @@ pub(crate) fn build(aggr_selections: &[RelAggregationSelection], ctx: &Context<' for (index, selection) in aggr_selections.iter().enumerate() { match selection { RelAggregationSelection::Count(rf, filter) => { - let join_alias = format!("aggr_selection_{}", index); + let join_alias = format!("aggr_selection_{index}"); let aggregator_alias = selection.db_alias(); let join = compute_aggr_join( rf, diff --git a/query-engine/connectors/sql-query-connector/src/query_ext.rs b/query-engine/connectors/sql-query-connector/src/query_ext.rs index 34edc19cf060..164ce277d03e 100644 --- a/query-engine/connectors/sql-query-connector/src/query_ext.rs +++ b/query-engine/connectors/sql-query-connector/src/query_ext.rs @@ -82,7 +82,7 @@ pub(crate) trait QueryExt: Queryable + Send + Sync { let mut object = Map::new(); for (idx, p_value) in row.into_iter().enumerate() { - let column_name = columns.get(idx).unwrap_or(&format!("f{}", idx)).clone(); + let column_name = columns.get(idx).unwrap_or(&format!("f{idx}")).clone(); object.insert(column_name, p_value.as_typed_json()); } diff --git a/query-engine/connectors/sql-query-connector/src/sql_trace.rs b/query-engine/connectors/sql-query-connector/src/sql_trace.rs index 7f2ab235c35e..96f4f50ab405 100644 --- a/query-engine/connectors/sql-query-connector/src/sql_trace.rs +++ b/query-engine/connectors/sql-query-connector/src/sql_trace.rs @@ -8,7 +8,7 @@ pub fn trace_parent_to_string(context: &SpanContext) -> String { let span_id = context.span_id(); // see https://www.w3.org/TR/trace-context/#traceparent-header-field-values - 
format!("traceparent=00-{:032x}-{:032x}-01", trace_id, span_id) + format!("traceparent=00-{trace_id:032x}-{span_id:032x}-01") } pub trait SqlTraceComment: Sized { diff --git a/query-engine/core/src/error.rs b/query-engine/core/src/error.rs index 2e41d96d7b9d..c07252c60166 100644 --- a/query-engine/core/src/error.rs +++ b/query-engine/core/src/error.rs @@ -78,8 +78,7 @@ pub enum CoreError { impl CoreError { pub fn null_serialization_error(field_name: &str) -> Self { CoreError::SerializationError(format!( - "Inconsistent query result: Field {} is required to return data, got `null` instead.", - field_name + "Inconsistent query result: Field {field_name} is required to return data, got `null` instead." )) } @@ -138,13 +137,13 @@ impl From for CoreError { impl From for CoreError { fn from(e: url::ParseError) -> Self { - Self::ConfigurationError(format!("Error parsing connection string: {}", e)) + Self::ConfigurationError(format!("Error parsing connection string: {e}")) } } impl From for CoreError { fn from(e: connection_string::Error) -> Self { - Self::ConfigurationError(format!("Error parsing connection string: {}", e)) + Self::ConfigurationError(format!("Error parsing connection string: {e}")) } } @@ -256,7 +255,7 @@ impl From for user_facing_errors::Error { }) .into(), _ => user_facing_errors::KnownError::new(user_facing_errors::query_engine::InterpretationError { - details: format!("{}: {}", msg, cause), + details: format!("{msg}: {cause}"), }) .into(), } diff --git a/query-engine/core/src/executor/loader.rs b/query-engine/core/src/executor/loader.rs index f867e003d0e1..5c063866fdd0 100644 --- a/query-engine/core/src/executor/loader.rs +++ b/query-engine/core/src/executor/loader.rs @@ -26,8 +26,7 @@ pub async fn load( p if MONGODB.is_provider(p) => mongodb(source, url, features).await, x => Err(CoreError::ConfigurationError(format!( - "Unsupported connector type: {}", - x + "Unsupported connector type: {x}" ))), } } diff --git a/query-engine/core/src/interactive_transactions/actors.rs b/query-engine/core/src/interactive_transactions/actors.rs index ebc64a36e253..4fe7792d44ef 100644 --- a/query-engine/core/src/interactive_transactions/actors.rs +++ b/query-engine/core/src/interactive_transactions/actors.rs @@ -236,7 +236,7 @@ impl ITXClient { } other => { error!("Unexpected iTx response, {}", other); - let reason = format!("response '{}'", other); + let reason = format!("response '{other}'"); TransactionError::Closed { reason } } } diff --git a/query-engine/core/src/interactive_transactions/mod.rs b/query-engine/core/src/interactive_transactions/mod.rs index d3c73f705123..def10bfc10ef 100644 --- a/query-engine/core/src/interactive_transactions/mod.rs +++ b/query-engine/core/src/interactive_transactions/mod.rs @@ -100,7 +100,7 @@ impl CachedTx { if let Self::Open(ref mut otx) = self { Ok(otx) } else { - let reason = format!("Transaction is no longer valid. Last state: '{}'", self); + let reason = format!("Transaction is no longer valid. 
Last state: '{self}'"); Err(CoreError::from(TransactionError::Closed { reason })) } } diff --git a/query-engine/core/src/interpreter/error.rs b/query-engine/core/src/interpreter/error.rs index 659f43d41c0f..c6485d506b84 100644 --- a/query-engine/core/src/interpreter/error.rs +++ b/query-engine/core/src/interpreter/error.rs @@ -28,15 +28,15 @@ pub enum InterpreterError { impl fmt::Display for InterpreterError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { - Self::QueryGraphBuilderError(e) => write!(f, "{:?}", e), - _ => write!(f, "Error occurred during query execution:\n{:?}", self), + Self::QueryGraphBuilderError(e) => write!(f, "{e:?}"), + _ => write!(f, "Error occurred during query execution:\n{self:?}"), } } } impl From for InterpreterError { fn from(e: CoreError) -> Self { - InterpreterError::Generic(format!("{:?}", e)) + InterpreterError::Generic(format!("{e:?}")) } } diff --git a/query-engine/core/src/interpreter/expressionista.rs b/query-engine/core/src/interpreter/expressionista.rs index 441c322da4cb..e2e667daafd7 100644 --- a/query-engine/core/src/interpreter/expressionista.rs +++ b/query-engine/core/src/interpreter/expressionista.rs @@ -348,8 +348,7 @@ impl Expressionista { let binding = match env.get(&parent_binding_name) { Some(binding) => Ok(binding), None => Err(InterpreterError::EnvVarNotFound(format!( - "Expected parent binding '{}' to be present.", - parent_binding_name + "Expected parent binding '{parent_binding_name}' to be present." ))), }?; @@ -365,7 +364,7 @@ impl Expressionista { res.map_err(|err| { InterpreterError::InterpretationError( - format!("Error for binding '{}'", parent_binding_name), + format!("Error for binding '{parent_binding_name}'"), Some(Box::new(err)), ) }) diff --git a/query-engine/core/src/interpreter/interpreter.rs b/query-engine/core/src/interpreter/interpreter.rs index f8cee91b6e35..21b6c1f163a5 100644 --- a/query-engine/core/src/interpreter/interpreter.rs +++ b/query-engine/core/src/interpreter/interpreter.rs @@ -228,7 +228,7 @@ impl<'conn> QueryInterpreter<'conn> { Expression::Query { query } => Box::pin(async move { match *query { Query::Read(read) => { - self.log_line(level, || format!("READ {}", read)); + self.log_line(level, || format!("READ {read}")); let span = info_span!("prisma:engine:read-execute"); Ok(read::execute(self.conn, read, None, trace_id) .instrument(span) @@ -237,7 +237,7 @@ impl<'conn> QueryInterpreter<'conn> { } Query::Write(write) => { - self.log_line(level, || format!("WRITE {}", write)); + self.log_line(level, || format!("WRITE {write}")); let span = info_span!("prisma:engine:write-execute"); Ok(write::execute(self.conn, write, trace_id) .instrument(span) @@ -248,12 +248,12 @@ impl<'conn> QueryInterpreter<'conn> { }), Expression::Get { binding_name } => Box::pin(async move { - self.log_line(level, || format!("GET {}", binding_name)); + self.log_line(level, || format!("GET {binding_name}")); env.clone().remove(&binding_name) }), Expression::GetFirstNonEmpty { binding_names } => Box::pin(async move { - self.log_line(level, || format!("GET FIRST NON EMPTY {:?}", binding_names)); + self.log_line(level, || format!("GET FIRST NON EMPTY {binding_names:?}")); Ok(binding_names .into_iter() diff --git a/query-engine/core/src/query_ast/mod.rs b/query-engine/core/src/query_ast/mod.rs index c981960f70b2..871619660d59 100644 --- a/query-engine/core/src/query_ast/mod.rs +++ b/query-engine/core/src/query_ast/mod.rs @@ -95,8 +95,8 @@ pub trait FilteredNestedMutation { impl std::fmt::Display for Query { fn 
fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { match self { - Self::Read(q) => write!(f, "{}", q), - Self::Write(q) => write!(f, "{}", q), + Self::Read(q) => write!(f, "{q}"), + Self::Write(q) => write!(f, "{q}"), } } } diff --git a/query-engine/core/src/query_document/error.rs b/query-engine/core/src/query_document/error.rs index 5b6fb41ef80a..71ed810c41a1 100644 --- a/query-engine/core/src/query_document/error.rs +++ b/query-engine/core/src/query_document/error.rs @@ -71,25 +71,25 @@ pub enum QueryParserErrorKind { impl Display for QueryParserErrorKind { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { - Self::AssertionError(reason) => write!(f, "Assertion error: {}.", reason), + Self::AssertionError(reason) => write!(f, "Assertion error: {reason}."), Self::RequiredValueNotSetError => write!(f, "A value is required but not set."), Self::FieldNotFoundError => write!(f, "Field does not exist on enclosing type."), Self::ArgumentNotFoundError => write!(f, "Argument does not exist on enclosing type."), - Self::FieldCountError(err) => write!(f, "{}", err), - Self::ValueParseError(reason) => write!(f, "Error parsing value: {}.", reason), + Self::FieldCountError(err) => write!(f, "{err}"), + Self::ValueParseError(reason) => write!(f, "Error parsing value: {reason}."), Self::InputUnionParseError { parsing_errors } => write!( f, "Unable to match input value to any allowed input type for the field. Parse errors: [{}]", parsing_errors .iter() - .map(|err| format!("{}", err)) + .map(|err| format!("{err}")) .collect::>() .join(", ") ), Self::ValueTypeMismatchError { have, want } => { - write!(f, "Value types mismatch. Have: {:?}, want: {:?}", have, want) + write!(f, "Value types mismatch. Have: {have:?}, want: {want:?}") } - Self::ValueFitError(s) => write!(f, "{}", s), + Self::ValueFitError(s) => write!(f, "{s}"), } } } @@ -169,7 +169,7 @@ impl From for QueryParserError { fn from(err: prisma_models::DomainError) -> Self { QueryParserError { path: QueryPath::default(), - error_kind: QueryParserErrorKind::AssertionError(format!("Domain error occurred: {}", err)), + error_kind: QueryParserErrorKind::AssertionError(format!("Domain error occurred: {err}")), } } } diff --git a/query-engine/core/src/query_document/parser.rs b/query-engine/core/src/query_document/parser.rs index b127b510f511..f51fb2b71868 100644 --- a/query-engine/core/src/query_document/parser.rs +++ b/query-engine/core/src/query_document/parser.rs @@ -294,8 +294,7 @@ impl QueryDocumentParser { prisma_value::parse_datetime(s).map_err(|err| QueryParserError { path: path.clone(), error_kind: QueryParserErrorKind::ValueParseError(format!( - "Invalid DateTime: '{}' (must be ISO 8601 compatible). Underlying error: {}", - s, err + "Invalid DateTime: '{s}' (must be ISO 8601 compatible). Underlying error: {err}" )), }) } @@ -306,8 +305,7 @@ impl QueryDocumentParser { .map_err(|_| QueryParserError { path: path.clone(), error_kind: QueryParserErrorKind::ValueParseError(format!( - "'{}' is not a valid base64 encoded string.", - s + "'{s}' is not a valid base64 encoded string." 
)), }) } @@ -317,14 +315,14 @@ { .map(PrismaValue::Float) .map_err(|_| QueryParserError { path: path.clone(), - error_kind: QueryParserErrorKind::ValueParseError(format!("'{}' is not a valid decimal string", s)), + error_kind: QueryParserErrorKind::ValueParseError(format!("'{s}' is not a valid decimal string")), }) } fn parse_bigint(&self, path: &QueryPath, s: String) -> QueryParserResult<PrismaValue> { s.parse::<i64>().map(PrismaValue::BigInt).map_err(|_| QueryParserError { path: path.clone(), - error_kind: QueryParserErrorKind::ValueParseError(format!("'{}' is not a valid big integer string", s)), + error_kind: QueryParserErrorKind::ValueParseError(format!("'{s}' is not a valid big integer string")), }) } @@ -354,14 +352,14 @@ fn parse_json(&self, path: &QueryPath, s: &str) -> QueryParserResult<serde_json::Value> { serde_json::from_str(s).map_err(|err| QueryParserError { path: path.clone(), - error_kind: QueryParserErrorKind::ValueParseError(format!("Invalid json: {}", err)), + error_kind: QueryParserErrorKind::ValueParseError(format!("Invalid json: {err}")), }) } fn parse_uuid(&self, path: &QueryPath, s: &str) -> QueryParserResult<Uuid> { Uuid::parse_str(s).map_err(|err| QueryParserError { path: path.clone(), - error_kind: QueryParserErrorKind::ValueParseError(format!("Invalid UUID: {}", err)), + error_kind: QueryParserErrorKind::ValueParseError(format!("Invalid UUID: {err}")), }) } @@ -398,8 +396,7 @@ Err(QueryParserError { path: path.clone(), error_kind: QueryParserErrorKind::ValueParseError(format!( - "Enum value '{}' is invalid for enum type {}", - raw, name + "Enum value '{raw}' is invalid for enum type {name}" )), }) }; diff --git a/query-engine/core/src/query_document/transformers.rs b/query-engine/core/src/query_document/transformers.rs index 6117a2460ca2..77aee25ccb97 100644 --- a/query-engine/core/src/query_document/transformers.rs +++ b/query-engine/core/src/query_document/transformers.rs @@ -31,8 +30,7 @@ impl TryFrom<ParsedInputValue> for PrismaValue { v => Err(QueryParserError { path: QueryPath::default(), error_kind: QueryParserErrorKind::AssertionError(format!( - "Attempted conversion of ParsedInputValue ({:?}) into PrismaValue failed.", - v + "Attempted conversion of ParsedInputValue ({v:?}) into PrismaValue failed." )), }), } @@ -48,8 +47,7 @@ impl TryFrom<ParsedInputValue> for ParsedInputMap { v => Err(QueryParserError { path: QueryPath::default(), error_kind: QueryParserErrorKind::AssertionError(format!( - "Attempted conversion of non-map ParsedInputValue ({:?}) into map failed.", - v + "Attempted conversion of non-map ParsedInputValue ({v:?}) into map failed." )), }), } @@ -66,8 +64,7 @@ impl TryFrom<ParsedInputValue> for Option<ParsedInputMap> { v => Err(QueryParserError { path: QueryPath::default(), error_kind: QueryParserErrorKind::AssertionError(format!( - "Attempted conversion of non-map ParsedInputValue ({:?}) into Option map failed.", - v + "Attempted conversion of non-map ParsedInputValue ({v:?}) into Option map failed." )), }), } @@ -83,8 +80,7 @@ impl TryFrom<ParsedInputValue> for ParsedInputList { v => Err(QueryParserError { path: QueryPath::default(), error_kind: QueryParserErrorKind::AssertionError(format!( - "Attempted conversion of non-list ParsedInputValue ({:?}) into list failed.", - v + "Attempted conversion of non-list ParsedInputValue ({v:?}) into list failed."
)), }), } @@ -103,8 +99,7 @@ impl TryFrom<ParsedInputValue> for Vec<PrismaValue> { v => Err(QueryParserError { path: QueryPath::default(), error_kind: QueryParserErrorKind::AssertionError(format!( - "Attempted conversion of non-list ParsedInputValue ({:?}) into prisma value list failed.", - v + "Attempted conversion of non-list ParsedInputValue ({v:?}) into prisma value list failed." )), }), } @@ -124,8 +119,7 @@ impl TryFrom<ParsedInputValue> for Option<String> { v => Err(QueryParserError { path: QueryPath::default(), error_kind: QueryParserErrorKind::AssertionError(format!( - "Attempted conversion of non-String Prisma value type ({:?}) into String failed.", - v + "Attempted conversion of non-String Prisma value type ({v:?}) into String failed." )), }), } @@ -141,8 +135,7 @@ impl TryFrom<ParsedInputValue> for OrderBy { v => Err(QueryParserError { path: QueryPath::default(), error_kind: QueryParserErrorKind::AssertionError(format!( - "Attempted conversion of non-order-by enum ({:?}) into order by enum value failed.", - v + "Attempted conversion of non-order-by enum ({v:?}) into order by enum value failed." )), }), } @@ -158,8 +151,7 @@ impl TryFrom<ParsedInputValue> for ScalarFieldRef { v => Err(QueryParserError { path: QueryPath::default(), error_kind: QueryParserErrorKind::AssertionError(format!( - "Attempted conversion of non-field-ref enum ({:?}) into scalar field reference value failed.", - v + "Attempted conversion of non-field-ref enum ({v:?}) into scalar field reference value failed." )), }), } @@ -178,8 +170,7 @@ impl TryFrom<ParsedInputValue> for Option<f64> { v => Err(QueryParserError { path: QueryPath::default(), error_kind: QueryParserErrorKind::AssertionError(format!( - "Attempted conversion of non-float Prisma value type ({:?}) into float failed.", - v + "Attempted conversion of non-float Prisma value type ({v:?}) into float failed." )), }), } @@ -198,8 +189,7 @@ impl TryFrom<ParsedInputValue> for Option<bool> { v => Err(QueryParserError { path: QueryPath::default(), error_kind: QueryParserErrorKind::AssertionError(format!( - "Attempted conversion of non-bool Prisma value type ({:?}) into bool failed.", - v + "Attempted conversion of non-bool Prisma value type ({v:?}) into bool failed." )), }), } @@ -218,8 +208,7 @@ impl TryFrom<ParsedInputValue> for Option<DateTime<FixedOffset>> { v => Err(QueryParserError { path: QueryPath::default(), error_kind: QueryParserErrorKind::AssertionError(format!( - "Attempted conversion of non-DateTime Prisma value type ({:?}) into DateTime failed.", - v + "Attempted conversion of non-DateTime Prisma value type ({v:?}) into DateTime failed." )), }), } @@ -238,8 +227,7 @@ impl TryFrom<ParsedInputValue> for Option<i64> { v => Err(QueryParserError { path: QueryPath::default(), error_kind: QueryParserErrorKind::AssertionError(format!( - "Attempted conversion of non-int Prisma value type ({:?}) into int failed.", - v + "Attempted conversion of non-int Prisma value type ({v:?}) into int failed." )), }), } @@ -257,8 +245,7 @@ impl TryFrom<ParsedInputValue> for bool { v => Err(QueryParserError { path: QueryPath::default(), error_kind: QueryParserErrorKind::AssertionError(format!( - "Attempted conversion of non-boolean Prisma value type ({:?}) into bool failed.", - v + "Attempted conversion of non-boolean Prisma value type ({v:?}) into bool failed."
)), }), } diff --git a/query-engine/core/src/query_graph/formatters.rs b/query-engine/core/src/query_graph/formatters.rs index a102949e55b8..d984bedee979 100644 --- a/query-engine/core/src/query_graph/formatters.rs +++ b/query-engine/core/src/query_graph/formatters.rs @@ -71,9 +71,9 @@ impl Display for Computation { impl Display for Node { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { - Self::Query(q) => write!(f, "{}", q), - Self::Flow(flow) => write!(f, "{}", flow), - Self::Computation(c) => write!(f, "{}", c), + Self::Query(q) => write!(f, "{q}"), + Self::Flow(flow) => write!(f, "{flow}"), + Self::Computation(c) => write!(f, "{c}"), Self::Empty => write!(f, "Empty"), } } @@ -83,8 +83,8 @@ impl ToGraphviz for Node { fn to_graphviz(&self) -> String { match self { Node::Query(q) => q.to_graphviz(), - Node::Flow(f) => format!("{}", f), - Node::Computation(c) => format!("{}", c), + Node::Flow(f) => format!("{f}"), + Node::Computation(c) => format!("{c}"), Node::Empty => "Empty".to_string(), } } @@ -130,7 +130,7 @@ fn fmt_raw_indices(i: &[NodeIndex]) -> String { } fn fmt_node_list(v: &[NodeRef]) -> String { - let inner_string = v.iter().map(|x| format!("{}", x)).collect::<Vec<String>>().join(", "); + let inner_string = v.iter().map(|x| format!("{x}")).collect::<Vec<String>>().join(", "); format!("[{}]", inner_string.as_str()) } diff --git a/query-engine/core/src/query_graph/transformers.rs b/query-engine/core/src/query_graph/transformers.rs index 8d761eeb10f8..7924f8f25794 100644 --- a/query-engine/core/src/query_graph/transformers.rs +++ b/query-engine/core/src/query_graph/transformers.rs @@ -8,7 +8,7 @@ impl TryFrom<Node> for Query { match n { Node::Query(q) => Ok(q), x => Err(QueryGraphError::InvalidNodeTransformation { - from: format!("{}", x), + from: format!("{x}"), to: "Query".to_owned(), }), } @@ -22,7 +22,7 @@ impl TryFrom<Node> for Flow { match n { Node::Flow(f) => Ok(f), x => Err(QueryGraphError::InvalidNodeTransformation { - from: format!("{}", x), + from: format!("{x}"), to: "Flow".to_owned(), }), } diff --git a/query-engine/core/src/query_graph_builder/extractors/filters/composite.rs b/query-engine/core/src/query_graph_builder/extractors/filters/composite.rs index e25e3b8bb22a..7952caa3e669 100644 --- a/query-engine/core/src/query_graph_builder/extractors/filters/composite.rs +++ b/query-engine/core/src/query_graph_builder/extractors/filters/composite.rs @@ -34,8 +34,7 @@ pub fn parse(input_map: ParsedInputMap, field: &CompositeFieldRef, _reverse: boo (filters::IS_NOT, input) => Ok(field.is_not(extract_filter(input.try_into()?, &field.typ)?)), _ => Err(QueryGraphBuilderError::InputError(format!( - "Invalid filter key `{}` input combination for composite filter", - filter_key + "Invalid filter key `{filter_key}` input combination for composite filter" ))), } } else { diff --git a/query-engine/core/src/query_graph_builder/extractors/filters/filter_fold.rs b/query-engine/core/src/query_graph_builder/extractors/filters/filter_fold.rs index 6dd32dcc134a..46ef17314c3a 100644 --- a/query-engine/core/src/query_graph_builder/extractors/filters/filter_fold.rs +++ b/query-engine/core/src/query_graph_builder/extractors/filters/filter_fold.rs @@ -178,7 +178,7 @@ impl FilterVisitor { } } - fn visit_and(&self, filters: &Vec<Filter>) -> bool { + fn visit_and(&self, filters: &[Filter]) -> bool { let mut res = true; for (index, f) in filters.iter().enumerate() { @@ -214,7 +214,7 @@ impl FilterVisitor { res } - fn visit_or(&self, filters: &Vec<Filter>) -> bool { + fn visit_or(&self, filters: &[Filter]) -> bool { let mut res =
true; for (index, f) in filters.iter().enumerate() { @@ -250,7 +250,7 @@ impl FilterVisitor { res } - fn visit_not(&self, filters: &Vec<Filter>) -> bool { + fn visit_not(&self, filters: &[Filter]) -> bool { let mut res = true; for (index, f) in filters.iter().enumerate() { diff --git a/query-engine/core/src/query_graph_builder/extractors/filters/filter_grouping.rs b/query-engine/core/src/query_graph_builder/extractors/filters/filter_grouping.rs index 04144479eb32..97ce40fe8298 100644 --- a/query-engine/core/src/query_graph_builder/extractors/filters/filter_grouping.rs +++ b/query-engine/core/src/query_graph_builder/extractors/filters/filter_grouping.rs @@ -18,8 +17,7 @@ impl FromStr for FilterGrouping { filters::OR_LOWERCASE => Ok(Self::Or), filters::NOT_LOWERCASE => Ok(Self::Not), _ => Err(QueryGraphBuilderError::InputError(format!( - "{} is not a valid grouping filter operation", - s + "{s} is not a valid grouping filter operation" ))), } } } diff --git a/query-engine/core/src/query_graph_builder/extractors/filters/mod.rs b/query-engine/core/src/query_graph_builder/extractors/filters/mod.rs index 8e95fc8d5454..125c2593b4fc 100644 --- a/query-engine/core/src/query_graph_builder/extractors/filters/mod.rs +++ b/query-engine/core/src/query_graph_builder/extractors/filters/mod.rs @@ -291,8 +291,7 @@ fn extract_scalar_filters(field: &ScalarFieldRef, value: ParsedInputValue) -> Qu Ok(filters) } x => Err(QueryGraphBuilderError::InputError(format!( - "Invalid scalar filter input: {:?}", - x + "Invalid scalar filter input: {x:?}" ))), } } @@ -316,8 +315,7 @@ fn extract_relation_filters(field: &RelationFieldRef, value: ParsedInputValue) - } x => Err(QueryGraphBuilderError::InputError(format!( - "Invalid relation filter input: {:?}", - x + "Invalid relation filter input: {x:?}" ))), } } @@ -348,8 +346,7 @@ fn extract_composite_filters( ParsedInputValue::List(_) => Ok(vec![field.equals(PrismaValue::List(value.try_into()?))]), ParsedInputValue::Map(filter_map) => Ok(vec![composite::parse(filter_map, field, false)?]), x => Err(QueryGraphBuilderError::InputError(format!( - "Invalid composite filter input: {:?}", - x + "Invalid composite filter input: {x:?}" ))), } } diff --git a/query-engine/core/src/query_graph_builder/extractors/filters/relation.rs b/query-engine/core/src/query_graph_builder/extractors/filters/relation.rs index 2b469b2f25ee..67e64bda3bcd 100644 --- a/query-engine/core/src/query_graph_builder/extractors/filters/relation.rs +++ b/query-engine/core/src/query_graph_builder/extractors/filters/relation.rs @@ -21,8 +20,7 @@ pub fn parse(filter_key: &str, field: &RelationFieldRef, input: ParsedInputValue (filters::IS_NOT, None) => Ok(Filter::not(vec![field.one_relation_is_null()])), _ => Err(QueryGraphBuilderError::InputError(format!( - "Invalid filter key `{}` input combination for relation filter", - filter_key + "Invalid filter key `{filter_key}` input combination for relation filter" ))), } } diff --git a/query-engine/core/src/query_graph_builder/extractors/filters/scalar.rs b/query-engine/core/src/query_graph_builder/extractors/filters/scalar.rs index 8b770e7362d0..5b95b229acd1 100644 --- a/query-engine/core/src/query_graph_builder/extractors/filters/scalar.rs +++ b/query-engine/core/src/query_graph_builder/extractors/filters/scalar.rs @@ -187,8 +186,7 @@ impl<'a> ScalarFilterParser<'a> { _ => { return Err(QueryGraphBuilderError::InputError(format!( - "{} is not a valid scalar filter operation", - filter_name + "{filter_name} is not a valid scalar filter operation" ))) } } @@ -399,8 +398,7 @@
impl<'a> ScalarFilterParser<'a> { _ => { return Err(QueryGraphBuilderError::InputError(format!( - "{} is not a valid scalar filter operation", - filter_name + "{filter_name} is not a valid scalar filter operation" ))) } } @@ -450,12 +448,10 @@ impl<'a> ScalarFilterParser<'a> { if field_ref.is_list() { "[]" } else { "" }, ))), Some(Field::Relation(field_ref)) => Err(QueryGraphBuilderError::InputError(format!( - "Expected a referenced scalar field {} but found a relation field.", - field_ref + "Expected a referenced scalar field {field_ref} but found a relation field." ))), Some(Field::Composite(field_ref)) => Err(QueryGraphBuilderError::InputError(format!( - "Expected a referenced scalar field {} but found a composite field.", - field_ref + "Expected a referenced scalar field {field_ref} but found a composite field." ))), None => Err(QueryGraphBuilderError::InputError(format!( "The referenced scalar field {}.{} does not exist.", @@ -492,12 +488,10 @@ impl<'a> ScalarFilterParser<'a> { if field_ref.is_list() { "[]" } else { "" }, ))), Some(Field::Relation(rf)) => Err(QueryGraphBuilderError::InputError(format!( - "Expected a referenced scalar list field {} but found a relation field.", - rf + "Expected a referenced scalar list field {rf} but found a relation field." ))), Some(Field::Composite(cf)) => Err(QueryGraphBuilderError::InputError(format!( - "Expected a referenced scalar list field {} but found a composite field.", - cf + "Expected a referenced scalar list field {cf} but found a composite field." ))), _ => Err(QueryGraphBuilderError::InputError(format!( "The referenced scalar list field {}.{} does not exist.", diff --git a/query-engine/core/src/query_graph_builder/extractors/query_arguments.rs b/query-engine/core/src/query_graph_builder/extractors/query_arguments.rs index 55908e26d3ce..63551229351a 100644 --- a/query-engine/core/src/query_graph_builder/extractors/query_arguments.rs +++ b/query-engine/core/src/query_graph_builder/extractors/query_arguments.rs @@ -180,8 +180,7 @@ fn extract_order_by_relevance( PrismaValue::Enum(e) => Ok(vec![PrismaValue::String(e)]), PrismaValue::List(l) => Ok(l), x => Err(QueryGraphBuilderError::InputError(format!( - "Expected field `fields` to be of type String, Enum or List, found: {:?}", - x + "Expected field `fields` to be of type String, Enum or List, found: {x:?}" ))), }?; @@ -191,8 +190,7 @@ fn extract_order_by_relevance( .map(|field_name| match container.find_field(&field_name) { Some(Field::Scalar(sf)) => Ok(sf), _ => Err(QueryGraphBuilderError::InputError(format!( - "Invalid order-by reference input: Field {} is not a valid scalar field.", - field_name + "Invalid order-by reference input: Field {field_name} is not a valid scalar field." 
))), }) .collect::<Result<Vec<_>, _>>()?; @@ -270,8 +268,7 @@ fn extract_skip(value: ParsedInputValue) -> QueryGraphBuilderResult<Option<i64>> { match val { Some(val) if val < 0 => Err(QueryGraphBuilderError::AssertionError(format!( - "Invalid value for skip argument: Value can only be positive, found: {}", - val, + "Invalid value for skip argument: Value can only be positive, found: {val}", ))), val => Ok(val), diff --git a/query-engine/core/src/query_graph_builder/read/utils.rs b/query-engine/core/src/query_graph_builder/read/utils.rs index 308919af6308..b817a8581489 100644 --- a/query-engine/core/src/query_graph_builder/read/utils.rs +++ b/query-engine/core/src/query_graph_builder/read/utils.rs @@ -158,7 +158,7 @@ pub fn collect_relation_aggr_selections( selections.push(RelAggregationSelection::Count(rf, filter)); } } - field_name => panic!("Unknown field name \"{}\" for a relation aggregation", field_name), + field_name => panic!("Unknown field name \"{field_name}\" for a relation aggregation"), } } diff --git a/query-engine/core/src/query_graph_builder/write/connect.rs b/query-engine/core/src/query_graph_builder/write/connect.rs index 3a0ab7a83f79..f102bc0762d7 100644 --- a/query-engine/core/src/query_graph_builder/write/connect.rs +++ b/query-engine/core/src/query_graph_builder/write/connect.rs @@ -90,8 +90,7 @@ pub fn connect_records_node( if len != expected_connects { return Err(QueryGraphBuilderError::RecordNotFound(format!( - "Expected {} records to be connected, found only {}.", - expected_connects, len, + "Expected {expected_connects} records to be connected, found only {len}.", ))); } diff --git a/query-engine/core/src/query_graph_builder/write/disconnect.rs b/query-engine/core/src/query_graph_builder/write/disconnect.rs index c81184d70f63..ebb64bd1a9ed 100644 --- a/query-engine/core/src/query_graph_builder/write/disconnect.rs +++ b/query-engine/core/src/query_graph_builder/write/disconnect.rs @@ -63,8 +63,7 @@ pub fn disconnect_records_node( let parent_id = match parent_ids.pop() { Some(pid) => Ok(pid), None => Err(QueryGraphBuilderError::RecordNotFound(format!( - "No parent record was found for a nested disconnect on relation '{}'.", - relation_name + "No parent record was found for a nested disconnect on relation '{relation_name}'." ))), }?; diff --git a/query-engine/core/src/query_graph_builder/write/nested/connect_nested.rs b/query-engine/core/src/query_graph_builder/write/nested/connect_nested.rs index 9779ccaf3a1e..aa7986ba5d95 100644 --- a/query-engine/core/src/query_graph_builder/write/nested/connect_nested.rs +++ b/query-engine/core/src/query_graph_builder/write/nested/connect_nested.rs @@ -180,8 +180,7 @@ fn handle_one_to_many( let child_link = match child_links.pop() { Some(cl) => Ok(cl), None => Err(QueryGraphBuilderError::RecordNotFound(format!( - "No '{}' record(s) (needed to inline the relation on '{}' record(s)) was found for a nested connect on one-to-many relation '{}'.", - child_model_name, parent_model_name, relation_name + "No '{child_model_name}' record(s) (needed to inline the relation on '{parent_model_name}' record(s)) was found for a nested connect on one-to-many relation '{relation_name}'."
))), }?; @@ -210,8 +209,7 @@ fn handle_one_to_many( let parent_link = match parent_links.pop() { Some(pl) => Ok(pl), None => Err(QueryGraphBuilderError::RecordNotFound(format!( - "No '{}' record(s) (needed to inline the relation on '{}' record(s)) was found for a nested connect on one-to-many relation '{}'.", - parent_model_name, child_model_name, relation_name + "No '{parent_model_name}' record(s) (needed to inline the relation on '{child_model_name}' record(s)) was found for a nested connect on one-to-many relation '{relation_name}'." ))), }?; @@ -236,8 +234,7 @@ fn handle_one_to_many( if let QueryResult::Count(c) = query_result { if c != &expected_id_count { return Err(QueryGraphBuilderError::RecordNotFound(format!( - "Expected {} records to be connected after connect operation on one-to-many relation '{}', found {}.", - expected_id_count, relation_name, c, + "Expected {expected_id_count} records to be connected after connect operation on one-to-many relation '{relation_name}', found {c}.", ))); } } @@ -431,8 +428,7 @@ fn handle_one_to_one_parent_update( let child_id = match child_ids.pop() { Some(pid) => Ok(pid), None => Err(QueryGraphBuilderError::RecordNotFound(format!( - "No '{}' record to connect was found was found for a nested connect on one-to-one relation '{}'.", - child_model_name, relation_name + "No '{child_model_name}' record to connect was found for a nested connect on one-to-one relation '{relation_name}'." ))), }?; @@ -462,10 +458,7 @@ fn handle_one_to_one_parent_update( let parent_link = match parent_links.pop() { Some(link) => Ok(link), None => Err(QueryGraphBuilderError::RecordNotFound(format!( - "No '{}' record (needed to update inlined relation on '{}') was found for a nested connect on one-to-one relation '{}'.", - parent_model_name, - child_model_name, - relation_name + "No '{parent_model_name}' record (needed to update inlined relation on '{child_model_name}') was found for a nested connect on one-to-one relation '{relation_name}'." ))), }?; @@ -493,10 +486,7 @@ fn handle_one_to_one_parent_update( let child_link = match child_links.pop() { Some(link) => Ok(link), None => Err(QueryGraphBuilderError::RecordNotFound(format!( - "No '{}' record (needed to update inlined relation on '{}') was found for a nested connect on one-to-one relation '{}'.", - parent_model_name, - child_model_name, - relation_name + "No '{parent_model_name}' record (needed to update inlined relation on '{child_model_name}') was found for a nested connect on one-to-one relation '{relation_name}'." ))), }?; @@ -526,10 +516,7 @@ fn handle_one_to_one_parent_update( let parent_id = match parent_ids.pop() { Some(pid) => Ok(pid), None => Err(QueryGraphBuilderError::RecordNotFound(format!( - "No '{}' record (needed to update inlined relation on '{}') was found for a nested connect on relation '{}'.", - parent_model_name, - child_model_name, - relation_name + "No '{parent_model_name}' record (needed to update inlined relation on '{child_model_name}') was found for a nested connect on relation '{relation_name}'."
))), }?; @@ -587,8 +574,7 @@ fn handle_one_to_one_parent_create( let child_link = match child_links.pop() { Some(link) => Ok(link), None => Err(QueryGraphBuilderError::RecordNotFound(format!( - "No '{}' record (needed to inline connect on create for '{}' record) was found for a nested connect on one-to-one relation '{}'.", - child_model_name, parent_model_name, relation_name + "No '{child_model_name}' record (needed to inline connect on create for '{parent_model_name}' record) was found for a nested connect on one-to-one relation '{relation_name}'." ))), }?; @@ -623,8 +609,7 @@ fn handle_one_to_one_parent_create( let child_id = match child_ids.pop() { Some(pid) => Ok(pid), None => Err(QueryGraphBuilderError::RecordNotFound(format!( - "No '{}' record to connect was found was found for a nested connect on one-to-one relation '{}'.", - child_model_name, relation_name + "No '{child_model_name}' record to connect was found for a nested connect on one-to-one relation '{relation_name}'." ))), }?; @@ -648,10 +633,7 @@ fn handle_one_to_one_parent_create( let parent_link = match parent_links.pop() { Some(link) => Ok(link), None => Err(QueryGraphBuilderError::RecordNotFound(format!( - "No '{}' record (needed to update inlined relation on '{}') was found for a nested connect on one-to-one relation '{}'.", - parent_model_name, - child_model_name, - relation_name + "No '{parent_model_name}' record (needed to update inlined relation on '{child_model_name}') was found for a nested connect on one-to-one relation '{relation_name}'." ))), }?; diff --git a/query-engine/core/src/query_graph_builder/write/nested/connect_or_create_nested.rs b/query-engine/core/src/query_graph_builder/write/nested/connect_or_create_nested.rs index 85d9f043896c..52ec3c58f9b6 100644 --- a/query-engine/core/src/query_graph_builder/write/nested/connect_or_create_nested.rs +++ b/query-engine/core/src/query_graph_builder/write/nested/connect_or_create_nested.rs @@ -308,8 +307,7 @@ fn one_to_many_inlined_child( let parent_id = match parent_ids.pop() { Some(id) => Ok(id), None => Err(QueryGraphBuilderError::RecordNotFound(format!( - "No '{}' record (needed to inline the relation with a create on '{}' record(s)) was found for a nested connect or create on one-to-many relation '{}'.", - child_model_name, parent_model_name, relation_name + "No '{child_model_name}' record (needed to inline the relation with a create on '{parent_model_name}' record(s)) was found for a nested connect or create on one-to-many relation '{relation_name}'." ))), }?; @@ -336,8 +335,7 @@ fn one_to_many_inlined_child( let parent_id = match parent_ids.pop() { Some(id) => Ok(id), None => Err(QueryGraphBuilderError::RecordNotFound(format!( - "No '{}' record (needed to inline the relation the update for '{}' record(s)) was found for a nested connect or create on one-to-many relation '{}'.", - child_model_name, parent_model_name, relation_name + "No '{child_model_name}' record (needed to inline the relation with the update for '{parent_model_name}' record(s)) was found for a nested connect or create on one-to-many relation '{relation_name}'."
))), }?; @@ -677,8 +675,7 @@ fn one_to_one_inlined_parent( let parent_id = match parent_ids.pop() { Some(id) => Ok(id), None => Err(QueryGraphBuilderError::RecordNotFound(format!( - "No '{}' record (needed to inline the relation with an update on '{}' record(s)) was found for a nested connect or create on one-to-one relation '{}'.", - child_model_name, parent_model_name, relation_name + "No '{child_model_name}' record (needed to inline the relation with an update on '{parent_model_name}' record(s)) was found for a nested connect or create on one-to-one relation '{relation_name}'." ))), }?; @@ -701,8 +698,7 @@ fn one_to_one_inlined_parent( let child_result = match child_results.pop() { Some(p) => Ok(p), None => Err(QueryGraphBuilderError::RecordNotFound(format!( - "No '{}' record (needed to inline the relation with an update on '{}' record(s)) was found for a nested connect or create on one-to-one relation '{}'.", - child_model_name, parent_model_name, relation_name + "No '{child_model_name}' record (needed to inline the relation with an update on '{parent_model_name}' record(s)) was found for a nested connect or create on one-to-one relation '{relation_name}'." ))), }?; @@ -837,8 +833,7 @@ fn one_to_one_inlined_child( let parent_link = match parent_links.pop() { Some(link) => Ok(link), None => Err(QueryGraphBuilderError::RecordNotFound(format!( - "No '{}' record (needed to find '{}' record(s) to update) was found for a nested connect or create on one-to-one relation '{}'.", - parent_model_name, child_model_name, relation_name + "No '{parent_model_name}' record (needed to find '{child_model_name}' record(s) to update) was found for a nested connect or create on one-to-one relation '{relation_name}'." ))), }?; @@ -863,8 +858,7 @@ fn one_to_one_inlined_child( let parent_link = match parent_links.pop() { Some(link) => Ok(link), None => Err(QueryGraphBuilderError::RecordNotFound(format!( - "No '{}' record (needed to inline relation with create on '{}' record(s)) was found for a nested connect or create on one-to-one relation '{}'.", - parent_model_name, child_model_name, relation_name + "No '{parent_model_name}' record (needed to inline relation with create on '{child_model_name}' record(s)) was found for a nested connect or create on one-to-one relation '{relation_name}'." ))), }?; @@ -889,8 +883,7 @@ fn one_to_one_inlined_child( let old_child_id = match new_child_ids.pop() { Some(id) => Ok(id), None => Err(QueryGraphBuilderError::RecordNotFound(format!( - "No '{}' record (needed to find '{}' record(s) to update) was found for a nested connect or create on one-to-one relation '{}'.", - parent_model_name, child_model_name, relation_name + "No '{parent_model_name}' record (needed to find '{child_model_name}' record(s) to update) was found for a nested connect or create on one-to-one relation '{relation_name}'." ))), }?; @@ -990,8 +983,7 @@ fn one_to_one_inlined_child( let old_child_id = match old_child_ids.pop() { Some(id) => Ok(id), None => Err(QueryGraphBuilderError::RecordNotFound(format!( - "No '{}' record (needed to find '{}' record(s) to update) was found for a nested connect or create on one-to-one relation '{}'.", - parent_model_name, child_model_name, relation_name + "No '{parent_model_name}' record (needed to find '{child_model_name}' record(s) to update) was found for a nested connect or create on one-to-one relation '{relation_name}'." 
))), }?; diff --git a/query-engine/core/src/query_graph_builder/write/nested/create_nested.rs b/query-engine/core/src/query_graph_builder/write/nested/create_nested.rs index 0167acda6c0e..88256ccc95b9 100644 --- a/query-engine/core/src/query_graph_builder/write/nested/create_nested.rs +++ b/query-engine/core/src/query_graph_builder/write/nested/create_nested.rs @@ -156,8 +156,7 @@ fn handle_one_to_many( let child_link = match child_links.pop() { Some(link) => Ok(link), None => Err(QueryGraphBuilderError::RecordNotFound(format!( - "No '{}' record (needed to inline the relation on '{}' record) was found for a nested create on one-to-many relation '{}'.", - child_model_name, parent_model_name, relation_name + "No '{child_model_name}' record (needed to inline the relation on '{parent_model_name}' record) was found for a nested create on one-to-many relation '{relation_name}'." ))), }?; @@ -184,8 +183,7 @@ fn handle_one_to_many( let parent_link = match parent_links.pop() { Some(link) => Ok(link), None => Err(QueryGraphBuilderError::RecordNotFound(format!( - "No '{}' record (needed to inline the relation on '{}' record) was found for a nested create on one-to-many relation '{}'.", - parent_model_name, child_model_name, relation_name + "No '{parent_model_name}' record (needed to inline the relation on '{child_model_name}' record) was found for a nested create on one-to-many relation '{relation_name}'." ))), }?; @@ -348,8 +346,7 @@ fn handle_one_to_one( let link = match links.pop() { Some(link) => Ok(link), None => Err(QueryGraphBuilderError::RecordNotFound(format!( - "No '{}' record (needed to inline the relation with create on '{}' record) was found for a nested create on one-to-one relation '{}'.", - parent_model_name, child_model_name, relation_name + "No '{parent_model_name}' record (needed to inline the relation with create on '{child_model_name}' record) was found for a nested create on one-to-one relation '{relation_name}'." ))), }?; @@ -380,8 +377,7 @@ fn handle_one_to_one( let child_link = match child_links.pop() { Some(link) => Ok(link), None => Err(QueryGraphBuilderError::RecordNotFound(format!( - "No '{}' record (needed to inline the relation with an update on '{}' record) was found for a nested create on one-to-one relation '{}'.", - child_model_name, parent_model_name, relation_name + "No '{child_model_name}' record (needed to inline the relation with an update on '{parent_model_name}' record) was found for a nested create on one-to-one relation '{relation_name}'." ))), }?; @@ -474,8 +470,7 @@ pub fn nested_create_many( let parent_link = match parent_links.pop() { Some(p) => Ok(p), None => Err(QueryGraphBuilderError::RecordNotFound(format!( - "No '{}' record (needed to inline the relation on '{}' record) was found for a nested createMany on relation '{}'.", - parent_model_name, child_model_name, relation_name + "No '{parent_model_name}' record (needed to inline the relation on '{child_model_name}' record) was found for a nested createMany on relation '{relation_name}'." 
))), }?; diff --git a/query-engine/core/src/query_graph_builder/write/nested/delete_nested.rs b/query-engine/core/src/query_graph_builder/write/nested/delete_nested.rs index 998b7fa786c1..d956ab60c71a 100644 --- a/query-engine/core/src/query_graph_builder/write/nested/delete_nested.rs +++ b/query-engine/core/src/query_graph_builder/write/nested/delete_nested.rs @@ -125,8 +125,7 @@ pub fn nested_delete( let child_id = match child_ids.pop() { Some(pid) => Ok(pid), None => Err(QueryGraphBuilderError::RecordNotFound(format!( - "No '{}' record was found for a nested delete on relation '{}'.", - child_model_name, relation_name + "No '{child_model_name}' record was found for a nested delete on relation '{relation_name}'." ))), }?; diff --git a/query-engine/core/src/query_graph_builder/write/nested/mod.rs b/query-engine/core/src/query_graph_builder/write/nested/mod.rs index 3ac728728454..f07f3788bcc4 100644 --- a/query-engine/core/src/query_graph_builder/write/nested/mod.rs +++ b/query-engine/core/src/query_graph_builder/write/nested/mod.rs @@ -47,7 +47,7 @@ pub fn connect_nested_query( operations::UPDATE_MANY => nested_update_many(graph, connector_ctx, &parent, &parent_relation_field, value, &child_model)?, operations::DELETE_MANY => nested_delete_many(graph, connector_ctx, &parent, &parent_relation_field, value, &child_model)?, operations::CONNECT_OR_CREATE => nested_connect_or_create(graph, connector_ctx, parent, &parent_relation_field, value, &child_model)?, - _ => panic!("Unhandled nested operation: {}", field_name), + _ => panic!("Unhandled nested operation: {field_name}"), }; } diff --git a/query-engine/core/src/query_graph_builder/write/nested/set_nested.rs b/query-engine/core/src/query_graph_builder/write/nested/set_nested.rs index af9f5d59096e..6d2a00ed4fe0 100644 --- a/query-engine/core/src/query_graph_builder/write/nested/set_nested.rs +++ b/query-engine/core/src/query_graph_builder/write/nested/set_nested.rs @@ -112,8 +112,7 @@ fn handle_many_to_many( let parent_id = match parent_ids.pop() { Some(pid) => Ok(pid), None => Err(QueryGraphBuilderError::RecordNotFound(format!( - "No '{}' records (needed to disconnect existing child records) were found for a nested set on many-to-many relation '{}'.", - parent_model_name, relation_name + "No '{parent_model_name}' records (needed to disconnect existing child records) were found for a nested set on many-to-many relation '{relation_name}'." ))), }?; @@ -292,8 +291,7 @@ fn handle_one_to_many( let parent_link = match parent_links.pop() { Some(link) => Ok(link), None => Err(QueryGraphBuilderError::RecordNotFound(format!( - "No '{}' records were found for a nested set on many-to-many relation '{}'.", - parent_model_name, relation_name + "No '{parent_model_name}' records were found for a nested set on one-to-many relation '{relation_name}'."
))), }?; diff --git a/query-engine/core/src/query_graph_builder/write/nested/update_nested.rs b/query-engine/core/src/query_graph_builder/write/nested/update_nested.rs index f75c3bb20b25..ea2ad022500c 100644 --- a/query-engine/core/src/query_graph_builder/write/nested/update_nested.rs +++ b/query-engine/core/src/query_graph_builder/write/nested/update_nested.rs @@ -98,8 +98,7 @@ pub fn nested_update( let child_id = match child_ids.pop() { Some(pid) => Ok(pid), None => Err(QueryGraphBuilderError::RecordNotFound(format!( - "No '{}' record was found for a nested update on relation '{}'.", - child_model_name, relation_name + "No '{child_model_name}' record was found for a nested update on relation '{relation_name}'." ))), }?; diff --git a/query-engine/core/src/query_graph_builder/write/nested/upsert_nested.rs b/query-engine/core/src/query_graph_builder/write/nested/upsert_nested.rs index bcaa2e5e53ca..a69bf47607e1 100644 --- a/query-engine/core/src/query_graph_builder/write/nested/upsert_nested.rs +++ b/query-engine/core/src/query_graph_builder/write/nested/upsert_nested.rs @@ -174,8 +174,7 @@ pub fn nested_upsert( let child_id = match child_ids.pop() { Some(id) => Ok(id), None => Err(QueryGraphBuilderError::RecordNotFound(format!( - "No '{}' record (needed for nested update `where` on exists) was found for a nested upsert on relation '{}'.", - child_model_name, relation_name + "No '{child_model_name}' record (needed for nested update `where` on exists) was found for a nested upsert on relation '{relation_name}'." ))), }?; @@ -254,8 +253,7 @@ pub fn nested_upsert( let child_link = match child_links.pop() { Some(link) => Ok(link), None => Err(QueryGraphBuilderError::RecordNotFound(format!( - "No '{}' record (needed to update inlined relation on '{}') was found for a nested upsert on relation '{}'.", - child_model_name, parent_model_name, relation_name + "No '{child_model_name}' record (needed to update inlined relation on '{parent_model_name}') was found for a nested upsert on relation '{relation_name}'." ))), }?; @@ -280,8 +278,7 @@ pub fn nested_upsert( let parent_link = match parent_links.pop() { Some(link) => Ok(link), None => Err(QueryGraphBuilderError::RecordNotFound(format!( - "No '{}' record (needed to update inlined relation on '{}') was found for a nested upsert on relation '{}'.", - parent_model_name, child_model_name, relation_name + "No '{parent_model_name}' record (needed to update inlined relation on '{child_model_name}') was found for a nested upsert on relation '{relation_name}'." ))), }?; diff --git a/query-engine/core/src/query_graph_builder/write/utils.rs b/query-engine/core/src/query_graph_builder/write/utils.rs index 4320d67f0e26..bf7fa36d3773 100644 --- a/query-engine/core/src/query_graph_builder/write/utils.rs +++ b/query-engine/core/src/query_graph_builder/write/utils.rs @@ -322,8 +322,7 @@ pub fn insert_existing_1to1_related_model_checks( let child_id = match child_ids.pop() { Some(pid) => Ok(pid), None => Err(QueryGraphBuilderError::RecordNotFound(format!( - "No parent record (needed to update the previous parent) was found for a nested connect on relation '{}' .", - relation_name + "No parent record (needed to update the previous parent) was found for a nested connect on relation '{relation_name}'." ))), }?; @@ -408,7 +407,7 @@ pub fn insert_emulated_on_delete( ReferentialAction::Cascade => { emulate_on_delete_cascade(graph, &rf, connector_ctx, parent_node, child_node)?
} - x => panic!("Unsupported referential action emulation: {}", x), + x => panic!("Unsupported referential action emulation: {x}"), } } @@ -690,7 +689,7 @@ pub fn emulate_on_delete_set_null( &dependent_records_node, &set_null_dependents_node, )?, - x => panic!("Unsupported referential action emulation: {}", x), + x => panic!("Unsupported referential action emulation: {x}"), } } @@ -835,7 +834,7 @@ pub fn emulate_on_update_set_null( &dependent_records_node, &set_null_dependents_node, )?, - x => panic!("Unsupported referential action emulation: {}", x), + x => panic!("Unsupported referential action emulation: {x}"), } } @@ -972,7 +971,7 @@ pub fn insert_emulated_on_update_with_intermediary_node( emulate_on_update_set_null(graph, &rf, connector_ctx, &join_node, child_node)? } ReferentialAction::Cascade => emulate_on_update_cascade(graph, &rf, connector_ctx, &join_node, child_node)?, - x => panic!("Unsupported referential action emulation: {}", x), + x => panic!("Unsupported referential action emulation: {x}"), } } @@ -1005,7 +1004,7 @@ pub fn insert_emulated_on_update( ReferentialAction::Cascade => { emulate_on_update_cascade(graph, &rf, connector_ctx, parent_node, child_node)? } - x => panic!("Unsupported referential action emulation: {}", x), + x => panic!("Unsupported referential action emulation: {x}"), } } diff --git a/query-engine/core/src/response_ir/internal.rs b/query-engine/core/src/response_ir/internal.rs index f570530f361d..c26da8e336f1 100644 --- a/query-engine/core/src/response_ir/internal.rs +++ b/query-engine/core/src/response_ir/internal.rs @@ -131,8 +131,8 @@ fn serialize_aggregations( for field in order { let item = flattened - .remove(&format!("{}_{}", query, field)) - .or_else(|| flattened.remove(&format!("_{}_{}", query, field))) + .remove(&format!("{query}_{field}")) + .or_else(|| flattened.remove(&format!("_{query}_{field}"))) .unwrap(); nested_map.insert(field.clone(), item); @@ -142,7 +142,7 @@ fn serialize_aggregations( } else { let item = flattened .remove(&query.clone()) - .or_else(|| flattened.remove(&format!("_{}", query))) + .or_else(|| flattened.remove(&format!("_{query}"))) .unwrap(); inner_map.insert(query.clone(), item); @@ -563,8 +563,8 @@ fn convert_prisma_value(field: &OutputFieldRef, value: PrismaValue, st: &ScalarT (st, pv) => { return Err(crate::FieldConversionError::create( field.name.clone(), - format!("{:?}", st), - format!("{}", pv), + format!("{st:?}"), + format!("{pv}"), )) } }; diff --git a/query-engine/core/src/response_ir/ir_serializer.rs b/query-engine/core/src/response_ir/ir_serializer.rs index cb7b6e5e72bf..ca0b5c4fc831 100644 --- a/query-engine/core/src/response_ir/ir_serializer.rs +++ b/query-engine/core/src/response_ir/ir_serializer.rs @@ -53,7 +53,7 @@ impl IrSerializer { ExpressionResult::Empty => panic!("Internal error: Attempted to serialize empty result."), - _ => panic!("Internal error: Attempted to serialize non-query result {:?}.", result), + _ => panic!("Internal error: Attempted to serialize non-query result {result:?}."), } } } diff --git a/query-engine/dml/src/default_value.rs b/query-engine/dml/src/default_value.rs index ea236cff1a0c..4e9f83d57b69 100644 --- a/query-engine/dml/src/default_value.rs +++ b/query-engine/dml/src/default_value.rs @@ -203,7 +203,7 @@ impl ValueGenerator { pub fn new_nanoid(length: Option<u8>) -> Self { if let Some(length) = length { ValueGenerator::new( - format!("nanoid({})", length), + format!("nanoid({length})"), vec![(None, PrismaValue::Int(length.into()))], ) .unwrap() @@ -241,8 +241,8 @@ impl 
ValueGenerator { Ok(()) } else { Err(format!( - "The function `{}()` cannot be used on fields of type `{}`.", - &self.name, scalar_type + "The function `{}()` cannot be used on fields of type `{scalar_type}`.", + &self.name )) } } @@ -278,7 +278,7 @@ impl ValueGeneratorFn { "dbgenerated" => Ok(Self::DbGenerated), "auto" => Ok(Self::Auto), name if name.starts_with("nanoid(") => Ok(Self::Nanoid(name[7..name.len() - 1].parse::<u8>().ok())), - _ => Err(format!("The function {} is not a known function.", name)), + _ => Err(format!("The function {name} is not a known function.")), } } @@ -340,7 +340,7 @@ impl PartialEq for ValueGenerator { impl fmt::Debug for DefaultValue { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match &self.kind { - DefaultKind::Single(ref v) => write!(f, "DefaultValue::Single({:?})", v), + DefaultKind::Single(ref v) => write!(f, "DefaultValue::Single({v:?})"), DefaultKind::Expression(g) => write!(f, "DefaultValue::Expression({}(){:?})", g.name(), g.args), } } diff --git a/query-engine/dml/src/model.rs b/query-engine/dml/src/model.rs index b305febd81b8..017b3e0029f2 100644 --- a/query-engine/dml/src/model.rs +++ b/query-engine/dml/src/model.rs @@ -435,7 +435,7 @@ impl Model { let model_name = &self.name.clone(); self.scalar_fields_mut() .find(|rf| rf.name == *name) - .unwrap_or_else(|| panic!("Could not find scalar field {} on model {}.", name, model_name)) + .unwrap_or_else(|| panic!("Could not find scalar field {name} on model {model_name}.")) } /// Finds a relation field by name and returns a mutable reference. @@ -444,7 +444,7 @@ impl Model { let model_name = &self.name.clone(); self.relation_fields_mut() .find(|rf| rf.name == *name) - .unwrap_or_else(|| panic!("Could not find relation field {} on model {}.", name, model_name)) + .unwrap_or_else(|| panic!("Could not find relation field {name} on model {model_name}.")) } /// This should match the logic in `prisma_models::Model::primary_identifier`. 
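
The mechanical change running through all of these hunks is the same one: Rust 1.58 stabilized captured identifiers in format strings, so a bare variable name can be written directly inside the braces and the trailing positional argument list dropped. A minimal sketch of the before/after pattern (the function and names below are illustrative, not taken from this patch):

    fn describe(child_model_name: &str, relation_name: &str) -> String {
        // Before: the template and its arguments are listed separately,
        // so the reader has to match each brace pair to an argument by position.
        let _old = format!(
            "No '{}' record was found for relation '{}'.",
            child_model_name, relation_name
        );

        // After (Rust 1.58+): plain identifiers are captured from the
        // surrounding scope, keeping each value next to its placeholder.
        format!("No '{child_model_name}' record was found for relation '{relation_name}'.")
    }

Only bare identifiers can be captured this way; expressions such as `g.name()` or `&self.name` must stay as trailing arguments, which is why the `DefaultValue::Expression` arm and the `ValueGenerator` error message above keep a mixed form.
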
diff --git a/query-engine/dml/src/scalars.rs b/query-engine/dml/src/scalars.rs index 62542f77d5ca..e5d5903ec780 100644 --- a/query-engine/dml/src/scalars.rs +++ b/query-engine/dml/src/scalars.rs @@ -51,7 +51,7 @@ impl FromStr for ScalarType { "Json" => Ok(ScalarType::Json), "Bytes" => Ok(ScalarType::Bytes), "Decimal" => Ok(ScalarType::Decimal), - _ => Err(format!("type {} is not a known scalar type.", s)), + _ => Err(format!("type {s} is not a known scalar type.")), } } } diff --git a/query-engine/dmmf/src/ast_builders/datamodel_ast_builder.rs b/query-engine/dmmf/src/ast_builders/datamodel_ast_builder.rs index 332277233ad6..eb3482f3cd15 100644 --- a/query-engine/dmmf/src/ast_builders/datamodel_ast_builder.rs +++ b/query-engine/dmmf/src/ast_builders/datamodel_ast_builder.rs @@ -314,15 +314,15 @@ mod tests { ]; for test_case in test_cases { - println!("TESTING: {}", test_case); + println!("TESTING: {test_case}"); - let datamodel_string = load_from_file(format!("{}.prisma", test_case).as_str()); + let datamodel_string = load_from_file(format!("{test_case}.prisma").as_str()); let dml = parse(&datamodel_string); let dmmf_string = render_to_dmmf(&dml); assert_eq_json( &dmmf_string, - &load_from_file(format!("{}.json", test_case).as_str()), + &load_from_file(format!("{test_case}.json").as_str()), test_case, ); } @@ -338,6 +338,6 @@ mod tests { fn load_from_file(file: &str) -> String { let samples_folder_path = concat!(env!("CARGO_MANIFEST_DIR"), "/test_files"); - fs::read_to_string(format!("{}/{}", samples_folder_path, file)).unwrap() + fs::read_to_string(format!("{samples_folder_path}/{file}")).unwrap() } } diff --git a/query-engine/prisma-models/src/builders/field_builders/composite_field_builder.rs b/query-engine/prisma-models/src/builders/field_builders/composite_field_builder.rs index 4028b452bd1c..6aa7ab1fdde6 100644 --- a/query-engine/prisma-models/src/builders/field_builders/composite_field_builder.rs +++ b/query-engine/prisma-models/src/builders/field_builders/composite_field_builder.rs @@ -20,7 +20,7 @@ impl CompositeFieldBuilder { typ: composite_types .iter() .find(|typ| &typ.name == type_name) - .unwrap_or_else(|| panic!("Invalid composite type reference: {}", type_name)) + .unwrap_or_else(|| panic!("Invalid composite type reference: {type_name}")) .clone(), arity: self.arity, container, diff --git a/query-engine/prisma-models/src/builders/index_builder.rs b/query-engine/prisma-models/src/builders/index_builder.rs index 0002d0321aaf..b4c08fd9b7cb 100644 --- a/query-engine/prisma-models/src/builders/index_builder.rs +++ b/query-engine/prisma-models/src/builders/index_builder.rs @@ -29,7 +29,7 @@ impl IndexBuilder { let field = fields .iter() .find(|sf| sf.name == name) - .unwrap_or_else(|| panic!("Unable to resolve field '{}'", name)); + .unwrap_or_else(|| panic!("Unable to resolve field '{name}'")); Arc::downgrade(field) }) diff --git a/query-engine/prisma-models/src/builders/internal_dm_builder.rs b/query-engine/prisma-models/src/builders/internal_dm_builder.rs index fdd6a1184cc0..734b6beff2bf 100644 --- a/query-engine/prisma-models/src/builders/internal_dm_builder.rs +++ b/query-engine/prisma-models/src/builders/internal_dm_builder.rs @@ -230,7 +230,7 @@ pub(crate) fn relation_placeholders(datamodel: &dml::Datamodel) -> Vec<RelationPlaceholder> { TypeIdentifier::Float => write!(f, "Float"), TypeIdentifier::Decimal => write!(f, "Decimal"), TypeIdentifier::Boolean => write!(f, "Bool"), - TypeIdentifier::Enum(e) => write!(f, "Enum{}", e), + TypeIdentifier::Enum(e) => write!(f, "Enum{e}"), TypeIdentifier::UUID => write!(f, "UUID"), 
TypeIdentifier::Json => write!(f, "Json"), TypeIdentifier::Xml => write!(f, "Xml"), diff --git a/query-engine/prisma-models/src/field_selection.rs b/query-engine/prisma-models/src/field_selection.rs index c2c15c535db7..0a69074b6003 100644 --- a/query-engine/prisma-models/src/field_selection.rs +++ b/query-engine/prisma-models/src/field_selection.rs @@ -286,7 +286,7 @@ impl Display for FieldSelection { "FieldSelection {{ fields: [{}] }}", self.selections .iter() - .map(|selection| format!("{}", selection)) + .map(|selection| format!("{selection}")) .join(", ") ) } @@ -300,10 +300,7 @@ impl Display for SelectedField { f, "{} {{ {} }}", cs.field, - cs.selections - .iter() - .map(|selection| format!("{}", selection)) - .join(", ") + cs.selections.iter().map(|selection| format!("{selection}")).join(", ") ), } } diff --git a/query-engine/prisma-models/src/prisma_value_ext.rs b/query-engine/prisma-models/src/prisma_value_ext.rs index 71a330cc3a1f..9a3ce3089a6c 100644 --- a/query-engine/prisma-models/src/prisma_value_ext.rs +++ b/query-engine/prisma-models/src/prisma_value_ext.rs @@ -25,21 +25,16 @@ impl PrismaValueExtensions for PrismaValue { (val @ PrismaValue::Json(_), TypeIdentifier::Json) => val, // Valid String coercions - (PrismaValue::Int(i), TypeIdentifier::String) => PrismaValue::String(format!("{}", i)), + (PrismaValue::Int(i), TypeIdentifier::String) => PrismaValue::String(format!("{i}")), (PrismaValue::Float(f), TypeIdentifier::String) => PrismaValue::String(f.to_string()), - (PrismaValue::Boolean(b), TypeIdentifier::String) => PrismaValue::String(format!("{}", b)), + (PrismaValue::Boolean(b), TypeIdentifier::String) => PrismaValue::String(format!("{b}")), (PrismaValue::Enum(e), TypeIdentifier::String) => PrismaValue::String(e), (PrismaValue::Uuid(u), TypeIdentifier::String) => PrismaValue::String(u.to_string()), // Valid Int coersions (PrismaValue::String(s), TypeIdentifier::Int) => match s.parse() { Ok(i) => PrismaValue::Int(i), - Err(_) => { - return Err(DomainError::ConversionFailure( - format!("{:?}", s), - format!("{:?}", to_type), - )) - } + Err(_) => return Err(DomainError::ConversionFailure(format!("{s:?}"), format!("{to_type:?}"))), }, (PrismaValue::Float(f), TypeIdentifier::Int) => PrismaValue::Int(f.to_i64().unwrap()), (PrismaValue::BigInt(i), TypeIdentifier::Int) => PrismaValue::Int(i), @@ -57,12 +52,7 @@ impl PrismaValueExtensions for PrismaValue { ), // Invalid coercion - (val, typ) => { - return Err(DomainError::ConversionFailure( - format!("{:?}", val), - format!("{:?}", typ), - )) - } + (val, typ) => return Err(DomainError::ConversionFailure(format!("{val:?}"), format!("{typ:?}"))), }; Ok(coerced) diff --git a/query-engine/prisma-models/tests/datamodel_converter_tests.rs b/query-engine/prisma-models/tests/datamodel_converter_tests.rs index 7043011fdc2d..3da5e0f653ae 100644 --- a/query-engine/prisma-models/tests/datamodel_converter_tests.rs +++ b/query-engine/prisma-models/tests/datamodel_converter_tests.rs @@ -428,7 +428,7 @@ fn many_to_many_relations() { .assert_model_a("Blog") .assert_model_b("Post") .assert_manifestation(&RelationLinkManifestation::RelationTable(RelationTable { - table: format!("_{}", relation_name), + table: format!("_{relation_name}"), model_a_column: "A".to_string(), model_b_column: "B".to_string(), })); @@ -603,7 +603,7 @@ impl ModelAssertions for Model { is_for_right_fields && has_right_type }) - .unwrap_or_else(|| panic!("Could not find the index for fields {:?} and type {:?}", fields, tpe)); + .unwrap_or_else(|| panic!("Could not find the 
index for fields {fields:?} and type {tpe:?}")); self } diff --git a/query-engine/query-engine-node-api/build.rs b/query-engine/query-engine-node-api/build.rs index 417ef9d9697d..2ed42a66137c 100644 --- a/query-engine/query-engine-node-api/build.rs +++ b/query-engine/query-engine-node-api/build.rs @@ -5,7 +5,7 @@ use std::process::Command; fn store_git_commit_hash() { let output = Command::new("git").args(["rev-parse", "HEAD"]).output().unwrap(); let git_hash = String::from_utf8(output.stdout).unwrap(); - println!("cargo:rustc-env=GIT_HASH={}", git_hash); + println!("cargo:rustc-env=GIT_HASH={git_hash}"); } fn main() { diff --git a/query-engine/query-engine-node-api/src/engine.rs b/query-engine/query-engine-node-api/src/engine.rs index b2e4a495fe42..5b38ed45da17 100644 --- a/query-engine/query-engine-node-api/src/engine.rs +++ b/query-engine/query-engine-node-api/src/engine.rs @@ -488,7 +488,7 @@ where match AssertUnwindSafe(fut).catch_unwind().await { Ok(result) => result, Err(err) => match Error::extract_panic_message(err) { - Some(message) => Err(napi::Error::from_reason(format!("PANIC: {}", message))), + Some(message) => Err(napi::Error::from_reason(format!("PANIC: {message}"))), None => Err(napi::Error::from_reason("PANIC: unknown panic".to_string())), }, } diff --git a/query-engine/query-engine-node-api/src/error.rs b/query-engine/query-engine-node-api/src/error.rs index d4a16b94a648..0dec758a03f5 100644 --- a/query-engine/query-engine-node-api/src/error.rs +++ b/query-engine/query-engine-node-api/src/error.rs @@ -77,19 +77,19 @@ impl From for ApiError { impl From<url::ParseError> for ApiError { fn from(e: url::ParseError) -> Self { - Self::configuration(format!("Error parsing connection string: {}", e)) + Self::configuration(format!("Error parsing connection string: {e}")) } } impl From<connection_string::Error> for ApiError { fn from(e: connection_string::Error) -> Self { - Self::configuration(format!("Error parsing connection string: {}", e)) + Self::configuration(format!("Error parsing connection string: {e}")) } } impl From<serde_json::Error> for ApiError { fn from(e: serde_json::Error) -> Self { - Self::JsonDecode(format!("{}", e)) + Self::JsonDecode(format!("{e}")) } } diff --git a/query-engine/query-engine-node-api/src/logger.rs b/query-engine/query-engine-node-api/src/logger.rs index 4ee96f3c1ad3..221f8a96e8a5 100644 --- a/query-engine/query-engine-node-api/src/logger.rs +++ b/query-engine/query-engine-node-api/src/logger.rs @@ -104,11 +104,10 @@ impl<'a> Visit for JsonVisitor<'a> { match field.name() { name if name.starts_with("r#") => { self.values - .insert(&name[2..], serde_json::Value::from(format!("{:?}", value))); + .insert(&name[2..], serde_json::Value::from(format!("{value:?}"))); } name => { - self.values - .insert(name, serde_json::Value::from(format!("{:?}", value))); + self.values.insert(name, serde_json::Value::from(format!("{value:?}"))); } }; } diff --git a/query-engine/query-engine/build.rs b/query-engine/query-engine/build.rs index d9a74f6b8c89..2e8fe20c0503 100644 --- a/query-engine/query-engine/build.rs +++ b/query-engine/query-engine/build.rs @@ -3,7 +3,7 @@ use std::process::Command; fn store_git_commit_hash() { let output = Command::new("git").args(["rev-parse", "HEAD"]).output().unwrap(); let git_hash = String::from_utf8(output.stdout).unwrap(); - println!("cargo:rustc-env=GIT_HASH={}", git_hash); + println!("cargo:rustc-env=GIT_HASH={git_hash}"); } fn main() { diff --git a/query-engine/query-engine/src/cli.rs b/query-engine/query-engine/src/cli.rs index a50ec777901e..0e47abb6b1bd 100644 --- 
a/query-engine/query-engine/src/cli.rs +++ b/query-engine/query-engine/src/cli.rs @@ -87,7 +87,7 @@ impl CliCommand { let dmmf = dmmf::render_dmmf(query_schema); let serialized = serde_json::to_string_pretty(&dmmf)?; - println!("{}", serialized); + println!("{serialized}"); Ok(()) } @@ -102,7 +102,7 @@ impl CliCommand { let json = psl::get_config::config_to_mcf_json_value(config); let serialized = serde_json::to_string(&json)?; - println!("{}", serialized); + println!("{serialized}"); Ok(()) } @@ -130,7 +130,7 @@ impl CliCommand { let res = serde_json::to_string(&res).unwrap(); let encoded_response = base64::encode(res); - println!("Response: {}", encoded_response); // reason for prefix is explained in TestServer.scala + println!("Response: {encoded_response}"); // reason for prefix is explained in TestServer.scala Ok(()) } diff --git a/query-engine/query-engine/src/error.rs b/query-engine/query-engine/src/error.rs index 1fbaeb86bd8a..ebd852f56e67 100644 --- a/query-engine/query-engine/src/error.rs +++ b/query-engine/query-engine/src/error.rs @@ -107,13 +107,13 @@ impl From for PrismaError { impl From<url::ParseError> for PrismaError { fn from(e: url::ParseError) -> PrismaError { - PrismaError::ConfigurationError(format!("Error parsing connection string: {}", e)) + PrismaError::ConfigurationError(format!("Error parsing connection string: {e}")) } } impl From<connection_string::Error> for PrismaError { fn from(e: connection_string::Error) -> PrismaError { - PrismaError::ConfigurationError(format!("Error parsing connection string: {}", e)) + PrismaError::ConfigurationError(format!("Error parsing connection string: {e}")) } } @@ -137,13 +137,13 @@ impl From for PrismaError { impl From<base64::DecodeError> for PrismaError { fn from(e: base64::DecodeError) -> PrismaError { - PrismaError::ConfigurationError(format!("Invalid base64: {}", e)) + PrismaError::ConfigurationError(format!("Invalid base64: {e}")) } } impl From<GqlParseError> for PrismaError { fn from(e: GqlParseError) -> PrismaError { - PrismaError::QueryConversionError(format!("Error parsing GraphQL query: {}", e)) + PrismaError::QueryConversionError(format!("Error parsing GraphQL query: {e}")) } } diff --git a/query-engine/query-engine/src/opt.rs b/query-engine/query-engine/src/opt.rs index a42a8e31b31f..2e5ef81e7c61 100644 --- a/query-engine/query-engine/src/opt.rs +++ b/query-engine/query-engine/src/opt.rs @@ -223,11 +223,11 @@ fn parse_base64_string(s: &str) -> PrismaResult<String> { } fn load_datamodel_file(path: &OsStr) -> String { - let mut f = File::open(path).unwrap_or_else(|_| panic!("Could not open datamodel file {:?}", path)); + let mut f = File::open(path).unwrap_or_else(|_| panic!("Could not open datamodel file {path:?}")); let mut datamodel = String::new(); f.read_to_string(&mut datamodel) - .unwrap_or_else(|_| panic!("Could not read datamodel file: {:?}", path)); + .unwrap_or_else(|_| panic!("Could not read datamodel file: {path:?}")); datamodel } diff --git a/query-engine/query-engine/src/server/mod.rs b/query-engine/query-engine/src/server/mod.rs index 02d161832c72..1f688e8c0d35 100644 --- a/query-engine/query-engine/src/server/mod.rs +++ b/query-engine/query-engine/src/server/mod.rs @@ -30,7 +30,7 @@ pub async fn listen(opts: &PrismaOpt, state: State) -> PrismaResult<()> { info!("Started query engine http server on http://{}", addr); if let Err(e) = server.await { - eprintln!("server error: {}", e); + eprintln!("server error: {e}"); } Ok(()) diff --git a/query-engine/request-handlers/src/error.rs b/query-engine/request-handlers/src/error.rs index 99bcd379cca2..6d80cea35891 100644 --- 
a/query-engine/request-handlers/src/error.rs +++ b/query-engine/request-handlers/src/error.rs @@ -58,18 +58,18 @@ impl HandlerError { impl From<url::ParseError> for HandlerError { fn from(e: url::ParseError) -> Self { - Self::configuration(format!("Error parsing connection string: {}", e)) + Self::configuration(format!("Error parsing connection string: {e}")) } } impl From<connection_string::Error> for HandlerError { fn from(e: connection_string::Error) -> Self { - Self::configuration(format!("Error parsing connection string: {}", e)) + Self::configuration(format!("Error parsing connection string: {e}")) } } impl From<ParseError> for HandlerError { fn from(e: ParseError) -> Self { - Self::configuration(format!("Error parsing GraphQL query: {}", e)) + Self::configuration(format!("Error parsing GraphQL query: {e}")) } } diff --git a/query-engine/request-handlers/src/graphql/protocol_adapter.rs b/query-engine/request-handlers/src/graphql/protocol_adapter.rs index 8fedc44aabf3..c430ad6aec58 100644 --- a/query-engine/request-handlers/src/graphql/protocol_adapter.rs +++ b/query-engine/request-handlers/src/graphql/protocol_adapter.rs @@ -44,7 +44,7 @@ impl GraphQLProtocolAdapter { .definitions .into_iter() .find(|def| Self::matches_operation(def, op)) - .ok_or_else(|| HandlerError::query_conversion(format!("Operation '{}' does not match any query.", op))) + .ok_or_else(|| HandlerError::query_conversion(format!("Operation '{op}' does not match any query."))) .and_then(Self::convert_definition), None => gql_doc @@ -137,18 +137,15 @@ impl GraphQLProtocolAdapter { match value { Value::Variable(name) => Err(HandlerError::unsupported_feature( "Variable usage", - format!("Variable '{}'.", name), + format!("Variable '{name}'."), )), Value::Int(i) => match i.as_i64() { Some(i) => Ok(PrismaValue::Int(i)), - None => Err(HandlerError::query_conversion(format!( - "Invalid 64 bit integer: {:?}", - i - ))), + None => Err(HandlerError::query_conversion(format!("Invalid 64 bit integer: {i:?}"))), }, Value::Float(f) => match BigDecimal::from_f64(f) { Some(dec) => Ok(PrismaValue::Float(dec)), - None => Err(HandlerError::query_conversion(format!("invalid 64-bit float: {:?}", f))), + None => Err(HandlerError::query_conversion(format!("invalid 64-bit float: {f:?}"))), }, Value::String(s) => Ok(PrismaValue::String(s)), Value::Boolean(b) => Ok(PrismaValue::Boolean(b)), diff --git a/query-engine/request-handlers/src/graphql/response.rs b/query-engine/request-handlers/src/graphql/response.rs index 859d6261fecc..1847e4bc0656 100644 --- a/query-engine/request-handlers/src/graphql/response.rs +++ b/query-engine/request-handlers/src/graphql/response.rs @@ -111,7 +111,7 @@ impl From for GQLError { impl From<CoreError> for GQLError { fn from(err: CoreError) -> GQLError { GQLError { - error: format!("{}", err), + error: format!("{err}"), user_facing_error: err.into(), } } diff --git a/query-engine/request-handlers/src/graphql/schema_renderer/type_renderer.rs b/query-engine/request-handlers/src/graphql/schema_renderer/type_renderer.rs index 8799b9ae7131..29c321b27eb3 100644 --- a/query-engine/request-handlers/src/graphql/schema_renderer/type_renderer.rs +++ b/query-engine/request-handlers/src/graphql/schema_renderer/type_renderer.rs @@ -32,7 +32,7 @@ impl<'a> GqlTypeRenderer<'a> { InputType::List(ref l) => { let substring = self.render_input_type(l, ctx); - format!("[{}]", substring) + format!("[{substring}]") } InputType::Scalar(ref scalar) => { @@ -72,7 +72,7 @@ impl<'a> GqlTypeRenderer<'a> { OutputType::List(l) => { let substring = self.render_output_type(l, ctx); - format!("[{}]", 
substring) + format!("[{substring}]") } OutputType::Scalar(ref scalar) => { diff --git a/query-engine/schema-builder/src/cache.rs b/query-engine/schema-builder/src/cache.rs index 1059151c39e9..3c40ae5718e3 100644 --- a/query-engine/schema-builder/src/cache.rs +++ b/query-engine/schema-builder/src/cache.rs @@ -41,8 +41,8 @@ impl<T> TypeRefCache<T> { pub fn insert(&mut self, ident: Identifier, value: Arc<T>) { if let Some(old) = self.cache.insert(ident.clone(), value) { panic!( - "Invariant violation: Inserted identifier {:?} twice, this is a bug and invalidates weak arc references. {:?}", - ident, old + "Invariant violation: Inserted identifier {ident:?} twice, this is a bug and invalidates weak arc references. {old:?}" ) } } diff --git a/query-engine/schema-builder/src/enum_types.rs b/query-engine/schema-builder/src/enum_types.rs index fc5b898b47ef..daccf0ea2c5d 100644 --- a/query-engine/schema-builder/src/enum_types.rs +++ b/query-engine/schema-builder/src/enum_types.rs @@ -105,7 +105,7 @@ pub(crate) fn order_by_relevance_enum( container: &str, values: Vec<String>, ) -> EnumTypeWeakRef { - let name = format!("{}OrderByRelevanceFieldEnum", container); + let name = format!("{container}OrderByRelevanceFieldEnum"); let ident = Identifier::new(name, PRISMA_NAMESPACE); return_cached_enum!(ctx, &ident); diff --git a/query-engine/schema-builder/src/input_types/fields/data_input_mapper/update.rs b/query-engine/schema-builder/src/input_types/fields/data_input_mapper/update.rs index 8f579d70edff..cb5dde1ba796 100644 --- a/query-engine/schema-builder/src/input_types/fields/data_input_mapper/update.rs +++ b/query-engine/schema-builder/src/input_types/fields/data_input_mapper/update.rs @@ -26,7 +26,7 @@ impl DataInputFieldMapper for UpdateDataInputFieldMapper { TypeIdentifier::String => InputType::object(update_operations_object_type(ctx, "String", sf, false)), TypeIdentifier::Boolean => InputType::object(update_operations_object_type(ctx, "Bool", sf, false)), TypeIdentifier::Enum(e) => { - InputType::object(update_operations_object_type(ctx, &format!("Enum{}", e), sf, false)) + InputType::object(update_operations_object_type(ctx, &format!("Enum{e}"), sf, false)) } TypeIdentifier::Json => map_scalar_input_type_for_field(ctx, sf), TypeIdentifier::DateTime => InputType::object(update_operations_object_type(ctx, "DateTime", sf, false)), @@ -169,7 +169,7 @@ fn update_operations_object_type( // - "Nullable" affects the `set` operation (`set` is nullable) let nullable = if !sf.is_required() { "Nullable" } else { "" }; let ident = Identifier::new( - format!("{}{}FieldUpdateOperationsInput", nullable, prefix), + format!("{nullable}{prefix}FieldUpdateOperationsInput"), PRISMA_NAMESPACE, ); return_cached_input!(ctx, &ident); diff --git a/query-engine/schema-builder/src/input_types/fields/field_ref_type.rs b/query-engine/schema-builder/src/input_types/fields/field_ref_type.rs index 078763b57db8..36e71902417d 100644 --- a/query-engine/schema-builder/src/input_types/fields/field_ref_type.rs +++ b/query-engine/schema-builder/src/input_types/fields/field_ref_type.rs @@ -44,5 +44,5 @@ fn field_ref_input_type_name(allow_type: &InputType) -> String { _ => unreachable!("input ref type only support scalar or enums"), }; - format!("{}FieldRefInput", typ_str) + format!("{typ_str}FieldRefInput") } diff --git a/query-engine/schema/src/input_types.rs b/query-engine/schema/src/input_types.rs index 5f1e813471ab..f76b54424acc 100644 --- a/query-engine/schema/src/input_types.rs +++ b/query-engine/schema/src/input_types.rs @@ -204,9 +204,9 @@ impl 
Debug for InputType { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { InputType::Object(obj) => write!(f, "Object({})", obj.into_arc().identifier.name()), - InputType::Scalar(s) => write!(f, "{:?}", s), - InputType::Enum(e) => write!(f, "{:?}", e), - InputType::List(l) => write!(f, "{:?}", l), + InputType::Scalar(s) => write!(f, "{s:?}"), + InputType::Enum(e) => write!(f, "{e:?}"), + InputType::List(l) => write!(f, "{l:?}"), } } } diff --git a/query-engine/schema/src/query_schema.rs b/query-engine/schema/src/query_schema.rs index ff66d4d44a86..585601f3f767 100644 --- a/query-engine/schema/src/query_schema.rs +++ b/query-engine/schema/src/query_schema.rs @@ -189,7 +189,7 @@ impl fmt::Display for QueryTag { Self::AggregateRaw => "aggregateRaw", }; - write!(f, "{}", s) + write!(f, "{s}") } } @@ -261,6 +261,6 @@ impl std::fmt::Display for ScalarType { ScalarType::Bytes => "Bytes", }; - write!(f, "{}", typ) + write!(f, "{typ}") } }
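
A closing note on the recurring shape in the write-path hunks above: the `match` on `pop()` that maps `None` to a `RecordNotFound` error could be collapsed further with `Option::ok_or_else` now that each message is a single expression. A sketch under simplified types (the error enum and id type here are stand-ins, not the engine's real definitions):

    #[derive(Debug)]
    enum QueryGraphBuilderError {
        RecordNotFound(String),
    }

    fn pop_child_id(
        child_ids: &mut Vec<u64>,
        child_model_name: &str,
        relation_name: &str,
    ) -> Result<u64, QueryGraphBuilderError> {
        // Behaves like the repeated `match { Some => Ok, None => Err }?` blocks:
        // take the last id, or lazily build the user-facing error.
        child_ids.pop().ok_or_else(|| {
            QueryGraphBuilderError::RecordNotFound(format!(
                "No '{child_model_name}' record was found for a nested update on relation '{relation_name}'."
            ))
        })
    }

The `ok_or_else` form drops the intermediate `Result` plus `?` step while keeping error construction lazy, so the `format!` only runs on the failure path.
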