From 26fbea24afb637e17f8fc199538abde9f5331eb1 Mon Sep 17 00:00:00 2001 From: Sophie <29753584+Druue@users.noreply.github.com> Date: Tue, 26 Mar 2024 12:21:38 +0100 Subject: [PATCH 01/25] fix(qe): boolean batching (#4783) - Added batch_boolean test and assert that unique booleans can compact - Handle unique boolean selection set - Added test for batching multi-case for booleans --------- Co-authored-by: Serhii Tatarintsev Co-authored-by: Alberto Schiabel --- .../queries/batching/select_one_compound.rs | 39 +++++++++++++++ .../queries/batching/select_one_singular.rs | 47 +++++++++++++++++++ .../core/src/query_document/selection.rs | 25 ++++++++-- 3 files changed, 107 insertions(+), 4 deletions(-) diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/batching/select_one_compound.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/batching/select_one_compound.rs index 84b13c84f0f..b8fcf86c13d 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/batching/select_one_compound.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/batching/select_one_compound.rs @@ -383,6 +383,45 @@ mod compound_batch { Ok(()) } + #[connector_test(schema(common_list_types), capabilities(ScalarLists))] + async fn should_only_batch_if_possible_list_boolean(runner: Runner) -> TestResult<()> { + run_query!( + &runner, + r#"mutation { + createOneTestModel(data: { id: 1, bool: [true, false] }) { id } + }"# + ); + run_query!( + &runner, + r#"mutation { + createOneTestModel(data: { id: 2, bool: [false, true] }) { id } + }"# + ); + + let queries = vec![ + r#"query { + findUniqueTestModel(where: { id: 1, bool: { equals: [true, false] } }) { id, bool } + }"# + .to_string(), + r#"query { + findUniqueTestModel( where: { id: 2, bool: { equals: [false, true] } }) { id, bool } + }"# + .to_string(), + ]; + + // COMPACT: Queries use scalar list + let doc = compact_batch(&runner, queries.clone()).await?; + 
assert!(doc.is_compact()); + + let batch_results = runner.batch(queries, false, None).await?; + insta::assert_snapshot!( + batch_results.to_string(), + @r###"{"batchResult":[{"data":{"findUniqueTestModel":{"id":1,"bool":[true,false]}}},{"data":{"findUniqueTestModel":{"id":2,"bool":[false,true]}}}]}"### + ); + + Ok(()) + } + async fn create_test_data(runner: &Runner) -> TestResult<()> { runner .query(r#"mutation { createOneArtist(data: { firstName: "Musti" lastName: "Naukio", non_unique: 0 }) { firstName }}"#) diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/batching/select_one_singular.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/batching/select_one_singular.rs index 521b668f276..373d748ed79 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/batching/select_one_singular.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/batching/select_one_singular.rs @@ -348,6 +348,53 @@ mod singular_batch { Ok(()) } + fn boolean_unique() -> String { + let schema = indoc! { + r#" + model User { + #id(id, String, @id) + isManager Boolean? 
@unique + } + "# + }; + + schema.to_owned() + } + + #[connector_test(schema(boolean_unique))] + async fn batch_boolean(runner: Runner) -> TestResult<()> { + run_query!( + &runner, + r#"mutation { + createOneUser(data: { id: "A", isManager: true }) { id } + }"# + ); + run_query!( + &runner, + r#"mutation { + createOneUser(data: { id: "B", isManager: false }) { id } + }"# + ); + + let (res, compact_doc) = compact_batch( + &runner, + vec![ + r#"{ findUniqueUser(where: { isManager: true }) { id, isManager } }"#.to_string(), + r#"{ findUniqueUser(where: { isManager: false }) { id, isManager } }"#.to_string(), + ], + ) + .await?; + + insta::assert_snapshot!( + res.to_string(), + @r###"{"batchResult":[{"data":{"findUniqueUser":{"id":"A","isManager":true}}},{"data":{"findUniqueUser":{"id":"B","isManager":false}}}]}"### + ); + + assert!(compact_doc.is_compact()); + + Ok(()) + } + // Regression test for https://github.com/prisma/prisma/issues/16548 #[connector_test(schema(schemas::generic))] async fn repro_16548(runner: Runner) -> TestResult<()> { diff --git a/query-engine/core/src/query_document/selection.rs b/query-engine/core/src/query_document/selection.rs index 206fc95c831..18f8fde7843 100644 --- a/query-engine/core/src/query_document/selection.rs +++ b/query-engine/core/src/query_document/selection.rs @@ -211,10 +211,27 @@ impl<'a> From> for ArgumentValue { ArgumentValue::from(conjuctive) } - SelectionSet::Single(key, vals) => ArgumentValue::object([( - key.to_string(), - ArgumentValue::object([(filters::IN.to_owned(), ArgumentValue::list(vals))]), - )]), + SelectionSet::Single(key, vals) => { + let is_bool = vals.iter().any(|v| match v { + ArgumentValue::Scalar(s) => matches!(s, query_structure::PrismaValue::Boolean(_)), + _ => false, + }); + + if is_bool { + let conjunctive = vals.into_iter().fold(Conjuctive::new(), |acc, val| { + let mut argument: IndexMap = IndexMap::new(); + argument.insert(key.clone().into_owned(), val); + acc.or(argument) + }); + + return 
ArgumentValue::from(conjunctive); + } + + ArgumentValue::object([( + key.to_string(), + ArgumentValue::object([(filters::IN.to_owned(), ArgumentValue::list(vals))]), + )]) + } SelectionSet::Empty => ArgumentValue::null(), } } From 80622ddd22cc14c988384c2e32a9919f27ef725c Mon Sep 17 00:00:00 2001 From: Alberto Schiabel Date: Wed, 27 Mar 2024 14:46:14 +0100 Subject: [PATCH 02/25] fix(adapter-d1): fix tests (#4794) * fix(adapter-d1): uncomment tests that now work locally * fix(adapter-d1): add more context around group_by_having tests * fix(adapter-d1): uncomment more tests that now work locally * fix(adapter-d1): uncomment other tests that now work locally, explain why certain tests still fail * fix(adapter-d1): uncomment remain tests that now work, explain why they fail --- .../query-engine-tests/tests/new/cursor.rs | 2 +- .../tests/new/disconnect.rs | 4 +- .../tests/new/native_upsert.rs | 16 ++--- .../query-engine-tests/tests/new/occ.rs | 26 +++++---- .../new/ref_actions/on_delete/cascade.rs | 12 ++-- .../new/ref_actions/on_delete/restrict.rs | 8 +-- .../new/ref_actions/on_delete/set_default.rs | 8 +-- .../new/ref_actions/on_delete/set_null.rs | 18 +++--- .../new/ref_actions/on_update/cascade.rs | 28 ++++++--- .../new/ref_actions/on_update/restrict.rs | 32 ++++++---- .../new/ref_actions/on_update/set_default.rs | 8 +-- .../new/ref_actions/on_update/set_null.rs | 43 ++++++++------ .../tests/new/regressions/max_integer.rs | 13 +++-- .../tests/new/regressions/prisma_12572.rs | 2 +- .../tests/new/regressions/prisma_13089.rs | 2 +- .../tests/new/regressions/prisma_14696.rs | 2 +- .../tests/new/regressions/prisma_15177.rs | 2 +- .../tests/new/regressions/prisma_15581.rs | 2 +- .../tests/new/relation_load_strategy.rs | 4 +- .../tests/new/update_no_select.rs | 2 +- .../tests/queries/aggregation/avg.rs | 6 +- .../queries/aggregation/combination_spec.rs | 4 +- .../tests/queries/aggregation/count.rs | 2 +- .../tests/queries/aggregation/group_by.rs | 2 +- 
.../queries/aggregation/group_by_having.rs | 41 ++++++++++++- .../aggregation/many_count_relation.rs | 12 ++-- .../tests/queries/aggregation/sum.rs | 2 +- .../aggregation/uniq_count_relation.rs | 4 +- .../queries/batching/select_one_compound.rs | 4 +- .../queries/batching/select_one_singular.rs | 6 +- .../queries/batching/transactional_batch.rs | 18 +++++- .../queries/data_types/through_relation.rs | 8 +++ .../tests/queries/filters/self_relation.rs | 6 +- .../order_by_aggregation.rs | 58 ++++++++++++++++--- .../tests/writes/data_types/bigint.rs | 12 +++- .../nested_connect_inside_create.rs | 2 +- .../nested_connect_inside_update.rs | 11 ++-- .../nested_create_inside_create.rs | 2 +- .../nested_create_inside_update.rs | 2 +- .../nested_delete_inside_update.rs | 6 +- .../nested_delete_inside_upsert.rs | 2 +- .../nested_delete_many_inside_update.rs | 2 +- .../nested_disconnect_inside_update.rs | 2 +- .../nested_disconnect_inside_upsert.rs | 4 +- .../nested_set_inside_update.rs | 2 +- .../nested_update_many_inside_update.rs | 2 +- .../nested_upsert_inside_update.rs | 2 +- .../combining_different_nested_mutations.rs | 2 +- .../nested_atomic_number_ops.rs | 8 +-- .../nested_connect_inside_upsert.rs | 2 +- .../nested_connect_or_create.rs | 2 +- .../nested_create_many.rs | 2 +- .../nested_update_inside_update.rs | 10 +++- .../delete_many_relations.rs | 32 ++++++++-- 54 files changed, 343 insertions(+), 173 deletions(-) diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/cursor.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/cursor.rs index 58fdb32a53c..cd1526d1c38 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/cursor.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/cursor.rs @@ -17,7 +17,7 @@ mod bigint_cursor { schema.to_owned() } - #[connector_test(exclude(Sqlite("cfd1")))] + #[connector_test] async fn bigint_id_must_work(runner: Runner) -> TestResult<()> { 
test_data(&runner).await?; diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/disconnect.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/disconnect.rs index 60cd0c1a1ee..7aacd31016c 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/disconnect.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/disconnect.rs @@ -7,7 +7,7 @@ use query_engine_tests::*; mod disconnect_security { use query_engine_tests::assert_query; - #[connector_test(schema(schemas::a1_to_bm_opt), exclude(Sqlite("cfd1")))] + #[connector_test(schema(schemas::a1_to_bm_opt))] async fn must_honor_connect_scope_one2m(runner: Runner) -> TestResult<()> { one_to_many_test_data(&runner).await?; @@ -35,7 +35,7 @@ mod disconnect_security { Ok(()) } - #[connector_test(schema(schemas::posts_categories), exclude(Sqlite("cfd1")))] + #[connector_test(schema(schemas::posts_categories))] async fn must_honor_connect_scope_m2m(runner: Runner) -> TestResult<()> { many_to_many_test_data(&runner).await?; diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/native_upsert.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/native_upsert.rs index 43d66079120..1d27f583e33 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/native_upsert.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/native_upsert.rs @@ -3,7 +3,7 @@ use query_engine_tests::*; #[test_suite(capabilities(NativeUpsert))] mod native_upsert { - #[connector_test(schema(user), exclude(Sqlite("cfd1")))] + #[connector_test(schema(user))] async fn should_upsert_on_single_unique(mut runner: Runner) -> TestResult<()> { let upsert = r#" mutation { @@ -42,7 +42,7 @@ mod native_upsert { Ok(()) } - #[connector_test(schema(user), exclude(Sqlite("cfd1")))] + #[connector_test(schema(user))] async fn should_upsert_on_id(mut runner: Runner) -> TestResult<()> { let upsert = r#" mutation { @@ -85,7 
+85,7 @@ mod native_upsert { Ok(()) } - #[connector_test(schema(user), exclude(Sqlite("cfd1")))] + #[connector_test(schema(user))] async fn should_upsert_on_unique_list(mut runner: Runner) -> TestResult<()> { let upsert = r#" mutation { @@ -129,7 +129,7 @@ mod native_upsert { Ok(()) } - #[connector_test(schema(user), exclude(Sqlite("cfd1")))] + #[connector_test(schema(user))] async fn should_not_use_native_upsert_on_two_uniques(mut runner: Runner) -> TestResult<()> { let upsert = r#" mutation { @@ -175,7 +175,7 @@ mod native_upsert { // Should not use native upsert when the unique field values defined in the where clause // do not match the same uniques fields in the create clause - #[connector_test(schema(user), exclude(Sqlite("cfd1")))] + #[connector_test(schema(user))] async fn should_not_use_if_where_and_create_different(mut runner: Runner) -> TestResult<()> { run_query!( &runner, @@ -228,7 +228,7 @@ mod native_upsert { Ok(()) } - #[connector_test(schema(user), exclude(Sqlite("cfd1")))] + #[connector_test(schema(user))] async fn should_not_if_missing_update(mut runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { @@ -278,7 +278,7 @@ mod native_upsert { schema.to_owned() } - #[connector_test(schema(relations), exclude(Sqlite("cfd1")))] + #[connector_test(schema(relations))] async fn should_not_if_has_nested_select(mut runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { @@ -322,7 +322,7 @@ mod native_upsert { schema.to_owned() } - #[connector_test(schema(compound_id), exclude(Sqlite("cfd1")))] + #[connector_test(schema(compound_id))] async fn should_upsert_on_compound_id(mut runner: Runner) -> TestResult<()> { let upsert = r#" mutation { diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/occ.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/occ.rs index 5ddb6f1721b..0d9bb8bb386 100644 --- 
a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/occ.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/occ.rs @@ -112,10 +112,19 @@ mod occ { assert_eq!(booked_user_id, found_booked_user_id); } - // On PlanetScale: - // assertion `left == right` failed - // left: 6 - // right: 1 + // On PlanetScale, this fails with: + // ``` + // assertion `left == right` failed + // left: 6 + // right: 1 + // ``` + // + // On D1, this fails with: + // ``` + // assertion `left == right` failed + // left: 3 + // right: 1 + // ``` #[connector_test( schema(occ_simple), exclude( @@ -141,7 +150,7 @@ mod occ { #[connector_test( schema(occ_simple), - exclude(CockroachDb, Vitess("planetscale.js", "planetscale.js.wasm"), Sqlite("cfd1")) + exclude(CockroachDb, Vitess("planetscale.js", "planetscale.js.wasm")) )] async fn occ_update_test(runner: Runner) -> TestResult<()> { let runner = Arc::new(runner); @@ -173,10 +182,7 @@ mod occ { Ok(()) } - #[connector_test( - schema(occ_simple), - exclude(Vitess("planetscale.js", "planetscale.js.wasm"), Sqlite("cfd1")) - )] + #[connector_test(schema(occ_simple), exclude(Vitess("planetscale.js", "planetscale.js.wasm")))] async fn occ_delete_test(runner: Runner) -> TestResult<()> { let runner = Arc::new(runner); @@ -208,7 +214,7 @@ mod occ { Ok(()) } - #[connector_test(schema(occ_simple), exclude(Sqlite("cfd1")))] + #[connector_test(schema(occ_simple))] async fn occ_delete_many_test(runner: Runner) -> TestResult<()> { let runner = Arc::new(runner); diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_delete/cascade.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_delete/cascade.rs index 64d4ba4facf..bfb163b5e98 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_delete/cascade.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_delete/cascade.rs @@ -23,7 +23,7 @@ mod 
one2one_req { } /// Deleting the parent deletes child as well. - #[connector_test(exclude(Sqlite("cfd1")))] + #[connector_test] async fn delete_parent(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { createOneParent(data: { id: 1, child: { create: { id: 1 }}}) { id }}"#), @@ -67,7 +67,7 @@ mod one2one_opt { } /// Deleting the parent deletes child as well. - #[connector_test(exclude(Sqlite("cfd1")))] + #[connector_test] async fn delete_parent(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { createOneParent(data: { id: 1, child: { create: { id: 1 }}}) { id }}"#), @@ -107,7 +107,7 @@ mod one2one_opt { /// Deleting the parent deletes child as well. /// Checks that it works even with different parent/child primary identifier names. - #[connector_test(schema(diff_id_name), exclude(Sqlite("cfd1")))] + #[connector_test(schema(diff_id_name))] async fn delete_parent_diff_id_name(runner: Runner) -> TestResult<()> { run_query!( &runner, @@ -151,7 +151,7 @@ mod one2many_req { } /// Deleting the parent deletes all children. - #[connector_test(exclude(Sqlite("cfd1")))] + #[connector_test] async fn delete_parent(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { createOneParent(data: { id: 1, children: { create: [ { id: 1 }, { id: 2 } ] }}) { id }}"#), @@ -195,7 +195,7 @@ mod one2many_opt { } /// Deleting the parent deletes all children. 
- #[connector_test(exclude(Sqlite("cfd1")))] + #[connector_test] async fn delete_parent(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { createOneParent(data: { id: 1, children: { create: [ { id: 1 }, { id: 2 } ] }}) { id }}"#), @@ -216,7 +216,7 @@ mod one2many_opt { } } -#[test_suite(schema(schema), exclude(SqlServer, Sqlite("cfd1")), relation_mode = "prisma")] +#[test_suite(schema(schema), exclude(SqlServer), relation_mode = "prisma")] mod multiple_cascading_paths { use indoc::indoc; use query_engine_tests::run_query; diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_delete/restrict.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_delete/restrict.rs index 32b187d6140..b3982c85f1a 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_delete/restrict.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_delete/restrict.rs @@ -6,7 +6,7 @@ use query_engine_tests::*; #[test_suite( suite = "restrict_onD_1to1_req", schema(required), - exclude(SqlServer, Sqlite("cfd1")), + exclude(SqlServer), relation_mode = "prisma" )] mod one2one_req { @@ -49,7 +49,7 @@ mod one2one_req { #[test_suite( suite = "restrict_onD_1to1_opt", schema(optional), - exclude(SqlServer, Sqlite("cfd1")), + exclude(SqlServer), relation_mode = "prisma" )] mod one2one_opt { @@ -153,7 +153,7 @@ mod one2one_opt { #[test_suite( suite = "restrict_onD_1toM_req", schema(required), - exclude(SqlServer, Sqlite("cfd1")), + exclude(SqlServer), relation_mode = "prisma" )] mod one2many_req { @@ -222,7 +222,7 @@ mod one2many_req { #[test_suite( suite = "restrict_onD_1toM_opt", schema(optional), - exclude(SqlServer, Sqlite("cfd1")), + exclude(SqlServer), relation_mode = "prisma" )] mod one2many_opt { diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_delete/set_default.rs 
b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_delete/set_default.rs index db277d42a44..131dbcf8959 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_delete/set_default.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_delete/set_default.rs @@ -4,7 +4,7 @@ use query_engine_tests::*; #[test_suite( suite = "setdefault_onD_1to1_req", - exclude(MongoDb, MySQL, Vitess("planetscale.js", "planetscale.js.wasm"), Sqlite("cfd1")) + exclude(MongoDb, MySQL, Vitess("planetscale.js", "planetscale.js.wasm")) )] mod one2one_req { fn required_with_default() -> String { @@ -108,7 +108,7 @@ mod one2one_req { #[test_suite( suite = "setdefault_onD_1to1_opt", - exclude(MongoDb, MySQL, Vitess("planetscale.js", "planetscale.js.wasm"), Sqlite("cfd1")) + exclude(MongoDb, MySQL, Vitess("planetscale.js", "planetscale.js.wasm")) )] mod one2one_opt { fn optional_with_default() -> String { @@ -217,7 +217,7 @@ mod one2one_opt { #[test_suite( suite = "setdefault_onD_1toM_req", - exclude(MongoDb, MySQL, Vitess("planetscale.js", "planetscale.js.wasm"), Sqlite("cfd1")) + exclude(MongoDb, MySQL, Vitess("planetscale.js", "planetscale.js.wasm")) )] mod one2many_req { fn required_with_default() -> String { @@ -321,7 +321,7 @@ mod one2many_req { #[test_suite( suite = "setdefault_onD_1toM_opt", - exclude(MongoDb, MySQL, Vitess("planetscale.js", "planetscale.js.wasm"), Sqlite("cfd1")) + exclude(MongoDb, MySQL, Vitess("planetscale.js", "planetscale.js.wasm")) )] mod one2many_opt { fn optional_with_default() -> String { diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_delete/set_null.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_delete/set_null.rs index 36c81fcb113..0dbdb895064 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_delete/set_null.rs +++ 
b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_delete/set_null.rs @@ -23,7 +23,7 @@ mod one2one_opt { } /// Deleting the parent suceeds and sets the FK null. - #[connector_test(exclude(Sqlite("cfd1")))] + #[connector_test] async fn delete_parent(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { createOneParent(data: { id: 1, child: { create: { id: 1 }}}) { id }}"#), @@ -63,7 +63,7 @@ mod one2one_opt { /// Deleting the parent suceeds and sets the FK null. /// Checks that it works even with different parent/child primary identifier names. - #[connector_test(schema(diff_id_name), exclude(Sqlite("cfd1")))] + #[connector_test(schema(diff_id_name))] async fn delete_parent_diff_id_name(runner: Runner) -> TestResult<()> { run_query!( &runner, @@ -111,7 +111,7 @@ mod one2one_opt { } // SET_NULL should also apply to child relations sharing a common fk - #[connector_test(schema(one2one2one_opt_set_null), exclude(Sqlite("cfd1")))] + #[connector_test(schema(one2one2one_opt_set_null))] async fn delete_parent_recurse_set_null(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(runner, r#"mutation { @@ -181,7 +181,7 @@ mod one2one_opt { } // SET_NULL should also apply to child relations sharing a common fk - #[connector_test(schema(one2one2one_opt_set_null_restrict), exclude(Sqlite("cfd1")))] + #[connector_test(schema(one2one2one_opt_set_null_restrict))] async fn delete_parent_set_null_restrict(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(runner, r#"mutation { @@ -246,11 +246,7 @@ mod one2one_opt { } // SET_NULL should also apply to child relations sharing a common fk - #[connector_test( - schema(one2one2one_opt_set_null_cascade), - exclude_features("relationJoins"), - exclude(Sqlite("cfd1")) - )] + #[connector_test(schema(one2one2one_opt_set_null_cascade), exclude_features("relationJoins"))] async fn delete_parent_set_null_cascade(runner: Runner) -> 
TestResult<()> { insta::assert_snapshot!( run_query!(runner, r#"mutation { @@ -320,7 +316,7 @@ mod one2many_opt { } /// Deleting the parent suceeds and sets the FK null. - #[connector_test(exclude(Sqlite("cfd1")))] + #[connector_test] async fn delete_parent(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { createOneParent(data: { id: 1, children: { create: { id: 1 }}}) { id }}"#), @@ -368,7 +364,7 @@ mod one2many_opt { } // Do not recurse when relations have no fks in common - #[connector_test(schema(prisma_17255_schema), exclude(Sqlite("cfd1")))] + #[connector_test(schema(prisma_17255_schema))] async fn prisma_17255(runner: Runner) -> TestResult<()> { run_query!( &runner, diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_update/cascade.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_update/cascade.rs index 858bc567ce2..99cd190e161 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_update/cascade.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_update/cascade.rs @@ -34,6 +34,12 @@ mod one2one_req { } #[connector_test(schema(required), exclude(Sqlite("cfd1")))] + /// On D1, this fails with: + /// + /// ```diff + /// - {"data":{"updateManyParent":{"count":1}}} + /// + {"data":{"updateManyParent":{"count":2}}} + /// ``` async fn update_parent_cascade(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { @@ -114,7 +120,7 @@ mod one2one_req { schema.to_owned() } - #[connector_test(schema(required_compound), exclude(Sqlite("cfd1")))] + #[connector_test(schema(required_compound))] async fn update_parent_compound_cascade(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(runner, r#"mutation { @@ -170,8 +176,14 @@ mod one2one_opt { schema.to_owned() } - // Updating the parent updates the child FK as well. 
#[connector_test(schema(optional), exclude(Sqlite("cfd1")))] + // Updating the parent updates the child FK as well. + // On D1, this fails with: + // + // ```diff + // - {"data":{"updateManyParent":{"count":1}}} + // + {"data":{"updateManyParent":{"count":2}}} + // ``` async fn update_parent_cascade(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { @@ -248,7 +260,7 @@ mod one2one_opt { schema.to_owned() } - #[connector_test(schema(optional_compound), exclude(Sqlite("cfd1")))] + #[connector_test(schema(optional_compound))] async fn update_parent_compound_cascade(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(runner, r#"mutation { @@ -292,7 +304,7 @@ mod one2one_opt { // Updating the parent updates the child FK as well. // Checks that it works even with different parent/child primary identifier names - #[connector_test(schema(diff_id_name), exclude(Sqlite("cfd1")))] + #[connector_test(schema(diff_id_name))] async fn update_parent_diff_id_name(runner: Runner) -> TestResult<()> { run_query!( &runner, @@ -342,7 +354,7 @@ mod one2many_req { } /// Updating the parent updates the child as well. - #[connector_test(exclude(Sqlite("cfd1")))] + #[connector_test] async fn update_parent(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { createOneParent(data: { id: 1, uniq: "1", children: { create: { id: 1 }}}) { id }}"#), @@ -387,7 +399,7 @@ mod one2many_opt { } /// Updating the parent updates the child as well. 
- #[connector_test(exclude(Sqlite("cfd1")))] + #[connector_test] async fn update_parent(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { createOneParent(data: { id: 1, uniq: "1", children: { create: { id: 1 }}}) { id }}"#), @@ -431,7 +443,7 @@ mod one2many_opt { schema.to_owned() } - #[connector_test(schema(optional_compound_uniq), exclude(Sqlite("cfd1")))] + #[connector_test(schema(optional_compound_uniq))] async fn update_compound_parent(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(runner, r#"mutation { @@ -456,7 +468,7 @@ mod one2many_opt { } } -#[test_suite(schema(schema), exclude(SqlServer, Sqlite("cfd1")), relation_mode = "prisma")] +#[test_suite(schema(schema), exclude(SqlServer), relation_mode = "prisma")] mod multiple_cascading_paths { use indoc::indoc; use query_engine_tests::run_query; diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_update/restrict.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_update/restrict.rs index 72c4521ba28..99c3c0b094d 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_update/restrict.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_update/restrict.rs @@ -31,7 +31,7 @@ mod one2one_req { } /// Updating the parent must fail if a child is connected. - #[connector_test(exclude(Sqlite("cfd1")))] + #[connector_test] async fn update_parent_failure(runner: Runner) -> TestResult<()> { create_test_data(&runner).await?; @@ -48,7 +48,7 @@ mod one2one_req { } /// Updating the parent must fail if a child is connected. - #[connector_test(exclude(Sqlite("cfd1")))] + #[connector_test] async fn update_many_parent_failure(runner: Runner) -> TestResult<()> { create_test_data(&runner).await?; @@ -65,7 +65,7 @@ mod one2one_req { } /// Updating the parent must fail if a child is connected. 
- #[connector_test(exclude(Sqlite("cfd1")))] + #[connector_test] async fn upsert_parent_failure(runner: Runner) -> TestResult<()> { create_test_data(&runner).await?; @@ -94,7 +94,7 @@ mod one2one_req { #[test_suite( suite = "restrict_onU_1to1_opt", schema(optional), - exclude(SqlServer, Sqlite("cfd1")), + exclude(SqlServer), relation_mode = "prisma" )] mod one2one_opt { @@ -135,7 +135,7 @@ mod one2one_opt { } /// Updating the parent must fail if a child is connected. - #[connector_test(exclude(Sqlite("cfd1")))] + #[connector_test] async fn update_many_parent_failure(runner: Runner) -> TestResult<()> { create_test_data(&runner).await?; @@ -181,7 +181,7 @@ mod one2one_opt { #[test_suite( suite = "restrict_onU_1toM_req", schema(required), - exclude(SqlServer, Sqlite("cfd1")), + exclude(SqlServer), relation_mode = "prisma" )] mod one2many_req { @@ -255,8 +255,13 @@ mod one2many_req { Ok(()) } + #[connector_test(exclude(Sqlite("cfd1")))] /// Updating the parent succeeds if no child is connected or if the linking fields aren't part of the update payload. - #[connector_test] + /// + /// ```diff + /// - {"data":{"updateManyParent":{"count":1}}} + /// + {"data":{"updateManyParent":{"count":2}}} + /// ``` async fn update_parent(runner: Runner) -> TestResult<()> { create_test_data(&runner).await?; run_query!( @@ -328,7 +333,7 @@ mod one2many_opt { } /// Updating the parent must fail if a child is connected. - #[connector_test(exclude(Sqlite("cfd1")))] + #[connector_test] async fn update_parent_failure(runner: Runner) -> TestResult<()> { create_test_data(&runner).await?; @@ -345,7 +350,7 @@ mod one2many_opt { } /// Updating the parent must fail if a child is connected. - #[connector_test(exclude(Sqlite("cfd1")))] + #[connector_test] async fn update_many_parent_failure(runner: Runner) -> TestResult<()> { create_test_data(&runner).await?; @@ -362,7 +367,7 @@ mod one2many_opt { } /// Updating the parent must fail if a child is connected. 
- #[connector_test(exclude(Sqlite("cfd1")))] + #[connector_test] async fn upsert_parent_failure(runner: Runner) -> TestResult<()> { create_test_data(&runner).await?; @@ -378,8 +383,13 @@ mod one2many_opt { Ok(()) } - /// Updating the parent succeeds if no child is connected or if the linking fields aren't part of the update payload. #[connector_test(exclude(Sqlite("cfd1")))] + /// Updating the parent succeeds if no child is connected or if the linking fields aren't part of the update payload. + /// + /// ```diff + /// - {"data":{"updateManyParent":{"count":1}}} + /// + {"data":{"updateManyParent":{"count":2}}} + /// ``` async fn update_parent(runner: Runner) -> TestResult<()> { create_test_data(&runner).await?; run_query!( diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_update/set_default.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_update/set_default.rs index 000df1abb6c..99c2ffb63a5 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_update/set_default.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_update/set_default.rs @@ -2,7 +2,7 @@ use indoc::indoc; use query_engine_tests::*; -#[test_suite(suite = "setdefault_onU_1to1_req", exclude(MongoDb, MySQL, Vitess, Sqlite("cfd1")))] +#[test_suite(suite = "setdefault_onU_1to1_req", exclude(MongoDb, MySQL, Vitess))] mod one2one_req { fn required_with_default() -> String { let schema = indoc! { @@ -105,7 +105,7 @@ mod one2one_req { } } -#[test_suite(suite = "setdefault_onU_1to1_opt", exclude(MongoDb, MySQL, Vitess, Sqlite("cfd1")))] +#[test_suite(suite = "setdefault_onU_1to1_opt", exclude(MongoDb, MySQL, Vitess))] mod one2one_opt { fn optional_with_default() -> String { let schema = indoc! 
{ @@ -210,7 +210,7 @@ mod one2one_opt { } } -#[test_suite(suite = "setdefault_onU_1toM_req", exclude(MongoDb, MySQL, Vitess, Sqlite("cfd1")))] +#[test_suite(suite = "setdefault_onU_1toM_req", exclude(MongoDb, MySQL, Vitess))] mod one2many_req { fn required_with_default() -> String { let schema = indoc! { @@ -313,7 +313,7 @@ mod one2many_req { } } -#[test_suite(suite = "setdefault_onU_1toM_opt", exclude(MongoDb, MySQL, Vitess, Sqlite("cfd1")))] +#[test_suite(suite = "setdefault_onU_1toM_opt", exclude(MongoDb, MySQL, Vitess))] mod one2many_opt { fn optional_with_default() -> String { let schema = indoc! { diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_update/set_null.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_update/set_null.rs index 02ce9e343c7..8ef0ab0d1e8 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_update/set_null.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/ref_actions/on_update/set_null.rs @@ -25,7 +25,7 @@ mod one2one_opt { } /// Updating the parent suceeds and sets the FK null. 
- #[connector_test(exclude(Sqlite("cfd1")))] + #[connector_test] async fn update_parent(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { createOneParent(data: { id: 1, uniq: "1", child: { create: { id: 1 }}}) { id }}"#), @@ -66,6 +66,12 @@ mod one2one_opt { } #[connector_test(exclude(Sqlite("cfd1")))] + // On D1, this fails with: + // + // ```diff + // - {"data":{"updateManyParent":{"count":1}}} + // + {"data":{"updateManyParent":{"count":2}}} + // ``` async fn update_many_parent(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { createOneParent(data: { id: 1, uniq: "1", child: { create: { id: 1 }}}) { id }}"#), @@ -112,7 +118,7 @@ mod one2one_opt { } // SET_NULL should recurse if there are relations sharing a common fk - #[connector_test(schema(one2one2one_opt_set_null), exclude(Sqlite("cfd1")))] + #[connector_test(schema(one2one2one_opt_set_null))] async fn update_parent_recurse_set_null(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(runner, r#"mutation { @@ -181,7 +187,7 @@ mod one2one_opt { } // SET_NULL should recurse if there are relations sharing a common fk - #[connector_test(schema(one2one2one_opt_restrict), exclude(SqlServer, Sqlite("cfd1")))] + #[connector_test(schema(one2one2one_opt_restrict), exclude(SqlServer))] async fn update_parent_recurse_restrict_failure(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(runner, r#"mutation { @@ -253,7 +259,7 @@ mod one2one_opt { } // SET_NULL should not recurse if there is no relation sharing a common fk - #[connector_test(schema(one2one2one_no_shared_fk), exclude(Sqlite("cfd1")))] + #[connector_test(schema(one2one2one_no_shared_fk))] async fn update_parent_no_recursion(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(runner, r#"mutation { @@ -318,7 +324,7 @@ mod one2one_opt { // Updating the parent updates the child FK as well. 
// Checks that it works even with different parent/child primary identifier names. - #[connector_test(schema(diff_id_name), exclude(Sqlite("cfd1")))] + #[connector_test(schema(diff_id_name))] async fn update_parent_diff_id_name(runner: Runner) -> TestResult<()> { run_query!( &runner, @@ -344,12 +350,7 @@ mod one2one_opt { } } -#[test_suite( - suite = "setnull_onU_1toM_opt", - schema(optional), - exclude(Sqlite("cfd1")), - relation_mode = "prisma" -)] +#[test_suite(suite = "setnull_onU_1toM_opt", schema(optional), relation_mode = "prisma")] mod one2many_opt { fn optional() -> String { let schema = indoc! { @@ -391,7 +392,7 @@ mod one2many_opt { } /// Updating the parent succeeds and sets the FK null. - #[connector_test(exclude(Sqlite("cfd1")))] + #[connector_test] async fn update_parent_nested(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { createOneParent(data: { id: 1, uniq: "1", children: { create: { id: 1 }}}) { id }}"#), @@ -411,7 +412,7 @@ mod one2many_opt { Ok(()) } - #[connector_test(exclude(Sqlite("cfd1")))] + #[connector_test] async fn upsert_parent(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { createOneParent(data: { id: 1, uniq: "1", children: { create: { id: 1 }}}) { id }}"#), @@ -431,7 +432,7 @@ mod one2many_opt { Ok(()) } - #[connector_test(exclude(Sqlite("cfd1")))] + #[connector_test] async fn upsert_parent_nested(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { createOneParent(data: { id: 1, uniq: "1", children: { create: { id: 1 }}}) { id }}"#), @@ -457,6 +458,12 @@ mod one2many_opt { } #[connector_test(exclude(Sqlite("cfd1")))] + // On D1, this fails with: + // + // ```diff + // - {"data":{"updateManyParent":{"count":1}}} + // + {"data":{"updateManyParent":{"count":2}}} + // ``` async fn update_many_parent(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { 
createOneParent(data: { id: 1, uniq: "1", children: { create: { id: 1 }}}) { id }}"#), @@ -500,7 +507,7 @@ mod one2many_opt { schema.to_owned() } - #[connector_test(schema(optional_compound_uniq), exclude(Sqlite("cfd1")))] + #[connector_test(schema(optional_compound_uniq))] async fn update_compound_parent(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(runner, r#"mutation { @@ -626,7 +633,7 @@ mod one2many_opt { } // SET_NULL should recurse if there are relations sharing a common fk - #[connector_test(schema(one2m2m_opt_restrict), exclude(SqlServer, Sqlite("cfd1")))] + #[connector_test(schema(one2m2m_opt_restrict), exclude(SqlServer))] async fn update_parent_recurse_restrict_failure(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(runner, r#"mutation { @@ -699,7 +706,7 @@ mod one2many_opt { } // SET_NULL should not recurse if there is no relation sharing a common fk - #[connector_test(schema(one2m2m_no_shared_fk), exclude(Sqlite("cfd1")))] + #[connector_test(schema(one2m2m_no_shared_fk))] async fn update_parent_no_recursion(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(runner, r#"mutation { @@ -786,7 +793,7 @@ mod one2many_opt { } // Relation fields with at least one shared compound should also be set to null - #[connector_test(schema(one2m2m_compound_opt_set_null), exclude(Sqlite("cfd1")))] + #[connector_test(schema(one2m2m_compound_opt_set_null))] async fn update_parent_compound_recurse(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(runner, r#"mutation { diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/max_integer.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/max_integer.rs index 1a0c50ac5ba..60d7ca96495 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/max_integer.rs +++ 
b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/max_integer.rs @@ -33,7 +33,7 @@ mod max_integer { const U32_OVERFLOW_MAX: i64 = (u32::MAX as i64) + 1; const OVERFLOW_MIN: i8 = -1; - #[connector_test(exclude(Sqlite("cfd1")))] + #[connector_test] async fn transform_gql_parser_too_large(runner: Runner) -> TestResult<()> { match runner.protocol() { query_engine_tests::EngineProtocol::Graphql => { @@ -115,7 +115,7 @@ mod max_integer { // The document parser does not crash on encountering an exponent-notation-serialized int. // This triggers a 2009 instead of 2033 as this is in the document parser. - #[connector_test(exclude(Sqlite("cfd1")))] + #[connector_test] async fn document_parser_no_crash_too_large(runner: Runner) -> TestResult<()> { assert_error!( runner, @@ -127,7 +127,7 @@ mod max_integer { Ok(()) } - #[connector_test(exclude(Sqlite("cfd1")))] + #[connector_test] async fn document_parser_no_crash_too_small(runner: Runner) -> TestResult<()> { assert_error!( runner, @@ -158,6 +158,11 @@ mod max_integer { // MongoDB is excluded because it automatically upcasts a value as an i64 if doesn't fit in an i32. // MySQL 5.6 is excluded because it never overflows but inserts the min or max of the range of the column type instead. // D1 doesn't fail. 
+ // + // On D1, this panics with + // ``` + // Expected result to return an error, but found success: {"data":{"createOneTest":{"id":1,"int":2147483648}}} + // ``` #[connector_test(exclude(MongoDb, MySql(5.6), Sqlite("cfd1")))] async fn unfitted_int_should_fail(runner: Runner) -> TestResult<()> { assert_error!( @@ -784,7 +789,7 @@ mod float_serialization_issues { schema.to_string() } - #[connector_test(exclude(SqlServer, Sqlite("cfd1")))] + #[connector_test(exclude(SqlServer))] async fn int_range_overlap_works(runner: Runner) -> TestResult<()> { runner .query("mutation { createOneTest(data: { id: 1, float: 1e20 }) { id float } }") diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_12572.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_12572.rs index fa332f099a6..35f056f8fa8 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_12572.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_12572.rs @@ -26,7 +26,7 @@ mod prisma_12572 { .to_owned() } - #[connector_test(exclude(Sqlite("cfd1")))] + #[connector_test] async fn all_generated_timestamps_are_the_same(runner: Runner) -> TestResult<()> { runner .query(r#"mutation { createOneTest1(data: {id:"one", test2s: { create: {id: "two"}}}) { id }}"#) diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_13089.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_13089.rs index ab2337c64e8..a7683b21fc1 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_13089.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_13089.rs @@ -1,6 +1,6 @@ use query_engine_tests::*; -#[test_suite(schema(schema), exclude(Sqlite("cfd1")))] +#[test_suite(schema(schema))] mod prisma_13097 { fn schema() -> String { r#" diff --git 
a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_14696.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_14696.rs index 7a93fbf1fd0..73227e0bb00 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_14696.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_14696.rs @@ -2,7 +2,7 @@ use query_engine_tests::*; // mongodb has very specific constraint on id fields // mssql fails with a multiple cascading referential actions paths error -#[test_suite(schema(schema), exclude(MongoDB, SqlServer, Sqlite("cfd1")))] +#[test_suite(schema(schema), exclude(MongoDB, SqlServer))] mod prisma_14696 { fn schema() -> String { include_str!("./prisma_14696.prisma").to_string() diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_15177.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_15177.rs index d1042249af3..a5ce0b6faa6 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_15177.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_15177.rs @@ -1,7 +1,7 @@ use indoc::indoc; use query_engine_tests::*; -#[test_suite(schema(schema), exclude(MongoDb, Sqlite("cfd1")))] +#[test_suite(schema(schema), exclude(MongoDb))] mod prisma_15177 { fn schema() -> String { let schema = indoc! 
{ diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_15581.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_15581.rs index f1d6f7ebc17..e042eb8c3d4 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_15581.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_15581.rs @@ -88,7 +88,7 @@ mod prisma_15581 { .to_owned() } - #[connector_test(schema(single_field_id_schema), exclude(Sqlite("cfd1")))] + #[connector_test(schema(single_field_id_schema))] async fn single_create_one_model_with_default_now_in_id(runner: Runner) -> TestResult<()> { run_query!( runner, diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/relation_load_strategy.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/relation_load_strategy.rs index 7b900414ee6..55acc7b3052 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/relation_load_strategy.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/relation_load_strategy.rs @@ -1,6 +1,6 @@ use query_engine_tests::*; -#[test_suite(schema(schema), exclude(Sqlite("cfd1")))] +#[test_suite(schema(schema))] mod relation_load_strategy { fn schema() -> String { indoc! 
{r#" @@ -139,7 +139,7 @@ mod relation_load_strategy { $query, $result, capabilities(CorrelatedSubqueries), - exclude(Mysql("5.6", "5.7", "mariadb"), Sqlite("cfd1")) + exclude(Mysql("5.6", "5.7", "mariadb")) ); relation_load_strategy_test!( [<$name _lateral>], diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/update_no_select.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/update_no_select.rs index 4c356e0575c..00405c7c4f4 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/update_no_select.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/update_no_select.rs @@ -6,7 +6,7 @@ mod update_with_no_select { include_str!("occ_simple.prisma").to_owned() } - #[connector_test(schema(occ_simple), exclude(Sqlite("cfd1")))] + #[connector_test(schema(occ_simple))] async fn update_with_no_select(mut runner: Runner) -> TestResult<()> { let create_one_resource = r#" mutation { diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/avg.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/avg.rs index c07d0b34a54..a155090c7d5 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/avg.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/avg.rs @@ -1,6 +1,6 @@ use query_engine_tests::*; -#[test_suite(schema(schemas::common_numeric_types), exclude(Sqlite("cfd1")))] +#[test_suite(schema(schemas::common_numeric_types))] mod aggregation_avg { use query_engine_tests::run_query; @@ -97,7 +97,7 @@ mod decimal_aggregation_avg { schema.to_owned() } - #[connector_test(exclude(Sqlite("cfd1")))] + #[connector_test] async fn avg_no_records(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!( @@ -110,7 +110,7 @@ mod decimal_aggregation_avg { Ok(()) } - #[connector_test(exclude(Sqlite("cfd1")))] + #[connector_test] async fn 
avg_some_records(runner: Runner) -> TestResult<()> { create_row(&runner, r#"{ id: 1, decimal: "5.5" }"#).await?; create_row(&runner, r#"{ id: 2, decimal: "4.5" }"#).await?; diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/combination_spec.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/combination_spec.rs index 1f9cb9af04f..46bdd77ddb5 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/combination_spec.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/combination_spec.rs @@ -1,6 +1,6 @@ use query_engine_tests::*; -#[test_suite(schema(schema), exclude(Sqlite("cfd1")))] +#[test_suite(schema(schema))] mod combinations { use indoc::indoc; use query_engine_tests::{assert_error, run_query}; @@ -337,7 +337,7 @@ mod decimal_combinations { Ok(()) } - #[connector_test(exclude(Sqlite("cfd1")))] + #[connector_test] async fn some_records(runner: Runner) -> TestResult<()> { create_row(&runner, r#"{ dec: "5.5" }"#).await?; create_row(&runner, r#"{ dec: "4.5" }"#).await?; diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/count.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/count.rs index 9d5b4fecda4..3d5572650c1 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/count.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/count.rs @@ -1,6 +1,6 @@ use query_engine_tests::*; -#[test_suite(schema(schemas::common_nullable_types), exclude(Sqlite("cfd1")))] +#[test_suite(schema(schemas::common_nullable_types))] mod aggregation_count { use query_engine_tests::run_query; diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/group_by.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/group_by.rs index 
8a9c329ecc1..e372c4525f0 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/group_by.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/group_by.rs @@ -1,6 +1,6 @@ use query_engine_tests::*; -#[test_suite(schema(schemas::numeric_text_optional_one2m), exclude(Sqlite("cfd1")))] +#[test_suite(schema(schemas::numeric_text_optional_one2m))] mod aggregation_group_by { use query_engine_tests::{assert_error, run_query}; diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/group_by_having.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/group_by_having.rs index 54ff7bba2af..545c44cfe41 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/group_by_having.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/group_by_having.rs @@ -53,6 +53,12 @@ mod aggr_group_by_having { } #[connector_test(exclude(Sqlite("cfd1")))] + // On D1, this fails with: + // + // ```diff + // - {"data":{"groupByTestModel":[{"string":"group1","_count":{"int":2}}]}} + // + {"data":{"groupByTestModel":[]}} + // ``` async fn having_count_scalar_filter(runner: Runner) -> TestResult<()> { create_row(&runner, r#"{ id: 1, int: 1, string: "group1" }"#).await?; create_row(&runner, r#"{ id: 2, int: 2, string: "group1" }"#).await?; @@ -128,6 +134,12 @@ mod aggr_group_by_having { } #[connector_test(exclude(Sqlite("cfd1")))] + // On D1, this fails with: + // + // ```diff + // - {"data":{"groupByTestModel":[{"string":"group1","_sum":{"float":16.0,"int":16}}]}} + // + {"data":{"groupByTestModel":[]}} + // ``` async fn having_sum_scalar_filter(runner: Runner) -> TestResult<()> { create_row(&runner, r#"{ id: 1, float: 10, int: 10, string: "group1" }"#).await?; create_row(&runner, r#"{ id: 2, float: 6, int: 6, string: "group1" }"#).await?; @@ -197,6 +209,12 @@ mod aggr_group_by_having { } 
#[connector_test(exclude(Sqlite("cfd1")))] + // On D1, this fails with: + // + // ```diff + // - {"data":{"groupByTestModel":[{"string":"group1","_min":{"float":0.0,"int":0}},{"string":"group2","_min":{"float":0.0,"int":0}}]}} + // + {"data":{"groupByTestModel":[]}} + // ``` async fn having_min_scalar_filter(runner: Runner) -> TestResult<()> { create_row(&runner, r#"{ id: 1, float: 10, int: 10, string: "group1" }"#).await?; create_row(&runner, r#"{ id: 2, float: 0, int: 0, string: "group1" }"#).await?; @@ -265,6 +283,12 @@ mod aggr_group_by_having { } #[connector_test(exclude(Sqlite("cfd1")))] + // On D1, this fails with: + // + // ```diff + // - {"data":{"groupByTestModel":[{"string":"group1","_max":{"float":10.0,"int":10}},{"string":"group2","_max":{"float":10.0,"int":10}}]}} + // + {"data":{"groupByTestModel":[]}} + // ``` async fn having_max_scalar_filter(runner: Runner) -> TestResult<()> { create_row(&runner, r#"{ id: 1, float: 10, int: 10, string: "group1" }"#).await?; create_row(&runner, r#"{ id: 2, float: 0, int: 0, string: "group1" }"#).await?; @@ -333,6 +357,12 @@ mod aggr_group_by_having { } #[connector_test(exclude(Sqlite("cfd1")))] + // On D1, this fails with: + // + // ```diff + // - {"data":{"groupByTestModel":[{"string":"group1","_count":{"string":2}}]}} + // + {"data":{"groupByTestModel":[]}} + // ``` async fn having_count_non_numerical_field(runner: Runner) -> TestResult<()> { create_row(&runner, r#"{ id: 1, float: 10, int: 10, string: "group1" }"#).await?; create_row(&runner, r#"{ id: 2, float: 0, int: 0, string: "group1" }"#).await?; @@ -351,6 +381,15 @@ mod aggr_group_by_having { } #[connector_test(exclude(Sqlite("cfd1")))] + // On D1, this panics with: + // + // ``` + // assertion `left == right` failed: Query result: {"data":{"groupByTestModel":[]}} is not part of the expected results: ["{\"data\":{\"groupByTestModel\":[{\"string\":\"group1\"},{\"string\":\"group2\"}]}}", 
"{\"data\":{\"groupByTestModel\":[{\"string\":\"group2\"},{\"string\":\"group1\"}]}}"] for connector SQLite (cfd1) + // left: false + // right: true + // note: run with `RUST_BACKTRACE=1` environment variable to display a backtrace + // FAILED + // ``` async fn having_without_aggr_sel(runner: Runner) -> TestResult<()> { create_row(&runner, r#"{ id: 1, float: 10, int: 10, string: "group1" }"#).await?; create_row(&runner, r#"{ id: 2, float: 0, int: 0, string: "group1" }"#).await?; @@ -394,7 +433,7 @@ mod aggr_group_by_having { /// Error cases - #[connector_test(exclude(Sqlite("cfd1")))] + #[connector_test] async fn having_filter_mismatch_selection(runner: Runner) -> TestResult<()> { assert_error!( runner, diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/many_count_relation.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/many_count_relation.rs index 2b3ca88edb6..312463f19b1 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/many_count_relation.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/many_count_relation.rs @@ -143,7 +143,7 @@ mod many_count_rel { } // Counting with skip should not affect the count - #[connector_test(exclude(Sqlite("cfd1")))] + #[connector_test] async fn count_with_skip(runner: Runner) -> TestResult<()> { // 4 comment / 4 categories create_row( @@ -201,7 +201,7 @@ mod many_count_rel { } // Counting with distinct should not affect the count - #[connector_test(exclude(Sqlite("cfd1")))] + #[connector_test] async fn count_with_distinct(runner: Runner) -> TestResult<()> { create_row( &runner, @@ -272,7 +272,7 @@ mod many_count_rel { } // Counting nested one2m and m2m should work - #[connector_test(schema(schema_nested), exclude(Sqlite("cfd1")))] + #[connector_test(schema(schema_nested))] async fn nested_count_one2m_m2m(runner: Runner) -> TestResult<()> { run_query!( &runner, @@ -619,11 
+619,7 @@ mod many_count_rel { } // Regression test for: https://github.com/prisma/prisma/issues/7299 - #[connector_test( - schema(schema_one2m_multi_fks), - capabilities(CompoundIds), - exclude(CockroachDb, Sqlite("cfd1")) - )] + #[connector_test(schema(schema_one2m_multi_fks), capabilities(CompoundIds), exclude(CockroachDb))] async fn count_one2m_compound_ids(runner: Runner) -> TestResult<()> { run_query!( runner, diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/sum.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/sum.rs index 6fa28e7d129..59a89cdff93 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/sum.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/sum.rs @@ -94,7 +94,7 @@ mod decimal_aggregation_sum { schema.to_owned() } - #[connector_test(exclude(Sqlite("cfd1")))] + #[connector_test] async fn sum_no_records(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, "query { aggregateTestModel { _sum { decimal } } }"), diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/uniq_count_relation.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/uniq_count_relation.rs index 409b360e6b9..45c49150e47 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/uniq_count_relation.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/aggregation/uniq_count_relation.rs @@ -114,7 +114,7 @@ mod uniq_count_rel { } // Counting with take should not affect the count - #[connector_test(exclude(Sqlite("cfd1")))] + #[connector_test] async fn count_with_take(runner: Runner) -> TestResult<()> { // 4 comment / 4 categories create_row( @@ -172,7 +172,7 @@ mod uniq_count_rel { } // Counting with filters should not affect the count - 
#[connector_test(exclude(Sqlite("cfd1")))] + #[connector_test] async fn count_with_filters(runner: Runner) -> TestResult<()> { // 4 comment / 4 categories create_row( diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/batching/select_one_compound.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/batching/select_one_compound.rs index b8fcf86c13d..ed94e4487bf 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/batching/select_one_compound.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/batching/select_one_compound.rs @@ -191,7 +191,7 @@ mod compound_batch { Ok(()) } - #[connector_test(exclude(Sqlite("cfd1")))] + #[connector_test] async fn two_equal_queries(runner: Runner) -> TestResult<()> { create_test_data(&runner).await?; @@ -237,7 +237,7 @@ mod compound_batch { } // Ensures non compactable batch are not compacted - #[connector_test(schema(should_batch_schema), exclude(Sqlite("cfd1")))] + #[connector_test(schema(should_batch_schema))] async fn should_only_batch_if_possible(runner: Runner) -> TestResult<()> { runner .query( diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/batching/select_one_singular.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/batching/select_one_singular.rs index 373d748ed79..257620e1bdb 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/batching/select_one_singular.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/batching/select_one_singular.rs @@ -92,7 +92,7 @@ mod singular_batch { } // "Two successful queries and one failing with different selection set" should "work" - #[connector_test(exclude(Sqlite("cfd1")))] + #[connector_test] async fn two_success_one_fail_diff_set(runner: Runner) -> TestResult<()> { create_test_data(&runner).await?; @@ -143,7 +143,7 @@ mod singular_batch { Ok(()) } - 
#[connector_test(exclude(Sqlite("cfd1")))] + #[connector_test] async fn relation_traversal_filtered(runner: Runner) -> TestResult<()> { create_test_data(&runner).await?; @@ -268,7 +268,7 @@ mod singular_batch { } // Regression test for https://github.com/prisma/prisma/issues/18096 - #[connector_test(schema(bigint_id), exclude(Sqlite("cfd1")))] + #[connector_test(schema(bigint_id))] async fn batch_bigint_id(runner: Runner) -> TestResult<()> { run_query!(&runner, r#"mutation { createOneTestModel(data: { id: 1 }) { id } }"#); run_query!(&runner, r#"mutation { createOneTestModel(data: { id: 2 }) { id } }"#); diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/batching/transactional_batch.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/batching/transactional_batch.rs index 3ab21e9742e..0130b3ee710 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/batching/transactional_batch.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/batching/transactional_batch.rs @@ -1,6 +1,6 @@ use query_engine_tests::*; -#[test_suite(schema(schema), exclude(Sqlite("cfd1")))] +#[test_suite(schema(schema))] mod transactional { use indoc::indoc; use query_engine_tests::run_query; @@ -44,7 +44,13 @@ mod transactional { Ok(()) } - #[connector_test] + #[connector_test(exclude(Sqlite("cfd1")))] + // On D1, this fails with: + // + // ```diff + // - {"data":{"findManyModelA":[]}} + // + {"data":{"findManyModelA":[{"id":1}]}} + // ``` async fn one_success_one_fail(runner: Runner) -> TestResult<()> { let queries = vec![ r#"mutation { createOneModelA(data: { id: 1 }) { id }}"#.to_string(), @@ -77,7 +83,13 @@ mod transactional { Ok(()) } - #[connector_test] + #[connector_test(exclude(Sqlite("cfd1")))] + // On D1, this fails with: + // + // ```diff + // - {"data":{"findManyModelB":[]}} + // + {"data":{"findManyModelB":[{"id":1}]}} + // ``` async fn one_query(runner: Runner) -> TestResult<()> { // 
Existing ModelA in the DB will prevent the nested ModelA creation in the batch. insta::assert_snapshot!( diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/data_types/through_relation.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/data_types/through_relation.rs index 00fe015b9dc..ea3cf546047 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/data_types/through_relation.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/data_types/through_relation.rs @@ -182,6 +182,14 @@ mod scalar_relations { } #[connector_test(schema(schema_decimal), capabilities(DecimalType), exclude(Sqlite("cfd1")))] + // On D1, this fails with: + // + // ```diff + // - {"data":{"findManyParent":[{"id":1,"children":[{"childId":1,"dec":"1"},{"childId":2,"dec":"-1"},{"childId":3,"dec":"123.4567891"},{"childId":4,"dec":"95993.57"}]}]}} + // + {"data":{"findManyParent":[{"id":1,"children":[{"childId":1,"dec":"1"},{"childId":2,"dec":"-1"},{"childId":3,"dec":"123.4567891"},{"childId":4,"dec":"95993.57000000001"}]}]}} + // ``` + // + // Basically, decimals are treated as doubles (and lose precision) due to D1 not providing column type information on queries. 
async fn decimal_type(runner: Runner) -> TestResult<()> { create_child(&runner, r#"{ childId: 1, dec: "1" }"#).await?; create_child(&runner, r#"{ childId: 2, dec: "-1" }"#).await?; diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/self_relation.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/self_relation.rs index 483e16e3bfe..1b3cd11df1f 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/self_relation.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/filters/self_relation.rs @@ -43,8 +43,12 @@ mod self_relation_filters { schema.to_owned() } - // Filter Queries along self relations should succeed with one level. #[connector_test(exclude(SqlServer, Sqlite("cfd1")))] + // Filter Queries along self relations should succeed with one level. + // On D1, this test fails with a panic: + // ``` + // {"errors":[{"error":"RecordNotFound(\"Expected 1 records to be connected after connect operation on one-to-many relation 'Cuckoo', found 4.\")","user_facing_error":{"is_panic":false,"message":"The required connected records were not found. 
Expected 1 records to be connected after connect operation on one-to-many relation 'Cuckoo', found 4.","meta":{"details":"Expected 1 records to be connected after connect operation on one-to-many relation 'Cuckoo', found 4."},"error_code":"P2018"}}]} + // ``` async fn l1_query(runner: Runner) -> TestResult<()> { test_data(&runner).await?; diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/order_and_pagination/order_by_aggregation.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/order_and_pagination/order_by_aggregation.rs index 45f4b21048b..744d26e7b56 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/order_and_pagination/order_by_aggregation.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/order_and_pagination/order_by_aggregation.rs @@ -1,6 +1,6 @@ use query_engine_tests::*; -#[test_suite(schema(schema), exclude(Sqlite("cfd1")))] +#[test_suite(schema(schema))] mod order_by_aggr { use indoc::indoc; use query_engine_tests::{match_connector_result, run_query}; @@ -33,7 +33,13 @@ mod order_by_aggr { schema.to_owned() } - #[connector_test] + #[connector_test(exclude(Sqlite("cfd1")))] + // On D1, this fails with: + // + // ```diff + // - {"data":{"findManyUser":[{"id":3,"posts":[]},{"id":1,"posts":[{"title":"alice_post_1"}]},{"id":2,"posts":[{"title":"bob_post_1"},{"title":"bob_post_2"}]}]}} + // + {"data":{"findManyUser":[{"id":1,"posts":[{"title":"alice_post_1"}]},{"id":2,"posts":[{"title":"bob_post_1"},{"title":"bob_post_2"}]},{"id":3,"posts":[]}]}} + // ``` async fn one2m_count_asc(runner: Runner) -> TestResult<()> { create_test_data(&runner).await?; @@ -52,7 +58,13 @@ mod order_by_aggr { Ok(()) } - #[connector_test] + #[connector_test(exclude(Sqlite("cfd1")))] + // On D1, this fails with: + // + // ```diff + // - 
{"data":{"findManyUser":[{"id":2,"posts":[{"title":"bob_post_1"},{"title":"bob_post_2"}]},{"id":1,"posts":[{"title":"alice_post_1"}]},{"id":3,"posts":[]}]}} + // + {"data":{"findManyUser":[{"id":3,"posts":[]},{"id":2,"posts":[{"title":"bob_post_1"},{"title":"bob_post_2"}]},{"id":1,"posts":[{"title":"alice_post_1"}]}]}} + // ``` async fn one2m_count_desc(runner: Runner) -> TestResult<()> { create_test_data(&runner).await?; @@ -109,7 +121,13 @@ mod order_by_aggr { Ok(()) } - #[connector_test] + #[connector_test(exclude(Sqlite("cfd1")))] + // On D1, this fails with: + // + // ```diff + // - {"data":{"findManyUser":[{"id":3,"name":"Motongo","posts":[]},{"id":1,"name":"Alice","posts":[{"title":"alice_post_1"}]},{"id":2,"name":"Bob","posts":[{"title":"bob_post_1"},{"title":"bob_post_2"}]}]}} + // + {"data":{"findManyUser":[{"id":1,"name":"Alice","posts":[{"title":"alice_post_1"}]},{"id":2,"name":"Bob","posts":[{"title":"bob_post_1"},{"title":"bob_post_2"}]},{"id":3,"name":"Motongo","posts":[]}]}} + // ``` async fn one2m_count_asc_field_asc(runner: Runner) -> TestResult<()> { create_test_data(&runner).await?; @@ -438,7 +456,13 @@ mod order_by_aggr { // With pagination tests // "[Cursor] Ordering by one2m count asc" should "work" - #[connector_test] + #[connector_test(exclude(Sqlite("cfd1")))] + // On D1, this fails with: + // + // ```diff + // - {"data":{"findManyUser":[{"id":1,"posts":[{"id":1}]},{"id":2,"posts":[{"id":2},{"id":3}]}]}} + // + {"data":{"findManyUser":[{"id":1,"posts":[{"id":1}]},{"id":2,"posts":[{"id":2},{"id":3}]},{"id":3,"posts":[]}]}} + // ``` async fn cursor_one2m_count_asc(runner: Runner) -> TestResult<()> { create_test_data(&runner).await?; @@ -458,7 +482,13 @@ mod order_by_aggr { } // "[Cursor] Ordering by one2m count desc" should "work" - #[connector_test] + #[connector_test(exclude(Sqlite("cfd1")))] + // On D1, this fails with: + // + // ```diff + // - {"data":{"findManyUser":[{"id":1,"posts":[{"id":1}]},{"id":2,"posts":[{"id":2},{"id":3}]}]}} + 
// + {"data":{"findManyUser":[{"id":1,"posts":[{"id":1}]},{"id":2,"posts":[{"id":2},{"id":3}]},{"id":3,"posts":[]}]}} + // ``` async fn cursor_one2m_count_desc(runner: Runner) -> TestResult<()> { create_test_data(&runner).await?; @@ -520,7 +550,13 @@ mod order_by_aggr { } // "[Cursor][Combo] Ordering by one2m count asc + field asc" - #[connector_test] + #[connector_test(exclude(Sqlite("cfd1")))] + // On D1, this fails with: + // + // ```diff + // - {"data":{"findManyUser":[{"id":2,"name":"Bob","posts":[{"title":"bob_post_1"},{"title":"bob_post_2"}]}]}} + // + {"data":{"findManyUser":[{"id":2,"name":"Bob","posts":[{"title":"bob_post_1"},{"title":"bob_post_2"}]},{"id":3,"name":"Motongo","posts":[]}]}} + // ``` async fn cursor_one2m_count_asc_field_asc(runner: Runner) -> TestResult<()> { create_test_data(&runner).await?; @@ -769,8 +805,14 @@ mod order_by_aggr { schema.to_owned() } + #[connector_test(schema(schema_regression_8036), exclude(Sqlite("cfd1")))] // Regression test for: // https://github.com/prisma/prisma/issues/8036 - #[connector_test(schema(schema_regression_8036))] + // On D1, this fails with: + // + // ```diff + // - {"data":{"findManyPost":[{"id":2,"title":"Second","_count":{"LikedPeople":0}},{"id":3,"title":"Third","_count":{"LikedPeople":0}},{"id":4,"title":"Fourth","_count":{"LikedPeople":0}},{"id":5,"title":"Fifth","_count":{"LikedPeople":0}}]}} + // + {"data":{"findManyPost":[]}} + // ``` async fn count_m2m_records_not_connected(runner: Runner) -> TestResult<()> { run_query!( runner, diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/data_types/bigint.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/data_types/bigint.rs index 0eb8ca4b7d3..9f158e37d31 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/data_types/bigint.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/data_types/bigint.rs @@ -1,6 +1,6 @@ use query_engine_tests::*; 
-#[test_suite(schema(schema), exclude(Sqlite("cfd1")))] +#[test_suite(schema(schema))] mod bigint { use indoc::indoc; use query_engine_tests::run_query; @@ -16,8 +16,14 @@ mod bigint { schema.to_owned() } - // "Using a BigInt field" should "work" - #[connector_test] + #[connector_test(exclude(Sqlite("cfd1")))] + // "Using a BigInt field" should "work". + // On D1, this fails with: + // + // ```diff + // - {"data":{"createOneModel":{"field":"123456789012341234"}}} + // + {"data":{"createOneModel":{"field":"123456789012341200"}}} + // ``` async fn using_bigint_field(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_connect_inside_create.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_connect_inside_create.rs index e541a58fb9d..0fdc90e8376 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_connect_inside_create.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_connect_inside_create.rs @@ -1,6 +1,6 @@ use query_engine_tests::*; -#[test_suite(exclude(Sqlite("cfd1")))] +#[test_suite] mod connect_inside_create { use indoc::indoc; use query_engine_tests::{assert_error, run_query, run_query_json, DatamodelWithParams}; diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_connect_inside_update.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_connect_inside_update.rs index d5981424346..eb5934aa457 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_connect_inside_update.rs +++ 
b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_connect_inside_update.rs @@ -6,7 +6,7 @@ mod connect_inside_update { use query_test_macros::relation_link_test; // "a P1 to C1 relation with the child already in a relation" should "be connectable through a nested mutation if the child is already in a relation" - #[relation_link_test(on_parent = "ToOneOpt", on_child = "ToOneOpt", exclude(SqlServer, Sqlite("cfd1")))] + #[relation_link_test(on_parent = "ToOneOpt", on_child = "ToOneOpt", exclude(SqlServer))] async fn p1_c1_child_in_rel_connect_mut(runner: &Runner, t: &DatamodelWithParams) -> TestResult<()> { let loose_child = t.child().parse( run_query_json!( @@ -166,7 +166,7 @@ mod connect_inside_update { } // "a P1 to C1 relation with the child without a relation" should "be connectable through a nested mutation" - #[relation_link_test(on_parent = "ToOneOpt", on_child = "ToOneOpt", exclude(SqlServer, Sqlite("cfd1")))] + #[relation_link_test(on_parent = "ToOneOpt", on_child = "ToOneOpt", exclude(SqlServer))] async fn p1_c1_child_wo_rel_connect_mut(runner: &Runner, t: &DatamodelWithParams) -> TestResult<()> { let child = t.child().parse( run_query_json!( @@ -218,7 +218,7 @@ mod connect_inside_update { } // "a P1 to C1 relation with the parent without a relation" should "be connectable through a nested mutation" - #[relation_link_test(on_parent = "ToOneOpt", on_child = "ToOneOpt", exclude(SqlServer, Sqlite("cfd1")))] + #[relation_link_test(on_parent = "ToOneOpt", on_child = "ToOneOpt", exclude(SqlServer))] async fn p1_c1_parnt_wo_rel_connect_mut(runner: &Runner, t: &DatamodelWithParams) -> TestResult<()> { let parent = t.parent().parse( run_query_json!( @@ -1142,10 +1142,7 @@ mod connect_inside_update { // Regression test for https://github.com/prisma/prisma/issues/18173 // Excluded on MongoDB because all models require an @id attribute // Excluded on SQLServer because models with unique nulls can't have 
multiple NULLs, unlike other dbs. - #[connector_test( - schema(p1_c1_child_compound_unique_schema), - exclude(MongoDb, SqlServer, Sqlite("cfd1")) - )] + #[connector_test(schema(p1_c1_child_compound_unique_schema), exclude(MongoDb, SqlServer))] async fn p1_c1_child_compound_unique(runner: Runner) -> TestResult<()> { run_query!(&runner, r#"mutation { createOneParent(data: { id: 1 }) { id } }"#); run_query!( diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_create_inside_create.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_create_inside_create.rs index 79bc385497a..bde7a0a9003 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_create_inside_create.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_create_inside_create.rs @@ -1,7 +1,7 @@ use query_engine_tests::*; // TODO(dom): All failings except one (only a couple of tests is failing per test) -#[test_suite(exclude(Sqlite("cfd1")))] +#[test_suite] mod create_inside_create { use query_engine_tests::{run_query, DatamodelWithParams}; use query_test_macros::relation_link_test; diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_create_inside_update.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_create_inside_update.rs index aa1784fee89..a1e8b43c83a 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_create_inside_update.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_create_inside_update.rs @@ -2,7 +2,7 @@ use query_engine_tests::*; //TODO: which tests to keep and which ones 
to delete???? Some do not really test the compound unique functionality // TODO(dom): All failing except one -#[test_suite(exclude(Sqlite("cfd1")))] +#[test_suite] mod create_inside_update { use query_engine_tests::{assert_error, run_query, run_query_json, DatamodelWithParams}; use query_test_macros::relation_link_test; diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_delete_inside_update.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_delete_inside_update.rs index b221a0ac79c..4a24dddbade 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_delete_inside_update.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_delete_inside_update.rs @@ -782,7 +782,7 @@ mod delete_inside_update { // ---------------------------------- // "a P1 to CM relation " should "work" - #[relation_link_test(on_parent = "ToOneOpt", on_child = "ToMany", exclude(SqlServer, Sqlite("cfd1")))] + #[relation_link_test(on_parent = "ToOneOpt", on_child = "ToMany", exclude(SqlServer))] async fn p1_cm_by_id_should_work(runner: &Runner, t: &DatamodelWithParams) -> TestResult<()> { let parent = t.parent().parse( run_query_json!( @@ -828,7 +828,7 @@ mod delete_inside_update { } // "a P1 to CM relation "should "work" - #[relation_link_test(on_parent = "ToOneOpt", on_child = "ToMany", exclude(SqlServer, Sqlite("cfd1")))] + #[relation_link_test(on_parent = "ToOneOpt", on_child = "ToMany", exclude(SqlServer))] async fn p1_cm_by_id_and_filters_should_work(runner: &Runner, t: &DatamodelWithParams) -> TestResult<()> { let parent = t.parent().parse( run_query_json!( @@ -912,7 +912,7 @@ mod delete_inside_update { } // "a P1 to CM relation" should "error if the node is connected but the additional filters don't match it" - 
#[relation_link_test(on_parent = "ToOneOpt", on_child = "ToMany", exclude(SqlServer, Sqlite("cfd1")))] + #[relation_link_test(on_parent = "ToOneOpt", on_child = "ToMany", exclude(SqlServer))] async fn p1_cm_error_if_filter_not_match(runner: &Runner, t: &DatamodelWithParams) -> TestResult<()> { let parent = t.parent().parse( run_query_json!( diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_delete_inside_upsert.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_delete_inside_upsert.rs index 247592890df..814d176681b 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_delete_inside_upsert.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_delete_inside_upsert.rs @@ -1,6 +1,6 @@ use query_engine_tests::*; -#[test_suite(exclude(Sqlite("cfd1")))] +#[test_suite] mod delete_inside_upsert { use query_engine_tests::{assert_error, run_query, run_query_json, DatamodelWithParams}; use query_test_macros::relation_link_test; diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_delete_many_inside_update.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_delete_many_inside_update.rs index 17ce16cb09c..7516bfc1220 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_delete_many_inside_update.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_delete_many_inside_update.rs @@ -1,6 +1,6 @@ use query_engine_tests::*; -#[test_suite(exclude(Sqlite("cfd1")))] +#[test_suite] mod delete_many_inside_update { use query_engine_tests::{assert_error, run_query, 
run_query_json}; use query_test_macros::relation_link_test; diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_disconnect_inside_update.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_disconnect_inside_update.rs index e608d02217b..18ea1fa0d4b 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_disconnect_inside_update.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_disconnect_inside_update.rs @@ -1,6 +1,6 @@ use query_engine_tests::*; -#[test_suite(exclude(Sqlite("cfd1")))] +#[test_suite] mod disconnect_inside_update { use query_engine_tests::{assert_error, run_query, run_query_json, DatamodelWithParams}; use query_test_macros::relation_link_test; diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_disconnect_inside_upsert.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_disconnect_inside_upsert.rs index 12b22ecb464..c35f9c3e0b7 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_disconnect_inside_upsert.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_disconnect_inside_upsert.rs @@ -1,6 +1,6 @@ use query_engine_tests::*; -#[test_suite(exclude(Sqlite("cfd1")))] +#[test_suite] mod disconnect_inside_upsert { use query_engine_tests::{assert_error, run_query, run_query_json}; use query_test_macros::relation_link_test; @@ -116,7 +116,7 @@ mod disconnect_inside_upsert { // "a P1 to C1 relation " should "be disconnectable through a nested mutation by id" // TODO: MongoDB doesn't support joins on top-level updates. 
It should be un-excluded once we fix that. - #[relation_link_test(on_parent = "ToOneOpt", on_child = "ToOneOpt", exclude(MongoDb, Sqlite("cfd1")))] + #[relation_link_test(on_parent = "ToOneOpt", on_child = "ToOneOpt", exclude(MongoDb))] async fn p1_c1_by_fails_if_filter_no_match(runner: &Runner, t: &DatamodelWithParams) -> TestResult<()> { let res = run_query_json!( runner, diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_set_inside_update.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_set_inside_update.rs index 56906b8171a..15368192227 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_set_inside_update.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_set_inside_update.rs @@ -1,6 +1,6 @@ use query_engine_tests::*; -#[test_suite(exclude(Sqlite("cfd1")))] +#[test_suite] mod set_inside_update { use query_engine_tests::{run_query, run_query_json, DatamodelWithParams}; use query_test_macros::relation_link_test; diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_update_many_inside_update.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_update_many_inside_update.rs index f7bb65d5cac..4a42911d989 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_update_many_inside_update.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_update_many_inside_update.rs @@ -1,6 +1,6 @@ use query_engine_tests::*; -#[test_suite(exclude(Sqlite("cfd1")))] +#[test_suite] // update_many_inside_update mod um_inside_update { use 
query_engine_tests::{assert_error, run_query, run_query_json, DatamodelWithParams, Runner, TestResult}; diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_upsert_inside_update.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_upsert_inside_update.rs index b45a9ac95fc..b71142d71b6 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_upsert_inside_update.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/already_converted/nested_upsert_inside_update.rs @@ -1,6 +1,6 @@ use query_engine_tests::*; -#[test_suite(exclude(Sqlite("cfd1")))] +#[test_suite] mod upsert_inside_update { use query_engine_tests::{run_query, run_query_json}; use query_test_macros::relation_link_test; diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/combining_different_nested_mutations.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/combining_different_nested_mutations.rs index 66096a1a743..9c92f51db25 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/combining_different_nested_mutations.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/combining_different_nested_mutations.rs @@ -1,6 +1,6 @@ use query_engine_tests::*; -#[test_suite(exclude(Sqlite("cfd1")))] +#[test_suite] mod many_nested_muts { use query_engine_tests::{run_query, DatamodelWithParams}; use query_test_macros::relation_link_test; diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/nested_atomic_number_ops.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/nested_atomic_number_ops.rs index 9c793cf2e2d..c325fccb6d6 100644 --- 
a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/nested_atomic_number_ops.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/nested_atomic_number_ops.rs @@ -25,7 +25,7 @@ mod atomic_number_ops { } // "An updateOne mutation with number operations on the top and updates on the child (inl. child)" should "handle id changes correctly" - #[connector_test(schema(schema_1), capabilities(UpdateableId), exclude(Sqlite("cfd1")))] + #[connector_test(schema(schema_1), capabilities(UpdateableId))] async fn update_number_ops_on_child(runner: Runner) -> TestResult<()> { run_query!( &runner, @@ -110,7 +110,7 @@ mod atomic_number_ops { } //"An updateOne mutation with number operations on the top and updates on the child (inl. parent)" should "handle id changes correctly" - #[connector_test(schema(schema_2), capabilities(UpdateableId), exclude(Sqlite("cfd1")))] + #[connector_test(schema(schema_2), capabilities(UpdateableId))] async fn update_number_ops_on_parent(runner: Runner) -> TestResult<()> { run_query!( &runner, @@ -195,7 +195,7 @@ mod atomic_number_ops { } // "A nested updateOne mutation" should "correctly apply all number operations for Int" - #[connector_test(schema(schema_3), exclude(CockroachDb, Sqlite("cfd1")))] + #[connector_test(schema(schema_3), exclude(CockroachDb))] async fn nested_update_int_ops(runner: Runner) -> TestResult<()> { create_test_model(&runner, 1, None, None).await?; create_test_model(&runner, 2, Some(3), None).await?; @@ -324,7 +324,7 @@ mod atomic_number_ops { } // "A nested updateOne mutation" should "correctly apply all number operations for Int" - #[connector_test(schema(schema_3), exclude(MongoDb, Sqlite("cfd1")))] + #[connector_test(schema(schema_3), exclude(MongoDb))] async fn nested_update_float_ops(runner: Runner) -> TestResult<()> { create_test_model(&runner, 1, None, None).await?; create_test_model(&runner, 2, None, Some("5.5")).await?; diff --git 
a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/not_using_schema_base/nested_connect_inside_upsert.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/not_using_schema_base/nested_connect_inside_upsert.rs index 939d6d6ad99..23aecbe1ab2 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/not_using_schema_base/nested_connect_inside_upsert.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/not_using_schema_base/nested_connect_inside_upsert.rs @@ -1,6 +1,6 @@ use query_engine_tests::*; -#[test_suite(schema(schema), exclude(Sqlite("cfd1")))] +#[test_suite(schema(schema))] mod connect_inside_upsert { use indoc::indoc; use query_engine_tests::{assert_error, run_query, run_query_json}; diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/not_using_schema_base/nested_connect_or_create.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/not_using_schema_base/nested_connect_or_create.rs index 54e29a1a414..db3a2d47efa 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/not_using_schema_base/nested_connect_or_create.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/not_using_schema_base/nested_connect_or_create.rs @@ -3,7 +3,7 @@ use query_engine_tests::*; // Note: Except for m:n cases that are always resolved using the primary identifier of the models, we use different // relation links to ensure that the underlying QE logic correctly uses link resolvers instead of // only primary id resolvers. 
-#[test_suite(exclude(Sqlite("cfd1")))] +#[test_suite] mod connect_or_create { use indoc::indoc; use query_engine_tests::run_query; diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/not_using_schema_base/nested_create_many.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/not_using_schema_base/nested_create_many.rs index dd2901b144e..3cd6be2eabe 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/not_using_schema_base/nested_create_many.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/not_using_schema_base/nested_create_many.rs @@ -1,6 +1,6 @@ use query_engine_tests::*; -#[test_suite(schema(schema), exclude(Sqlite("cfd1")))] +#[test_suite(schema(schema))] mod nested_create_many { use indoc::indoc; use query_engine_tests::{assert_error, run_query}; diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/not_using_schema_base/nested_update_inside_update.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/not_using_schema_base/nested_update_inside_update.rs index 08f443dd66f..b25dc7e8cfa 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/not_using_schema_base/nested_update_inside_update.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/not_using_schema_base/nested_update_inside_update.rs @@ -590,8 +590,14 @@ mod update_inside_update { // Transactionality - // "TRANSACTIONAL: a many to many relation" should "fail gracefully on wrong where and assign error correctly and not execute partially" #[connector_test(schema(schema_1), exclude(Sqlite("cfd1")))] + // "TRANSACTIONAL: a many to many relation" should "fail gracefully on wrong where and assign error correctly and not execute partially" + // On D1, this fails with: + // + // ```diff 
+ // - {"data":{"findUniqueNote":{"text":"Some Text"}}} + // + {"data":{"findUniqueNote":{"text":"Some Changed Text"}}} + // ``` async fn tx_m2m_fail_wrong_where(runner: Runner) -> TestResult<()> { let res = run_query_json!( &runner, @@ -793,7 +799,7 @@ mod update_inside_update { } // "a deeply nested mutation" should "execute all levels of the mutation if there are only node edges on the path" - #[connector_test(schema(schema_3), exclude(Sqlite("cfd1")))] + #[connector_test(schema(schema_3))] async fn deep_nested_mutation_exec_all_muts(runner: Runner) -> TestResult<()> { run_query!( &runner, diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/delete_many_relations.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/delete_many_relations.rs index 524fa64b309..ec9508347a6 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/delete_many_relations.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/delete_many_relations.rs @@ -6,13 +6,19 @@ mod delete_many_rels { use query_engine_tests::{run_query, Runner}; use query_test_macros::relation_link_test; - // "a P1 to C1 relation " should "succeed when trying to delete the parent" #[relation_link_test( on_parent = "ToOneOpt", on_child = "ToOneOpt", id_only = true, exclude(Sqlite("cfd1")) )] + // "a P1 to C1 relation " should "succeed when trying to delete the parent" + // On D1, this fails with: + // + // ```diff + // - {"data":{"deleteManyParent":{"count":1}}} + // + {"data":{"deleteManyParent":{"count":3}}} + // ``` async fn p1_c1(runner: &Runner, _t: &DatamodelWithParams) -> TestResult<()> { runner .query(indoc! 
{ r#" @@ -120,8 +126,14 @@ mod delete_many_rels { Ok(()) } - // "a PM to C1 " should "succeed in deleting the parent" #[relation_link_test(on_parent = "ToMany", on_child = "ToOneOpt", exclude(Sqlite("cfd1")))] + // "a PM to C1 relation " should "succeed in deleting the parent" + // On D1, this fails with: + // + // ```diff + // - {"data":{"deleteManyParent":{"count":1}}} + // + {"data":{"deleteManyParent":{"count":3}}} + // ``` async fn pm_c1(runner: &Runner, _t: &DatamodelWithParams) -> TestResult<()> { runner .query(indoc! { r#" @@ -267,8 +279,14 @@ mod delete_many_rels { Ok(()) } - // "a PM to CM relation" should "succeed in deleting the parent" #[relation_link_test(on_parent = "ToMany", on_child = "ToMany", exclude(Sqlite("cfd1")))] + // "a PM to CM relation" should "succeed in deleting the parent" + // On D1, this fails with: + // + // ```diff + // - {"data":{"deleteManyParent":{"count":1}}} + // + {"data":{"deleteManyParent":{"count":3}}} + // ``` async fn pm_cm(runner: &Runner, _t: &DatamodelWithParams) -> TestResult<()> { runner .query(indoc! { r#" @@ -354,8 +372,14 @@ mod delete_many_rels { schema.to_owned() } - // "a PM to CM relation" should "delete the parent from other relations as well" #[connector_test(schema(additional_schema), exclude(Sqlite("cfd1")))] + // "a PM to CM relation" should "delete the parent from other relations as well" + // On D1, this fails with: + // + // ```diff + // - {"data":{"deleteManyParent":{"count":1}}} + // + {"data":{"deleteManyParent":{"count":3}}} + // ``` async fn pm_cm_other_relations(runner: Runner) -> TestResult<()> { runner .query( From 12fad4795eef0c21ed444646215f274961d99cf9 Mon Sep 17 00:00:00 2001 From: Serhii Tatarintsev Date: Thu, 28 Mar 2024 02:22:34 +0100 Subject: [PATCH 03/25] driver-adapters: Serialize i32 arguments as `number` (#4797) * driver-adapters: Serialize i32 arguments as `number` Before this PR, we serialized all integers as bigint. 
This was not a problem for any of the adapters that supported bigint natively, however, it turned into a problem for D1. When we are doing order by aggregations, we inserd `ORDER BY COALESCE(count, 0)` into the query, where `0` is passed as i32 argument. As mentioned earlier, i32 argument got converted to `bigint` before this PR. In D1 adapter, we convert all bigints to strings. Which means, that above SQL query would become `ORDER BY COALESCE(count, '0')` now and produce different order for the rows where `count = NULL`. Since i32 bounds are below `Number.MAX_SAFE_INTEGER`, it is safe to convert it to `number` instead of `bigint`. `0` in above query is hardcoded to always be `i32`, so this fixes the issue. Close prisma/team-orm#1049 * fix(adapter-d1): uncomment remaining tests that now work --------- Co-authored-by: jkomyno --- .../order_by_aggregation.rs | 57 +++---------------- .../driver-adapters/src/conversion/js_arg.rs | 1 + .../driver-adapters/src/conversion/mysql.rs | 1 + .../src/conversion/postgres.rs | 1 + .../driver-adapters/src/conversion/sqlite.rs | 1 + .../driver-adapters/src/napi/conversion.rs | 1 + .../driver-adapters/src/wasm/conversion.rs | 1 + 7 files changed, 13 insertions(+), 50 deletions(-) diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/order_and_pagination/order_by_aggregation.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/order_and_pagination/order_by_aggregation.rs index 744d26e7b56..3c94dd50d30 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/order_and_pagination/order_by_aggregation.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/order_and_pagination/order_by_aggregation.rs @@ -33,13 +33,7 @@ mod order_by_aggr { schema.to_owned() } - #[connector_test(exclude(Sqlite("cfd1")))] - // On D1, this fails with: - // - // ```diff - // - 
{"data":{"findManyUser":[{"id":3,"posts":[]},{"id":1,"posts":[{"title":"alice_post_1"}]},{"id":2,"posts":[{"title":"bob_post_1"},{"title":"bob_post_2"}]}]}} - // + {"data":{"findManyUser":[{"id":1,"posts":[{"title":"alice_post_1"}]},{"id":2,"posts":[{"title":"bob_post_1"},{"title":"bob_post_2"}]},{"id":3,"posts":[]}]}} - // ``` + #[connector_test] async fn one2m_count_asc(runner: Runner) -> TestResult<()> { create_test_data(&runner).await?; @@ -58,13 +52,7 @@ mod order_by_aggr { Ok(()) } - #[connector_test(exclude(Sqlite("cfd1")))] - // On D1, this fails with: - // - // ```diff - // - {"data":{"findManyUser":[{"id":2,"posts":[{"title":"bob_post_1"},{"title":"bob_post_2"}]},{"id":1,"posts":[{"title":"alice_post_1"}]},{"id":3,"posts":[]}]}} - // + {"data":{"findManyUser":[{"id":3,"posts":[]},{"id":2,"posts":[{"title":"bob_post_1"},{"title":"bob_post_2"}]},{"id":1,"posts":[{"title":"alice_post_1"}]}]}} - // ``` + #[connector_test] async fn one2m_count_desc(runner: Runner) -> TestResult<()> { create_test_data(&runner).await?; @@ -121,13 +109,7 @@ mod order_by_aggr { Ok(()) } - #[connector_test(exclude(Sqlite("cfd1")))] - // On D1, this fails with: - // - // ```diff - // - {"data":{"findManyUser":[{"id":3,"name":"Motongo","posts":[]},{"id":1,"name":"Alice","posts":[{"title":"alice_post_1"}]},{"id":2,"name":"Bob","posts":[{"title":"bob_post_1"},{"title":"bob_post_2"}]}]}} - // + {"data":{"findManyUser":[{"id":1,"name":"Alice","posts":[{"title":"alice_post_1"}]},{"id":2,"name":"Bob","posts":[{"title":"bob_post_1"},{"title":"bob_post_2"}]},{"id":3,"name":"Motongo","posts":[]}]}} - // ``` + #[connector_test] async fn one2m_count_asc_field_asc(runner: Runner) -> TestResult<()> { create_test_data(&runner).await?; @@ -456,13 +438,7 @@ mod order_by_aggr { // With pagination tests // "[Cursor] Ordering by one2m count asc" should "work" - #[connector_test(exclude(Sqlite("cfd1")))] - // On D1, this fails with: - // - // ```diff - // - 
{"data":{"findManyUser":[{"id":1,"posts":[{"id":1}]},{"id":2,"posts":[{"id":2},{"id":3}]}]}} - // + {"data":{"findManyUser":[{"id":1,"posts":[{"id":1}]},{"id":2,"posts":[{"id":2},{"id":3}]},{"id":3,"posts":[]}]}} - // ``` + #[connector_test] async fn cursor_one2m_count_asc(runner: Runner) -> TestResult<()> { create_test_data(&runner).await?; @@ -482,13 +458,7 @@ mod order_by_aggr { } // "[Cursor] Ordering by one2m count desc" should "work" - #[connector_test(exclude(Sqlite("cfd1")))] - // On D1, this fails with: - // - // ```diff - // - {"data":{"findManyUser":[{"id":1,"posts":[{"id":1}]},{"id":2,"posts":[{"id":2},{"id":3}]}]}} - // + {"data":{"findManyUser":[{"id":1,"posts":[{"id":1}]},{"id":2,"posts":[{"id":2},{"id":3}]},{"id":3,"posts":[]}]}} - // ``` + #[connector_test] async fn cursor_one2m_count_desc(runner: Runner) -> TestResult<()> { create_test_data(&runner).await?; @@ -550,13 +520,7 @@ mod order_by_aggr { } // "[Cursor][Combo] Ordering by one2m count asc + field asc" - #[connector_test(exclude(Sqlite("cfd1")))] - // On D1, this fails with: - // - // ```diff - // - {"data":{"findManyUser":[{"id":2,"name":"Bob","posts":[{"title":"bob_post_1"},{"title":"bob_post_2"}]}]}} - // + {"data":{"findManyUser":[{"id":2,"name":"Bob","posts":[{"title":"bob_post_1"},{"title":"bob_post_2"}]},{"id":3,"name":"Motongo","posts":[]}]}} - // ``` + #[connector_test] async fn cursor_one2m_count_asc_field_asc(runner: Runner) -> TestResult<()> { create_test_data(&runner).await?; @@ -805,14 +769,7 @@ mod order_by_aggr { schema.to_owned() } - #[connector_test(schema(schema_regression_8036), exclude(Sqlite("cfd1")))] - // Regression test for: // https://github.com/prisma/prisma/issues/8036 - // On D1, this fails with: - // - // ```diff - // - {"data":{"findManyPost":[{"id":2,"title":"Second","_count":{"LikedPeople":0}},{"id":3,"title":"Third","_count":{"LikedPeople":0}},{"id":4,"title":"Fourth","_count":{"LikedPeople":0}},{"id":5,"title":"Fifth","_count":{"LikedPeople":0}}]}} - // + 
{"data":{"findManyPost":[]}} - // ``` + #[connector_test(schema(schema_regression_8036))] async fn count_m2m_records_not_connected(runner: Runner) -> TestResult<()> { run_query!( runner, diff --git a/query-engine/driver-adapters/src/conversion/js_arg.rs b/query-engine/driver-adapters/src/conversion/js_arg.rs index d6f67ed7716..6521829bd27 100644 --- a/query-engine/driver-adapters/src/conversion/js_arg.rs +++ b/query-engine/driver-adapters/src/conversion/js_arg.rs @@ -2,6 +2,7 @@ use serde_json::value::Value as JsonValue; #[derive(Debug, PartialEq)] pub enum JSArg { + SafeInt(i32), Value(serde_json::Value), Buffer(Vec), Array(Vec), diff --git a/query-engine/driver-adapters/src/conversion/mysql.rs b/query-engine/driver-adapters/src/conversion/mysql.rs index bd59d3b94ed..08704b06bcc 100644 --- a/query-engine/driver-adapters/src/conversion/mysql.rs +++ b/query-engine/driver-adapters/src/conversion/mysql.rs @@ -13,6 +13,7 @@ pub fn value_to_js_arg(value: &quaint::Value) -> serde_json::Result { quaint::ValueType::Bytes(Some(bytes)) => JSArg::Buffer(bytes.to_vec()), quaint::ValueType::Date(Some(d)) => JSArg::Value(JsonValue::String(d.format(DATE_FORMAT).to_string())), quaint::ValueType::DateTime(Some(dt)) => JSArg::Value(JsonValue::String(dt.format(DATETIME_FORMAT).to_string())), + quaint::ValueType::Int32(Some(value)) => JSArg::SafeInt(*value), quaint::ValueType::Time(Some(t)) => JSArg::Value(JsonValue::String(t.format(TIME_FORMAT).to_string())), quaint::ValueType::Array(Some(ref items)) => JSArg::Array( items diff --git a/query-engine/driver-adapters/src/conversion/postgres.rs b/query-engine/driver-adapters/src/conversion/postgres.rs index 949cc17e9eb..524834111bc 100644 --- a/query-engine/driver-adapters/src/conversion/postgres.rs +++ b/query-engine/driver-adapters/src/conversion/postgres.rs @@ -14,6 +14,7 @@ pub fn value_to_js_arg(value: &quaint::Value) -> serde_json::Result { (quaint::ValueType::DateTime(Some(dt)), _) => 
JSArg::Value(JsonValue::String(dt.naive_utc().to_string())), (quaint::ValueType::Json(Some(s)), _) => JSArg::Value(JsonValue::String(serde_json::to_string(s)?)), (quaint::ValueType::Bytes(Some(bytes)), _) => JSArg::Buffer(bytes.to_vec()), + (quaint::ValueType::Int32(Some(value)), _) => JSArg::SafeInt(*value), (quaint::ValueType::Numeric(Some(bd)), _) => JSArg::Value(JsonValue::String(bd.to_string())), (quaint::ValueType::Array(Some(items)), _) => JSArg::Array( items diff --git a/query-engine/driver-adapters/src/conversion/sqlite.rs b/query-engine/driver-adapters/src/conversion/sqlite.rs index b11acdca0d7..af070ec0b2c 100644 --- a/query-engine/driver-adapters/src/conversion/sqlite.rs +++ b/query-engine/driver-adapters/src/conversion/sqlite.rs @@ -9,6 +9,7 @@ pub fn value_to_js_arg(value: &quaint::Value) -> serde_json::Result { }, quaint::ValueType::Json(Some(s)) => JSArg::Value(s.to_owned()), quaint::ValueType::Bytes(Some(bytes)) => JSArg::Buffer(bytes.to_vec()), + quaint::ValueType::Int32(Some(value)) => JSArg::SafeInt(*value), quaint::ValueType::Array(Some(ref items)) => JSArg::Array( items .iter() diff --git a/query-engine/driver-adapters/src/napi/conversion.rs b/query-engine/driver-adapters/src/napi/conversion.rs index 6cfe445925e..2fab5a28bb7 100644 --- a/query-engine/driver-adapters/src/napi/conversion.rs +++ b/query-engine/driver-adapters/src/napi/conversion.rs @@ -16,6 +16,7 @@ impl FromNapiValue for JSArg { impl ToNapiValue for JSArg { unsafe fn to_napi_value(env: napi::sys::napi_env, value: Self) -> napi::Result { match value { + JSArg::SafeInt(v) => ToNapiValue::to_napi_value(env, v), JSArg::Value(v) => ToNapiValue::to_napi_value(env, v), JSArg::Buffer(bytes) => { let env = napi::Env::from_raw(env); diff --git a/query-engine/driver-adapters/src/wasm/conversion.rs b/query-engine/driver-adapters/src/wasm/conversion.rs index 73e6a7c3033..d2039210a62 100644 --- a/query-engine/driver-adapters/src/wasm/conversion.rs +++ 
b/query-engine/driver-adapters/src/wasm/conversion.rs @@ -24,6 +24,7 @@ impl ToJsValue for Query { impl ToJsValue for JSArg { fn to_js_value(&self) -> Result { match self { + JSArg::SafeInt(num) => Ok(JsValue::from(*num)), JSArg::Value(value) => serde_serialize(value), JSArg::Buffer(buf) => { let array = Uint8Array::from(buf.as_slice()); From 60bda880bf8b49c797eeaca55d30965b9cab1f12 Mon Sep 17 00:00:00 2001 From: Jan Piotrowski Date: Thu, 28 Mar 2024 17:12:25 +0100 Subject: [PATCH 04/25] ci(wasm-size): Clarify label (#4799) --- .github/workflows/wasm-size.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/wasm-size.yml b/.github/workflows/wasm-size.yml index e03195a5651..6adab506390 100644 --- a/.github/workflows/wasm-size.yml +++ b/.github/workflows/wasm-size.yml @@ -1,4 +1,4 @@ -name: "QE: WASM size" +name: "QE: WASM Query Engine size" on: pull_request: paths-ignore: @@ -114,7 +114,7 @@ jobs: issue-number: ${{ github.event.pull_request.number }} body: | - ### WASM Size + ### WASM Query Engine file Size |Engine | This PR | Base branch | Diff |------------------|----------------------------------------------|--------------------------------------------------|----------------------------------------------- From 446e407d9ae07e4aa9c42325f74209669dd45422 Mon Sep 17 00:00:00 2001 From: Sophie <29753584+Druue@users.noreply.github.com> Date: Thu, 28 Mar 2024 17:46:54 +0100 Subject: [PATCH 05/25] fix(qe): querying full table on batched findUnique() (#4789) * test 23343 * Fixed test batch_23343 Updated SelectionSet to allow for same field from compound index and extra field in findUnique Where. 
Co-authored-by: Flavian Desverne * quick ref (#4795) Co-authored-by: Flavian Desverne --------- Co-authored-by: Flavian Desverne Co-authored-by: Serhii Tatarintsev --- .../queries/batching/select_one_compound.rs | 63 +++++++ query-engine/core/src/query_document/mod.rs | 32 ++-- .../core/src/query_document/selection.rs | 174 ++++++++++-------- 3 files changed, 181 insertions(+), 88 deletions(-) diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/batching/select_one_compound.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/batching/select_one_compound.rs index ed94e4487bf..0d055f591c7 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/batching/select_one_compound.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/batching/select_one_compound.rs @@ -422,6 +422,69 @@ mod compound_batch { Ok(()) } + fn schema_23343() -> String { + let schema = indoc! { r#" + model Post { + id Int + tenantId String + userId Int + text String + + @@unique([tenantId, userId]) + } + "# }; + + schema.to_owned() + } + + #[connector_test(schema(schema_23343))] + async fn batch_23343(runner: Runner) -> TestResult<()> { + create_test_data_23343(&runner).await?; + + let queries = vec![ + r#"query { + findUniquePost(where: { tenantId_userId: { tenantId: "tenant1", userId: 1 }, tenantId: "tenant1" }) + { id, tenantId, userId, text }}"# + .to_string(), + r#"query { + findUniquePost(where: { tenantId_userId: { tenantId: "tenant2", userId: 3 }, tenantId: "tenant2" }) + { id, tenantId, userId, text }}"# + .to_string(), + ]; + + let batch_results = runner.batch(queries, false, None).await?; + insta::assert_snapshot!( + batch_results.to_string(), + @r###"{"batchResult":[{"data":{"findUniquePost":{"id":1,"tenantId":"tenant1","userId":1,"text":"Post 1!"}}},{"data":{"findUniquePost":{"id":3,"tenantId":"tenant2","userId":3,"text":"Post 3!"}}}]}"### + ); + + Ok(()) + } + + async fn 
create_test_data_23343(runner: &Runner) -> TestResult<()> { + runner + .query(r#"mutation { createOnePost(data: { id: 1, tenantId: "tenant1", userId: 1, text: "Post 1!" }) { id } }"#) + .await? + .assert_success(); + + runner + .query(r#"mutation { createOnePost(data: { id: 2, tenantId: "tenant1", userId: 2, text: "Post 2!" }) { id } }"#) + .await? + .assert_success(); + + runner + .query(r#"mutation { createOnePost(data: { id: 3, tenantId: "tenant2", userId: 3, text: "Post 3!" }) { id } }"#) + .await? + .assert_success(); + + runner + .query(r#"mutation { createOnePost(data: { id: 4, tenantId: "tenant2", userId: 4, text: "Post 4!" }) { id } }"#) + .await? + .assert_success(); + + Ok(()) + } + async fn create_test_data(runner: &Runner) -> TestResult<()> { runner .query(r#"mutation { createOneArtist(data: { firstName: "Musti" lastName: "Naukio", non_unique: 0 }) { firstName }}"#) diff --git a/query-engine/core/src/query_document/mod.rs b/query-engine/core/src/query_document/mod.rs index fa424bc44d6..575e3074df2 100644 --- a/query-engine/core/src/query_document/mod.rs +++ b/query-engine/core/src/query_document/mod.rs @@ -37,6 +37,8 @@ use schema::{constants::*, QuerySchema}; use std::collections::HashMap; use user_facing_errors::query_engine::validation::ValidationError; +use self::selection::QueryFilters; + pub(crate) type QueryParserResult = std::result::Result; #[derive(Debug)] @@ -213,21 +215,21 @@ impl CompactedDocument { // The query arguments are extracted here. Combine all query // arguments from the different queries into a one large argument. - let selection_set = selections.iter().fold(SelectionSet::new(), |mut acc, selection| { - // findUnique always has only one argument. We know it must be an object, otherwise this will panic. 
- let where_obj = selection.arguments()[0] - .1 - .clone() - .into_object() - .expect("Trying to compact a selection with non-object argument"); - let filters = extract_filter(where_obj, &model); - - for (field, filter) in filters { - acc = acc.push(field, filter); - } - - acc - }); + let query_filters = selections + .iter() + .map(|selection| { + // findUnique always has only one argument. We know it must be an object, otherwise this will panic. + let where_obj = selection.arguments()[0] + .1 + .clone() + .into_object() + .expect("Trying to compact a selection with non-object argument"); + let filters = extract_filter(where_obj, &model); + + QueryFilters::new(filters) + }) + .collect(); + let selection_set = SelectionSet::new(query_filters); // We must select all unique fields in the query so we can // match the right response back to the right request later on. diff --git a/query-engine/core/src/query_document/selection.rs b/query-engine/core/src/query_document/selection.rs index 18f8fde7843..5b950fc38d3 100644 --- a/query-engine/core/src/query_document/selection.rs +++ b/query-engine/core/src/query_document/selection.rs @@ -1,8 +1,9 @@ +use std::iter; + use crate::{ArgumentValue, ArgumentValueObject}; use indexmap::IndexMap; use itertools::Itertools; use schema::constants::filters; -use std::borrow::Cow; pub type SelectionArgument = (String, ArgumentValue); @@ -102,106 +103,132 @@ impl Selection { } } +#[derive(Debug, Clone, PartialEq, Default)] +pub struct QueryFilters(Vec<(String, ArgumentValue)>); + +impl QueryFilters { + pub fn new(filters: Vec<(String, ArgumentValue)>) -> Self { + Self(filters) + } + + pub fn keys(&self) -> impl IntoIterator + '_ { + self.0.iter().map(|(key, _)| key.as_str()) + } + + pub fn has_many_keys(&self) -> bool { + self.0.len() > 1 + } + + pub fn get_single_key(&self) -> Option<&(String, ArgumentValue)> { + self.0.first() + } +} + #[derive(Debug, Clone, PartialEq)] -pub enum SelectionSet<'a> { - Single(Cow<'a, str>, Vec), - 
Multi(Vec>>, Vec>), +pub enum SelectionSet { + Single(QuerySingle), + Many(Vec), Empty, } -impl<'a> Default for SelectionSet<'a> { - fn default() -> Self { - Self::Empty - } -} +#[derive(Debug, Clone, PartialEq)] +pub struct QuerySingle(String, Vec); + +impl QuerySingle { + /// Attempt at building a single query filter from multiple query filters. + /// Returns `None` if one of the query filters have more than one key. + pub fn new(query_filters: &[QueryFilters]) -> Option { + if query_filters.is_empty() { + return None; + } -impl<'a> SelectionSet<'a> { - pub fn new() -> Self { - Self::default() - } + if query_filters.iter().any(|query_filters| query_filters.has_many_keys()) { + return None; + } - pub fn push(self, column: impl Into>, value: ArgumentValue) -> Self { - let column = column.into(); + let first = query_filters.first().unwrap(); + let (key, value) = first.get_single_key().unwrap(); - match self { - Self::Single(key, mut vals) if key == column => { - vals.push(value); - Self::Single(key, vals) - } - Self::Single(key, mut vals) => { - vals.push(value); - Self::Multi(vec![vec![key, column]], vec![vals]) - } - Self::Multi(mut keys, mut vals) => { - match (keys.last_mut(), vals.last_mut()) { - (Some(keys), Some(vals)) if !keys.contains(&column) => { - keys.push(column); - vals.push(value); - } - _ => { - keys.push(vec![column]); - vals.push(vec![value]); - } - } + let mut result = QuerySingle(key.clone(), vec![value.clone()]); - Self::Multi(keys, vals) + for filters in query_filters.iter().skip(1) { + if let Some(single) = QuerySingle::push(result, filters) { + result = single; + } else { + return None; } - Self::Empty => Self::Single(column, vec![value]), } + + Some(result) } - pub fn len(&self) -> usize { - match self { - Self::Single(_, _) => 1, - Self::Multi(v, _) => v.len(), - Self::Empty => 0, + fn push(mut previous: Self, next: &QueryFilters) -> Option { + if next.0.is_empty() { + Some(previous) + // We have already validated that all `QueryFilters` 
have a single key. + // So we can continue building it. + } else { + let (key, value) = next.0.first().unwrap(); + + // if key matches, push value + if key == &previous.0 { + previous.1.push(value.clone()); + + Some(previous) + } else { + // if key does not match, it's a many + None + } } } +} - pub fn is_single(&self) -> bool { - matches!(self, Self::Single(_, _)) +impl Default for SelectionSet { + fn default() -> Self { + Self::Empty } +} - pub fn is_multi(&self) -> bool { - matches!(self, Self::Multi(_, _)) - } +impl SelectionSet { + pub fn new(filters: Vec) -> Self { + let single = QuerySingle::new(&filters); - pub fn is_empty(&self) -> bool { - self.len() == 0 + match single { + Some(single) => SelectionSet::Single(single), + None if filters.is_empty() => SelectionSet::Empty, + None => SelectionSet::Many(filters), + } } - pub fn keys(&self) -> Vec<&str> { + pub fn keys(&self) -> Box + '_> { match self { - Self::Single(key, _) => vec![key.as_ref()], - Self::Multi(keys, _) => match keys.first() { - Some(keys) => keys.iter().map(|key| key.as_ref()).collect(), - None => Vec::new(), - }, - Self::Empty => Vec::new(), + Self::Single(single) => Box::new(iter::once(single.0.as_str())), + Self::Many(filters) => Box::new(filters.iter().flat_map(|f| f.keys()).unique()), + Self::Empty => Box::new(iter::empty()), } } } -pub struct In<'a> { - selection_set: SelectionSet<'a>, +#[derive(Debug)] +pub struct In { + selection_set: SelectionSet, } -impl<'a> In<'a> { - pub fn new(selection_set: SelectionSet<'a>) -> Self { +impl In { + pub fn new(selection_set: SelectionSet) -> Self { Self { selection_set } } } -impl<'a> From> for ArgumentValue { - fn from(other: In<'a>) -> Self { +impl From for ArgumentValue { + fn from(other: In) -> Self { match other.selection_set { - SelectionSet::Multi(key_sets, val_sets) => { - let key_vals = key_sets.into_iter().zip(val_sets); - - let conjuctive = key_vals.fold(Conjuctive::new(), |acc, (keys, vals)| { - let ands = 
keys.into_iter().zip(vals).fold(Conjuctive::new(), |acc, (key, val)| { - let mut argument = IndexMap::new(); - argument.insert(key.into_owned(), val); + SelectionSet::Many(buckets) => { + let conjuctive = buckets.into_iter().fold(Conjuctive::new(), |acc, bucket| { + // Needed because we flush the last bucket by pushing an empty one, which gets translated to a `Null` as the Conjunctive is empty. + let ands = bucket.0.into_iter().fold(Conjuctive::new(), |acc, (key, value)| { + let mut argument = IndexMap::with_capacity(1); + argument.insert(key.clone(), value); acc.and(argument) }); @@ -211,16 +238,17 @@ impl<'a> From> for ArgumentValue { ArgumentValue::from(conjuctive) } - SelectionSet::Single(key, vals) => { - let is_bool = vals.iter().any(|v| match v { + SelectionSet::Single(QuerySingle(key, vals)) => { + let is_bool = vals.clone().into_iter().any(|v| match v { ArgumentValue::Scalar(s) => matches!(s, query_structure::PrismaValue::Boolean(_)), _ => false, }); if is_bool { let conjunctive = vals.into_iter().fold(Conjuctive::new(), |acc, val| { - let mut argument: IndexMap = IndexMap::new(); - argument.insert(key.clone().into_owned(), val); + let mut argument = IndexMap::new(); + + argument.insert(key.to_string(), val); acc.or(argument) }); From 473ed3124229e22d881cb7addf559799debae1ab Mon Sep 17 00:00:00 2001 From: Nikita Lapkov <5737185+laplab@users.noreply.github.com> Date: Thu, 28 Mar 2024 17:18:35 +0000 Subject: [PATCH 06/25] feat: enable createMany-related capabilities and tests for SQLite (#4779) Co-authored-by: Flavian Desverne --- .gitignore | 1 + Cargo.lock | 1 + .../cockroach_datamodel_connector.rs | 3 +- .../src/builtin_connectors/mongodb.rs | 6 +- .../mssql_datamodel_connector.rs | 3 +- .../mysql_datamodel_connector.rs | 3 +- .../postgres_datamodel_connector.rs | 3 +- .../sqlite_datamodel_connector.rs | 4 +- .../src/datamodel_connector/capabilities.rs | 1 + .../query-engine-tests/Cargo.toml | 1 + .../query-engine-tests/src/utils/metrics.rs | 23 +++ 
.../query-engine-tests/src/utils/mod.rs | 1 + .../tests/new/create_many.rs | 2 +- .../query-engine-tests/tests/new/metrics.rs | 31 +-- .../tests/new/regressions/prisma_14001.rs | 2 +- .../tests/new/regressions/prisma_7434.rs | 2 +- .../tests/new/relation_load_strategy.rs | 3 +- .../nested_create_many.rs | 8 +- .../writes/top_level_mutations/create_many.rs | 184 +++++++++++++++++- .../query-connector/src/write_args.rs | 2 +- .../src/query_builder/write.rs | 6 +- .../interpreter/query_interpreters/write.rs | 112 ++++++++++- query-engine/core/src/query_ast/write.rs | 8 + .../src/query_graph_builder/write/create.rs | 3 +- .../write/nested/create_nested.rs | 3 + .../query_graph_builder/write/nested/mod.rs | 2 +- 26 files changed, 369 insertions(+), 49 deletions(-) create mode 100644 query-engine/connector-test-kit-rs/query-engine-tests/src/utils/metrics.rs diff --git a/.gitignore b/.gitignore index d401ff68f18..0c2f9ea4318 100644 --- a/.gitignore +++ b/.gitignore @@ -6,6 +6,7 @@ prisma-gpg-private.asc .test_config *.pending-snap .pending.md +dev.db *.class *.log diff --git a/Cargo.lock b/Cargo.lock index 8fbb896def5..d838995c6f9 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3813,6 +3813,7 @@ dependencies = [ "futures", "indoc 2.0.3", "insta", + "itertools 0.12.0", "once_cell", "paste", "prisma-value", diff --git a/psl/psl-core/src/builtin_connectors/cockroach_datamodel_connector.rs b/psl/psl-core/src/builtin_connectors/cockroach_datamodel_connector.rs index 03b312ba357..c5c9334fe98 100644 --- a/psl/psl-core/src/builtin_connectors/cockroach_datamodel_connector.rs +++ b/psl/psl-core/src/builtin_connectors/cockroach_datamodel_connector.rs @@ -62,7 +62,8 @@ const CAPABILITIES: ConnectorCapabilities = enumflags2::make_bitflags!(Connector RowIn | DeleteReturning | SupportsFiltersOnRelationsWithoutJoins | - LateralJoin + LateralJoin | + SupportsDefaultInInsert }); const SCALAR_TYPE_DEFAULTS: &[(ScalarType, CockroachType)] = &[ diff --git 
a/psl/psl-core/src/builtin_connectors/mongodb.rs b/psl/psl-core/src/builtin_connectors/mongodb.rs index 814f3f60fd4..1034521fac1 100644 --- a/psl/psl-core/src/builtin_connectors/mongodb.rs +++ b/psl/psl-core/src/builtin_connectors/mongodb.rs @@ -31,7 +31,11 @@ const CAPABILITIES: ConnectorCapabilities = enumflags2::make_bitflags!(Connector DefaultValueAuto | TwoWayEmbeddedManyToManyRelation | UndefinedType | - DeleteReturning + DeleteReturning | + // MongoDB does not have a notion of default values for fields. + // This capability is enabled as a performance optimisation to avoid issuing multiple queries + // when using `createMany()` with MongoDB. + SupportsDefaultInInsert }); pub(crate) struct MongoDbDatamodelConnector; diff --git a/psl/psl-core/src/builtin_connectors/mssql_datamodel_connector.rs b/psl/psl-core/src/builtin_connectors/mssql_datamodel_connector.rs index 2146e2b95a1..9fe851aa94e 100644 --- a/psl/psl-core/src/builtin_connectors/mssql_datamodel_connector.rs +++ b/psl/psl-core/src/builtin_connectors/mssql_datamodel_connector.rs @@ -51,7 +51,8 @@ const CAPABILITIES: ConnectorCapabilities = enumflags2::make_bitflags!(Connector SupportsTxIsolationRepeatableRead | SupportsTxIsolationSerializable | SupportsTxIsolationSnapshot | - SupportsFiltersOnRelationsWithoutJoins + SupportsFiltersOnRelationsWithoutJoins | + SupportsDefaultInInsert // InsertReturning | DeleteReturning - unimplemented. 
}); diff --git a/psl/psl-core/src/builtin_connectors/mysql_datamodel_connector.rs b/psl/psl-core/src/builtin_connectors/mysql_datamodel_connector.rs index 4240525bc5e..1d91e590981 100644 --- a/psl/psl-core/src/builtin_connectors/mysql_datamodel_connector.rs +++ b/psl/psl-core/src/builtin_connectors/mysql_datamodel_connector.rs @@ -68,7 +68,8 @@ pub const CAPABILITIES: ConnectorCapabilities = enumflags2::make_bitflags!(Conne SupportsTxIsolationSerializable | RowIn | SupportsFiltersOnRelationsWithoutJoins | - CorrelatedSubqueries + CorrelatedSubqueries | + SupportsDefaultInInsert }); const CONSTRAINT_SCOPES: &[ConstraintScope] = &[ConstraintScope::GlobalForeignKey, ConstraintScope::ModelKeyIndex]; diff --git a/psl/psl-core/src/builtin_connectors/postgres_datamodel_connector.rs b/psl/psl-core/src/builtin_connectors/postgres_datamodel_connector.rs index 35bcc30d024..d5cebd189bc 100644 --- a/psl/psl-core/src/builtin_connectors/postgres_datamodel_connector.rs +++ b/psl/psl-core/src/builtin_connectors/postgres_datamodel_connector.rs @@ -71,7 +71,8 @@ pub const CAPABILITIES: ConnectorCapabilities = enumflags2::make_bitflags!(Conne DistinctOn | DeleteReturning | SupportsFiltersOnRelationsWithoutJoins | - LateralJoin + LateralJoin | + SupportsDefaultInInsert }); pub struct PostgresDatamodelConnector; diff --git a/psl/psl-core/src/builtin_connectors/sqlite_datamodel_connector.rs b/psl/psl-core/src/builtin_connectors/sqlite_datamodel_connector.rs index 4d5febb74b5..b58dd9e2bbd 100644 --- a/psl/psl-core/src/builtin_connectors/sqlite_datamodel_connector.rs +++ b/psl/psl-core/src/builtin_connectors/sqlite_datamodel_connector.rs @@ -28,7 +28,9 @@ pub const CAPABILITIES: ConnectorCapabilities = enumflags2::make_bitflags!(Conne InsertReturning | DeleteReturning | UpdateReturning | - SupportsFiltersOnRelationsWithoutJoins + SupportsFiltersOnRelationsWithoutJoins | + CreateMany | + CreateManyWriteableAutoIncId }); pub struct SqliteDatamodelConnector; diff --git 
a/psl/psl-core/src/datamodel_connector/capabilities.rs b/psl/psl-core/src/datamodel_connector/capabilities.rs index b520e53841a..cf3f36eeea1 100644 --- a/psl/psl-core/src/datamodel_connector/capabilities.rs +++ b/psl/psl-core/src/datamodel_connector/capabilities.rs @@ -74,6 +74,7 @@ capabilities!( InsensitiveFilters, CreateMany, CreateManyWriteableAutoIncId, + SupportsDefaultInInsert, // This capability is set if connector supports using `DEFAULT` instead of a value in the list of `INSERT` arguments. WritableAutoincField, CreateSkipDuplicates, UpdateableId, diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/Cargo.toml b/query-engine/connector-test-kit-rs/query-engine-tests/Cargo.toml index 2ac097a7a18..c60b9cca459 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/Cargo.toml +++ b/query-engine/connector-test-kit-rs/query-engine-tests/Cargo.toml @@ -27,3 +27,4 @@ paste = "1.0.14" [dev-dependencies] insta = "1.7.1" +itertools.workspace = true diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/src/utils/metrics.rs b/query-engine/connector-test-kit-rs/query-engine-tests/src/utils/metrics.rs new file mode 100644 index 00000000000..df6da0fec9a --- /dev/null +++ b/query-engine/connector-test-kit-rs/query-engine-tests/src/utils/metrics.rs @@ -0,0 +1,23 @@ +use serde_json::Value; + +pub fn get_counter(json: &Value, name: &str) -> u64 { + let metric_value = get_metric_value(json, "counters", name); + metric_value.as_u64().unwrap() +} + +pub fn get_gauge(json: &Value, name: &str) -> f64 { + let metric_value = get_metric_value(json, "gauges", name); + metric_value.as_f64().unwrap() +} + +pub fn get_metric_value(json: &Value, metric_type: &str, name: &str) -> serde_json::Value { + let metrics = json.get(metric_type).unwrap().as_array().unwrap(); + let metric = metrics + .iter() + .find(|metric| metric.get("key").unwrap().as_str() == Some(name)) + .unwrap() + .as_object() + .unwrap(); + + metric.get("value").unwrap().clone() +} 
diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/src/utils/mod.rs b/query-engine/connector-test-kit-rs/query-engine-tests/src/utils/mod.rs index df331383d6c..a3fadb7d195 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/src/utils/mod.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/src/utils/mod.rs @@ -1,6 +1,7 @@ mod batch; mod bytes; mod json; +pub mod metrics; mod querying; mod raw; mod string; diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/create_many.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/create_many.rs index fc3ec925352..06988cf1de1 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/create_many.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/create_many.rs @@ -64,7 +64,7 @@ mod cockroachdb { mod single_col { use query_engine_tests::run_query; - #[connector_test(exclude(CockroachDb))] + #[connector_test(exclude(CockroachDb, Sqlite("cfd1")))] async fn foo(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, "mutation { createManyTestModel(data: [{},{}]) { count }}"), diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/metrics.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/metrics.rs index f1310947427..7a020f27aa3 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/metrics.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/metrics.rs @@ -14,7 +14,6 @@ mod metrics { }; use query_engine_tests::ConnectorVersion::*; use query_engine_tests::*; - use serde_json::Value; #[connector_test] async fn metrics_are_recorded(runner: Runner) -> TestResult<()> { @@ -30,8 +29,8 @@ mod metrics { let json = runner.get_metrics().to_json(Default::default()); // We cannot assert the full response it will be slightly different per database - let total_queries = get_counter(&json, 
PRISMA_DATASOURCE_QUERIES_TOTAL); - let total_operations = get_counter(&json, PRISMA_CLIENT_QUERIES_TOTAL); + let total_queries = utils::metrics::get_counter(&json, PRISMA_DATASOURCE_QUERIES_TOTAL); + let total_operations = utils::metrics::get_counter(&json, PRISMA_CLIENT_QUERIES_TOTAL); match runner.connector_version() { Sqlite(_) => assert_eq!(total_queries, 2), @@ -63,7 +62,7 @@ mod metrics { let _ = runner.commit_tx(tx_id).await?; let json = runner.get_metrics().to_json(Default::default()); - let active_transactions = get_gauge(&json, PRISMA_CLIENT_QUERIES_ACTIVE); + let active_transactions = utils::metrics::get_gauge(&json, PRISMA_CLIENT_QUERIES_ACTIVE); assert_eq!(active_transactions, 0.0); let tx_id = runner.start_tx(5000, 5000, None).await?; @@ -80,30 +79,8 @@ mod metrics { let _ = runner.rollback_tx(tx_id.clone()).await?; let json = runner.get_metrics().to_json(Default::default()); - let active_transactions = get_gauge(&json, PRISMA_CLIENT_QUERIES_ACTIVE); + let active_transactions = utils::metrics::get_gauge(&json, PRISMA_CLIENT_QUERIES_ACTIVE); assert_eq!(active_transactions, 0.0); Ok(()) } - - fn get_counter(json: &Value, name: &str) -> u64 { - let metric_value = get_metric_value(json, "counters", name); - metric_value.as_u64().unwrap() - } - - fn get_gauge(json: &Value, name: &str) -> f64 { - let metric_value = get_metric_value(json, "gauges", name); - metric_value.as_f64().unwrap() - } - - fn get_metric_value(json: &Value, metric_type: &str, name: &str) -> serde_json::Value { - let metrics = json.get(metric_type).unwrap().as_array().unwrap(); - let metric = metrics - .iter() - .find(|metric| metric.get("key").unwrap().as_str() == Some(name)) - .unwrap() - .as_object() - .unwrap(); - - metric.get("value").unwrap().clone() - } } diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_14001.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_14001.rs index 
8b08a70c16c..9b7b0e514b7 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_14001.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_14001.rs @@ -1,6 +1,6 @@ use query_engine_tests::*; -#[test_suite(schema(schema), exclude(Sqlite))] +#[test_suite(schema(schema))] mod prisma_14001 { fn schema() -> String { r#" diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_7434.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_7434.rs index f7114d24983..166e9e1e4a9 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_7434.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_7434.rs @@ -4,7 +4,7 @@ use query_engine_tests::*; mod not_in_chunking { use query_engine_tests::Runner; - #[connector_test(exclude(CockroachDb))] + #[connector_test(exclude(CockroachDb, Sqlite("cfd1")))] async fn not_in_batch_filter(runner: Runner) -> TestResult<()> { assert_error!( runner, diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/relation_load_strategy.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/relation_load_strategy.rs index 55acc7b3052..9cccd46caeb 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/relation_load_strategy.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/relation_load_strategy.rs @@ -438,8 +438,7 @@ mod relation_load_strategy { count } } - "#, - exclude(Sqlite) + "# ); relation_load_strategy_not_available_test!( diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/not_using_schema_base/nested_create_many.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/not_using_schema_base/nested_create_many.rs index 3cd6be2eabe..821b99f9fce 100644 --- 
a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/not_using_schema_base/nested_create_many.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/nested_mutations/not_using_schema_base/nested_create_many.rs @@ -25,7 +25,7 @@ mod nested_create_many { } // "A basic createMany on a create top level" should "work" - #[connector_test(exclude(Sqlite))] + #[connector_test] async fn create_many_on_create(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { @@ -53,7 +53,7 @@ mod nested_create_many { } // "A basic createMany on a create top level" should "work" - #[connector_test(exclude(Sqlite))] + #[connector_test] async fn create_many_shorthand_on_create(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { @@ -78,7 +78,7 @@ mod nested_create_many { // "Nested createMany" should "error on duplicates by default" // TODO(dom): Not working for mongo - #[connector_test(exclude(Sqlite, MongoDb))] + #[connector_test(exclude(MongoDb))] async fn nested_createmany_fail_dups(runner: Runner) -> TestResult<()> { assert_error!( &runner, @@ -140,7 +140,7 @@ mod nested_create_many { // Each DB allows a certain amount of params per single query, and a certain number of rows. // We create 1000 nested records. 
// "Nested createMany" should "allow creating a large number of records (horizontal partitioning check)" - #[connector_test(exclude(Sqlite))] + #[connector_test(exclude(Sqlite("cfd1")))] async fn allow_create_large_number_records(runner: Runner) -> TestResult<()> { let records: Vec<_> = (1..=1000).map(|i| format!(r#"{{ id: {i}, str1: "{i}" }}"#)).collect(); diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/create_many.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/create_many.rs index 35a044b1473..f59aee0756f 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/create_many.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/writes/top_level_mutations/create_many.rs @@ -1,6 +1,7 @@ use query_engine_tests::*; -#[test_suite(capabilities(CreateMany))] +// TODO: create many returns the wrong count for CFD1 +#[test_suite(capabilities(CreateMany), exclude(Sqlite("cfd1")))] mod create_many { use indoc::indoc; use query_engine_tests::{assert_error, run_query}; @@ -64,7 +65,11 @@ mod create_many { } // Covers: AutoIncrement ID working with basic autonincrement functionality. - #[connector_test(schema(schema_2), capabilities(CreateManyWriteableAutoIncId), exclude(CockroachDb))] + #[connector_test( + schema(schema_2), + capabilities(CreateManyWriteableAutoIncId), + exclude(CockroachDb, Sqlite("cfd1")) + )] async fn basic_create_many_autoincrement(runner: Runner) -> TestResult<()> { insta::assert_snapshot!( run_query!(&runner, r#"mutation { @@ -294,6 +299,181 @@ mod create_many { Ok(()) } + + fn schema_7() -> String { + let schema = indoc! { + r#"model Test { + req Int @id + req_default Int @default(dbgenerated("1")) + req_default_static Int @default(1) + opt Int? + opt_default Int? @default(dbgenerated("1")) + opt_default_static Int? 
@default(1) + }"# + }; + + schema.to_owned() + } + + #[connector_test(schema(schema_7), only(Sqlite))] + async fn create_many_by_shape(runner: Runner) -> TestResult<()> { + use itertools::Itertools; + + let mut id = 1; + + // Generates a powerset of all combinations of these fields + // In an attempt to ensure that we never generate invalid insert statements + // because of the grouping logic. + for sets in vec!["req_default", "opt", "opt_default"] + .into_iter() + .powerset() + .map(|mut set| { + set.extend_from_slice(&["req"]); + set + }) + .powerset() + { + let data = sets + .into_iter() + .map(|set| { + let res = set.into_iter().map(|field| format!("{field}: {id}")).join(", "); + + id += 1; + + format!("{{ {res} }}") + }) + .join(", "); + + run_query!( + &runner, + format!(r#"mutation {{ createManyTest(data: [{data}]) {{ count }} }}"#) + ); + } + + Ok(()) + } + + // LibSQL & co are ignored because they don't support metrics + #[connector_test(schema(schema_7), only(Sqlite("3")))] + async fn create_many_by_shape_counter_1(runner: Runner) -> TestResult<()> { + use query_engine_metrics::PRISMA_DATASOURCE_QUERIES_TOTAL; + + // Generated queries: + // INSERT INTO `main`.`Test` (`opt`, `req`) VALUES (null, ?), (?, ?) params=[1,2,2] + // INSERT INTO `main`.`Test` (`opt_default`, `opt`, `req`) VALUES (?, null, ?), (?, ?, ?) params=[3,3,6,6,6] + // INSERT INTO `main`.`Test` (`req_default`, `opt_default`, `req`, `opt`) VALUES (?, ?, ?, null), (?, ?, ?, ?) params=[5,5,5,7,7,7,7] + // INSERT INTO `main`.`Test` (`req`, `req_default`, `opt`) VALUES (?, ?, ?) 
params=[4,4,4] + run_query!( + &runner, + r#"mutation { + createManyTest( + data: [ + { req: 1 } + { opt: 2, req: 2 } + { opt_default: 3, req: 3 } + { req_default: 4, opt: 4, req: 4 } + { req_default: 5, opt_default: 5, req: 5 } + { opt: 6, opt_default: 6, req: 6 } + { req_default: 7, opt: 7, opt_default: 7, req: 7 } + ] + ) { + count + } + }"# + ); + + let json = runner.get_metrics().to_json(Default::default()); + let counter = metrics::get_counter(&json, PRISMA_DATASOURCE_QUERIES_TOTAL); + + match runner.max_bind_values() { + Some(x) if x > 18 => assert_eq!(counter, 6), // 4 queries in total (BEGIN/COMMIT are counted) + // Some queries are being split because of `QUERY_BATCH_SIZE` being set to `10` in dev. + Some(_) => assert_eq!(counter, 7), // 5 queries in total (BEGIN/COMMIT are counted) + _ => panic!("Expected max bind values to be set"), + } + + Ok(()) + } + + // LibSQL & co are ignored because they don't support metrics + #[connector_test(schema(schema_7), only(Sqlite("3")))] + async fn create_many_by_shape_counter_2(runner: Runner) -> TestResult<()> { + use query_engine_metrics::PRISMA_DATASOURCE_QUERIES_TOTAL; + + // Generated queries: + // INSERT INTO `main`.`Test` ( `opt_default_static`, `req_default_static`, `opt`, `req` ) VALUES (?, ?, null, ?), (?, ?, null, ?), (?, ?, null, ?) params=[1,1,1,2,1,2,1,3,3] + // INSERT INTO `main`.`Test` ( `opt_default_static`, `req_default_static`, `opt`, `req` ) VALUES (?, ?, ?, ?), (?, ?, ?, ?) params=[1,1,8,4,1,1,null,5] + // Note: Two queries are generated because QUERY_BATCH_SIZE is set to 10. In production, a single query would be generated for this example. 
+ run_query!( + &runner, + r#"mutation { + createManyTest( + data: [ + { req: 1 } + { req: 2, opt_default_static: 2 }, + { req: 3, req_default_static: 3 }, + { req: 4, opt: 8 }, + { req: 5, opt: null }, + ] + ) { + count + } + }"# + ); + + let json = runner.get_metrics().to_json(Default::default()); + let counter = metrics::get_counter(&json, PRISMA_DATASOURCE_QUERIES_TOTAL); + + match runner.max_bind_values() { + Some(x) if x >= 18 => assert_eq!(counter, 3), // 1 createMany queries (BEGIN/COMMIT are counted) + // Some queries are being split because of `QUERY_BATCH_SIZE` being set to `10` in dev. + Some(_) => assert_eq!(counter, 4), // 2 createMany queries (BEGIN/COMMIT are counted) + _ => panic!("Expected max bind values to be set"), + } + + Ok(()) + } + + // LibSQL & co are ignored because they don't support metrics + #[connector_test(schema(schema_7), only(Sqlite("3")))] + async fn create_many_by_shape_counter_3(runner: Runner) -> TestResult<()> { + use query_engine_metrics::PRISMA_DATASOURCE_QUERIES_TOTAL; + + // Generated queries: + // INSERT INTO `main`.`Test` ( `req_default_static`, `req`, `opt_default`, `opt_default_static` ) VALUES (?, ?, ?, ?) params=[1,6,3,1] + // INSERT INTO `main`.`Test` ( `opt`, `req`, `req_default_static`, `opt_default_static` ) VALUES (null, ?, ?, ?), (null, ?, ?, ?), (null, ?, ?, ?) params=[1,1,1,2,1,2,3,3,1] + // INSERT INTO `main`.`Test` ( `opt`, `req`, `req_default_static`, `opt_default_static` ) VALUES (?, ?, ?, ?), (?, ?, ?, ?) params=[8,4,1,1,null,5,1,1] + // Note: The first two queries are split because QUERY_BATCH_SIZE is set to 10. In production, only two queries would be generated for this example. 
+ run_query!( + &runner, + r#"mutation { + createManyTest( + data: [ + { req: 1 } + { req: 2, opt_default_static: 2 }, + { req: 3, req_default_static: 3 }, + { req: 4, opt: 8 }, + { req: 5, opt: null }, + { req: 6, opt_default: 3 }, + ] + ) { + count + } + }"# + ); + + let json = runner.get_metrics().to_json(Default::default()); + let counter = metrics::get_counter(&json, PRISMA_DATASOURCE_QUERIES_TOTAL); + + match runner.max_bind_values() { + Some(x) if x > 21 => assert_eq!(counter, 4), // 3 createMany queries in total (BEGIN/COMMIT are counted) + // Some queries are being split because of `QUERY_BATCH_SIZE` being set to `10` in dev. + Some(_) => assert_eq!(counter, 5), // 3 createMany queries in total (BEGIN/COMMIT are counted) + _ => panic!("Expected max bind values to be set"), + } + + Ok(()) + } } #[test_suite(schema(json_opt), exclude(MySql(5.6)), capabilities(CreateMany, Json))] diff --git a/query-engine/connectors/query-connector/src/write_args.rs b/query-engine/connectors/query-connector/src/write_args.rs index 445037bdbbe..b02fa873f83 100644 --- a/query-engine/connectors/query-connector/src/write_args.rs +++ b/query-engine/connectors/query-connector/src/write_args.rs @@ -16,7 +16,7 @@ pub struct WriteArgs { /// Wrapper struct to force a bit of a reflection whether or not the string passed /// to the write arguments is the data source field name, not the model field name. /// Also helps to avoid errors with convenient from-field conversions. 
-#[derive(Debug, PartialEq, Clone, Hash, Eq)] +#[derive(Debug, PartialEq, Clone, Hash, Eq, PartialOrd, Ord)] pub struct DatasourceFieldName(pub String); impl Deref for DatasourceFieldName { diff --git a/query-engine/connectors/sql-query-connector/src/query_builder/write.rs b/query-engine/connectors/sql-query-connector/src/query_builder/write.rs index abcf73cb29c..c089f0834dc 100644 --- a/query-engine/connectors/sql-query-connector/src/query_builder/write.rs +++ b/query-engine/connectors/sql-query-connector/src/query_builder/write.rs @@ -59,6 +59,7 @@ pub(crate) fn create_records_nonempty( for field in affected_fields.iter() { let value = arg.take_field_value(field.db_name()); + match value { Some(write_op) => { let value: PrismaValue = write_op @@ -67,7 +68,10 @@ pub(crate) fn create_records_nonempty( row.push(field.value(value, ctx).into()); } - + // We can't use `DEFAULT` for SQLite so we provided an explicit `NULL` instead. + None if !field.is_required() && field.default_value().is_none() => { + row.push(Value::null_int32().raw().into()) + } None => row.push(default_value()), } } diff --git a/query-engine/core/src/interpreter/query_interpreters/write.rs b/query-engine/core/src/interpreter/query_interpreters/write.rs index 6d88c254312..ad50bbbae0c 100644 --- a/query-engine/core/src/interpreter/query_interpreters/write.rs +++ b/query-engine/core/src/interpreter/query_interpreters/write.rs @@ -1,9 +1,12 @@ +use std::collections::HashMap; + use crate::{ interpreter::{InterpretationResult, InterpreterError}, query_ast::*, QueryResult, RecordSelection, }; -use connector::{ConnectionLike, NativeUpsert}; +use connector::{ConnectionLike, DatasourceFieldName, NativeUpsert, WriteArgs}; +use query_structure::{ManyRecords, Model}; pub(crate) async fn execute( tx: &mut dyn ConnectionLike, @@ -60,6 +63,10 @@ async fn create_many( q: CreateManyRecords, trace_id: Option, ) -> InterpretationResult { + if q.split_by_shape { + return create_many_split_by_shape(tx, q, 
trace_id).await; + } + if let Some(selected_fields) = q.selected_fields { let records = tx .create_records_returning(&q.model, q.args, q.skip_duplicates, selected_fields.fields, trace_id) @@ -81,6 +88,109 @@ async fn create_many( } } +/// Performs bulk inserts grouped by record shape. +/// +/// This is required to support connectors which do not support `DEFAULT` in the list of values for `INSERT`. +/// See [`create_many_shape`] for more information as to which heuristic we use to group create many entries. +async fn create_many_split_by_shape( + tx: &mut dyn ConnectionLike, + q: CreateManyRecords, + trace_id: Option, +) -> InterpretationResult { + let mut args_by_shape: HashMap> = Default::default(); + let model = &q.model; + + for write_args in q.args { + let shape = create_many_shape(&write_args, model); + + args_by_shape.entry(shape).or_default().push(write_args); + } + + if let Some(selected_fields) = q.selected_fields { + let mut result: Option = None; + for args in args_by_shape.into_values() { + let current_batch = tx + .create_records_returning( + &q.model, + args, + q.skip_duplicates, + selected_fields.fields.clone(), + trace_id.clone(), + ) + .await?; + + if let Some(result) = &mut result { + // We assume that all records have the same set and order of fields, + // since we pass the same `selected_fields.fields` to the + // `create_records_returning()` above. + result.records.extend(current_batch.records.into_iter()); + } else { + result = Some(current_batch); + } + } + + let records = if let Some(result) = result { + result + } else { + // Empty result means that the list of arguments was empty as well. + tx.create_records_returning(&q.model, vec![], q.skip_duplicates, selected_fields.fields, trace_id) + .await? 
+    };
+
+        let selection = RecordSelection {
+            name: q.name,
+            fields: selected_fields.order,
+            records,
+            nested: vec![],
+            model: q.model,
+            virtual_fields: vec![],
+        };
+
+        Ok(QueryResult::RecordSelection(Some(Box::new(selection))))
+    } else {
+        let mut result = 0;
+        for args in args_by_shape.into_values() {
+            let affected_records = tx
+                .create_records(&q.model, args, q.skip_duplicates, trace_id.clone())
+                .await?;
+            result += affected_records;
+        }
+        Ok(QueryResult::Count(result))
+    }
+}
+
+#[derive(Debug, PartialEq, Eq, Hash)]
+struct CreateManyShape(Vec);
+
+/// Returns a [`CreateManyShape`] that can be used to group CreateMany entries optimally.
+///
+/// This is needed for connectors that don't support the `DEFAULT` expression when inserting records in bulk.
+/// `DEFAULT` is needed for fields that have a default value that the QueryEngine cannot generate at runtime (@autoincrement(), @dbgenerated()).
+///
+/// Two CreateMany entries cannot be grouped together when they contain different fields that require the use of a `DEFAULT` expression.
+/// - When they have the same set of fields that require `DEFAULT`, those fields can be omitted entirely from the `INSERT` expression, in which case `DEFAULT` is implied.
+/// - When they don't, since all `VALUES` entries of the `INSERT` expression must be the same, we have to split the CreateMany entries into separate `INSERT` expressions.
+///
+/// Consequently, if a field has a default value and is _not_ present in the [`WriteArgs`], this constitutes a discriminant that can be used to group CreateMany entries.
+///
+/// As such, the [`CreateManyShape`] that we compute for a given CreateMany entry is the set of fields that are _not_ present in the [`WriteArgs`] and that have a default value.
+/// Note: This works because the [`crate::QueryDocumentParser`] injects into the CreateMany entries, the default values that _can_ be generated at runtime. 
+/// Note: We can ignore optional fields without default values because they can be inserted as `NULL`. It is a value that the QueryEngine _can_ generate at runtime. +fn create_many_shape(write_args: &WriteArgs, model: &Model) -> CreateManyShape { + let mut shape = Vec::new(); + + for field in model.fields().scalar() { + if !write_args.args.contains_key(field.db_name()) && field.default_value().is_some() { + shape.push(DatasourceFieldName(field.db_name().to_string())); + } + } + + // This ensures that shapes are not dependent on order of fields. + shape.sort_unstable(); + + CreateManyShape(shape) +} + async fn update_one( tx: &mut dyn ConnectionLike, q: UpdateRecord, diff --git a/query-engine/core/src/query_ast/write.rs b/query-engine/core/src/query_ast/write.rs index 76c8ffb81cb..975a2be877b 100644 --- a/query-engine/core/src/query_ast/write.rs +++ b/query-engine/core/src/query_ast/write.rs @@ -272,6 +272,14 @@ pub struct CreateManyRecords { /// Fields of created records that client has requested to return. /// `None` if the connector does not support returning the created rows. pub selected_fields: Option, + /// If set to true, connector will perform the operation using multiple bulk `INSERT` queries. + /// One query will be issued per a unique set of fields present in the batch. For example, if + /// `args` contains records: + /// {a: 1, b: 1} + /// {a: 2, b: 2} + /// {a: 3, b: 3, c: 3} + /// Two queries will be issued: one containing first two records and one for the last record. 
+ pub split_by_shape: bool, } #[derive(Debug, Clone)] diff --git a/query-engine/core/src/query_graph_builder/write/create.rs b/query-engine/core/src/query_graph_builder/write/create.rs index 014910a43aa..fe0e49a2937 100644 --- a/query-engine/core/src/query_graph_builder/write/create.rs +++ b/query-engine/core/src/query_graph_builder/write/create.rs @@ -66,7 +66,7 @@ pub(crate) fn create_record( /// Creates a create record query and adds it to the query graph, together with it's nested queries and companion read query. pub(crate) fn create_many_records( graph: &mut QueryGraph, - _query_schema: &QuerySchema, + query_schema: &QuerySchema, model: Model, mut field: ParsedField<'_>, ) -> QueryGraphBuilderResult<()> { @@ -99,6 +99,7 @@ pub(crate) fn create_many_records( args, skip_duplicates, selected_fields: None, + split_by_shape: !query_schema.has_capability(ConnectorCapability::SupportsDefaultInInsert), }; graph.create_node(Query::Write(WriteQuery::CreateManyRecords(query))); diff --git a/query-engine/core/src/query_graph_builder/write/nested/create_nested.rs b/query-engine/core/src/query_graph_builder/write/nested/create_nested.rs index aaea8d24efd..c3d6196b61e 100644 --- a/query-engine/core/src/query_graph_builder/write/nested/create_nested.rs +++ b/query-engine/core/src/query_graph_builder/write/nested/create_nested.rs @@ -75,6 +75,7 @@ pub fn nested_create( args: data_maps.into_iter().map(|(args, _nested)| args).collect(), skip_duplicates: false, selected_fields, + split_by_shape: !query_schema.has_capability(ConnectorCapability::SupportsDefaultInInsert), }; let create_many_node = graph.create_node(Query::Write(WriteQuery::CreateManyRecords(query))); @@ -554,6 +555,7 @@ fn handle_one_to_one( pub fn nested_create_many( graph: &mut QueryGraph, + query_schema: &QuerySchema, parent_node: NodeRef, parent_relation_field: &RelationFieldRef, value: ParsedInputValue<'_>, @@ -585,6 +587,7 @@ pub fn nested_create_many( args, skip_duplicates, selected_fields: None, + 
split_by_shape: !query_schema.has_capability(ConnectorCapability::SupportsDefaultInInsert), }; let create_node = graph.create_node(Query::Write(WriteQuery::CreateManyRecords(query))); diff --git a/query-engine/core/src/query_graph_builder/write/nested/mod.rs b/query-engine/core/src/query_graph_builder/write/nested/mod.rs index 5d0ad21a4c7..a8f984edbbf 100644 --- a/query-engine/core/src/query_graph_builder/write/nested/mod.rs +++ b/query-engine/core/src/query_graph_builder/write/nested/mod.rs @@ -36,7 +36,7 @@ pub fn connect_nested_query( for (field_name, value) in data_map { match field_name.as_ref() { operations::CREATE => nested_create(graph, query_schema,parent, &parent_relation_field, value, &child_model)?, - operations::CREATE_MANY => nested_create_many(graph, parent, &parent_relation_field, value, &child_model)?, + operations::CREATE_MANY => nested_create_many(graph, query_schema, parent, &parent_relation_field, value, &child_model)?, operations::UPDATE => nested_update(graph, query_schema, &parent, &parent_relation_field, value, &child_model)?, operations::UPSERT => nested_upsert(graph, query_schema, parent, &parent_relation_field, value)?, operations::DELETE => nested_delete(graph, query_schema, &parent, &parent_relation_field, value, &child_model)?, From efe4a6425654274d4e32a71d656fb995c49c95a8 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Wed, 3 Apr 2024 08:32:14 +0200 Subject: [PATCH 07/25] chore(deps): pin dependencies (#4776) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- query-engine/driver-adapters/executor/package.json | 10 +++++----- query-engine/driver-adapters/package.json | 2 +- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/query-engine/driver-adapters/executor/package.json b/query-engine/driver-adapters/executor/package.json index 66b3ce0820c..3da91f92092 100644 --- a/query-engine/driver-adapters/executor/package.json +++ 
b/query-engine/driver-adapters/executor/package.json @@ -24,7 +24,7 @@ "sideEffects": false, "license": "Apache-2.0", "dependencies": { - "@effect/schema": "^0.64.2", + "@effect/schema": "0.64.16", "@prisma/adapter-d1": "workspace:*", "@prisma/adapter-libsql": "workspace:*", "@prisma/adapter-neon": "workspace:*", @@ -35,13 +35,13 @@ "mitata": "0.1.11", "query-engine-wasm-baseline": "npm:@prisma/query-engine-wasm@0.0.19", "query-engine-wasm-latest": "npm:@prisma/query-engine-wasm@latest", - "ts-pattern": "^5.0.8", - "undici": "6.7.0", - "wrangler": "^3.34.2", + "ts-pattern": "5.1.0", + "undici": "6.10.1", + "wrangler": "3.41.0", "ws": "8.16.0" }, "devDependencies": { - "@cloudflare/workers-types": "^4.20240314.0", + "@cloudflare/workers-types": "4.20240329.0", "@types/node": "20.11.24", "tsup": "8.0.2", "tsx": "4.7.1", diff --git a/query-engine/driver-adapters/package.json b/query-engine/driver-adapters/package.json index d3a787d3653..499f46070e5 100644 --- a/query-engine/driver-adapters/package.json +++ b/query-engine/driver-adapters/package.json @@ -22,6 +22,6 @@ "esbuild": "0.20.1", "tsup": "8.0.2", "tsx": "4.7.1", - "typescript": "5.3.3" + "typescript": "5.4.3" } } From c1288491e7204b7938a19091faa82642924f75ca Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Wed, 3 Apr 2024 08:34:10 +0200 Subject: [PATCH 08/25] chore(deps): update cachix/install-nix-action action to v26 (#4777) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- .github/workflows/on-push-to-main.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/on-push-to-main.yml b/.github/workflows/on-push-to-main.yml index 5c095a39098..c2967ab9a8a 100644 --- a/.github/workflows/on-push-to-main.yml +++ b/.github/workflows/on-push-to-main.yml @@ -17,7 +17,7 @@ jobs: steps: - uses: actions/checkout@v4 - - uses: cachix/install-nix-action@v25 + - uses: cachix/install-nix-action@v26 with: # we 
need internet access for the moment extra_nix_config: | From 426215e07e56c6c7da6d2c8cd5e97adb9237a05f Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Wed, 3 Apr 2024 08:34:28 +0200 Subject: [PATCH 09/25] chore(deps): update rtcamp/action-slack-notify action to v2.3.0 (#4801) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- .github/workflows/publish-prisma-schema-wasm.yml | 2 +- .github/workflows/publish-query-engine-wasm.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/publish-prisma-schema-wasm.yml b/.github/workflows/publish-prisma-schema-wasm.yml index 9560ebeef3b..4b5db4c04b3 100644 --- a/.github/workflows/publish-prisma-schema-wasm.yml +++ b/.github/workflows/publish-prisma-schema-wasm.yml @@ -57,7 +57,7 @@ jobs: run: echo "SLACK_FOOTER=<$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID|Click here to go to the job logs>" >> $GITHUB_ENV - name: Slack Notification on Failure if: ${{ failure() }} - uses: rtCamp/action-slack-notify@v2.2.1 + uses: rtCamp/action-slack-notify@v2.3.0 env: SLACK_TITLE: 'Building and publishing @prisma/prisma-schema-wasm failed :x:' SLACK_COLOR: '#FF0000' diff --git a/.github/workflows/publish-query-engine-wasm.yml b/.github/workflows/publish-query-engine-wasm.yml index 41d5d8611b1..bba9dc1eb65 100644 --- a/.github/workflows/publish-query-engine-wasm.yml +++ b/.github/workflows/publish-query-engine-wasm.yml @@ -57,7 +57,7 @@ jobs: run: echo "SLACK_FOOTER=<$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID|Click here to go to the job logs>" >> $GITHUB_ENV - name: Slack Notification on Failure if: ${{ failure() }} - uses: rtCamp/action-slack-notify@v2.2.1 + uses: rtCamp/action-slack-notify@v2.3.0 env: SLACK_TITLE: "Building and publishing @prisma/query-engine-wasm failed :x:" SLACK_COLOR: "#FF0000" From c3b400aec636e64322e31ece2b9198effc47f541 Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?Jo=C3=ABl=20Galeran?= Date: Wed, 3 Apr 2024 08:52:40 +0200 Subject: [PATCH 10/25] chore(renovate): fix JSON (#4804) --- renovate.json | 42 ++++++++++-------------------------------- 1 file changed, 10 insertions(+), 32 deletions(-) diff --git a/renovate.json b/renovate.json index 83ea8d3b295..6490ec42b53 100644 --- a/renovate.json +++ b/renovate.json @@ -11,57 +11,35 @@ "sbt": { "enabled": false }, - "schedule": [ - "every weekend" - ], + "schedule": ["every weekend"], "minimumReleaseAge": "7 days", "rangeStrategy": "pin", "separateMinorPatch": true, "configMigration": true, - "ignoreDeps": [ - "query-engine-wasm-baseline", - ], + "ignoreDeps": ["query-engine-wasm-baseline"], "packageRules": [ { - "matchFileNames": [ - "docker-compose.yml" - ], - "matchUpdateTypes": [ - "minor", - "major" - ], + "matchFileNames": ["docker-compose.yml"], + "matchUpdateTypes": ["minor", "major"], "enabled": false }, { "groupName": "Weekly vitess docker image version update", - "matchPackageNames": [ - "vitess/vttestserver" - ], - "schedule": [ - "before 7am on Wednesday" - ] + "matchPackageNames": ["vitess/vttestserver"], + "schedule": ["before 7am on Wednesday"] }, { "groupName": "Prisma Driver Adapters", - "matchPackageNames": [ - "@prisma/driver-adapter-utils" - ], - "matchPackagePrefixes": [ - "@prisma/adapter" - ], - "schedule": [ - "at any time" - ] + "matchPackageNames": ["@prisma/driver-adapter-utils"], + "matchPackagePrefixes": ["@prisma/adapter"], + "schedule": ["at any time"] }, { "groupName": "Driver Adapters directory", "matchFileNames": ["query-engine/driver-adapters/**"] }, { - "matchPackageNames": [ - "node", - "pnpm" - ], + "matchPackageNames": ["node", "pnpm"], "enabled": false } ] From d5a80e20a6ca301482bfef00cfcc918e995f0057 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Wed, 3 Apr 2024 09:33:04 +0200 Subject: [PATCH 11/25] chore(deps): update dependency typescript to v5.4.3 (#4800) Co-authored-by: 
renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- .../query-engine-wasm/analyse/package.json | 2 +- .../query-engine-wasm/analyse/pnpm-lock.yaml | 14 +++++++------- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/query-engine/query-engine-wasm/analyse/package.json b/query-engine/query-engine-wasm/analyse/package.json index e752ad09078..718129e4f4c 100644 --- a/query-engine/query-engine-wasm/analyse/package.json +++ b/query-engine/query-engine-wasm/analyse/package.json @@ -10,6 +10,6 @@ "devDependencies": { "ts-node": "10.9.2", "tsx": "4.7.1", - "typescript": "5.4.2" + "typescript": "5.4.3" } } diff --git a/query-engine/query-engine-wasm/analyse/pnpm-lock.yaml b/query-engine/query-engine-wasm/analyse/pnpm-lock.yaml index a15028ee9f7..6f0e83bca27 100644 --- a/query-engine/query-engine-wasm/analyse/pnpm-lock.yaml +++ b/query-engine/query-engine-wasm/analyse/pnpm-lock.yaml @@ -7,13 +7,13 @@ settings: devDependencies: ts-node: specifier: 10.9.2 - version: 10.9.2(@types/node@20.10.8)(typescript@5.4.2) + version: 10.9.2(@types/node@20.10.8)(typescript@5.4.3) tsx: specifier: 4.7.1 version: 4.7.1 typescript: - specifier: 5.4.2 - version: 5.4.2 + specifier: 5.4.3 + version: 5.4.3 packages: @@ -346,7 +346,7 @@ packages: resolution: {integrity: sha512-seS2Tj26TBVOC2NIc2rOe2y2ZO7efxITtLZcGSOnHHNOQ7CkiUBfw0Iw2ck6xkIhPwLhKNLS8BO+hEpngQlqzw==} dev: true - /ts-node@10.9.2(@types/node@20.10.8)(typescript@5.4.2): + /ts-node@10.9.2(@types/node@20.10.8)(typescript@5.4.3): resolution: {integrity: sha512-f0FFpIdcHgn8zcPSbf1dRevwt047YMnaiJM3u2w2RewrB+fob/zePZcrOyQoLMMO7aBIddLcQIEK5dYjkLnGrQ==} hasBin: true peerDependencies: @@ -372,7 +372,7 @@ packages: create-require: 1.1.1 diff: 4.0.2 make-error: 1.3.6 - typescript: 5.4.2 + typescript: 5.4.3 v8-compile-cache-lib: 3.0.1 yn: 3.1.1 dev: true @@ -388,8 +388,8 @@ packages: fsevents: 2.3.3 dev: true - /typescript@5.4.2: - resolution: {integrity: 
sha512-+2/g0Fds1ERlP6JsakQQDXjZdZMM+rqpamFZJEKh4kwTIn3iDkgKtby0CeNd5ATNZ4Ry1ax15TMx0W2V+miizQ==} + /typescript@5.4.3: + resolution: {integrity: sha512-KrPd3PKaCLr78MalgiwJnA25Nm8HAmdwN3mYUYZgG/wizIo9EainNVQI9/yDavtVFRN2h3k8uf3GLHuhDMgEHg==} engines: {node: '>=14.17'} hasBin: true dev: true From fa0e26796abbdffc78fadf89512aa219aac0b5c0 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Wed, 3 Apr 2024 09:34:55 +0200 Subject: [PATCH 12/25] chore(deps): update driver adapters directory (patch) (#4775) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- query-engine/driver-adapters/executor/package.json | 4 ++-- query-engine/driver-adapters/package.json | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/query-engine/driver-adapters/executor/package.json b/query-engine/driver-adapters/executor/package.json index 3da91f92092..492f118f407 100644 --- a/query-engine/driver-adapters/executor/package.json +++ b/query-engine/driver-adapters/executor/package.json @@ -42,9 +42,9 @@ }, "devDependencies": { "@cloudflare/workers-types": "4.20240329.0", - "@types/node": "20.11.24", + "@types/node": "20.11.30", "tsup": "8.0.2", "tsx": "4.7.1", - "typescript": "5.4.2" + "typescript": "5.4.3" } } diff --git a/query-engine/driver-adapters/package.json b/query-engine/driver-adapters/package.json index 499f46070e5..6c80c92a079 100644 --- a/query-engine/driver-adapters/package.json +++ b/query-engine/driver-adapters/package.json @@ -18,8 +18,8 @@ "keywords": [], "author": "", "devDependencies": { - "@types/node": "20.11.24", - "esbuild": "0.20.1", + "@types/node": "20.11.30", + "esbuild": "0.20.2", "tsup": "8.0.2", "tsx": "4.7.1", "typescript": "5.4.3" From cf53d7edae7bf394943b08ec4c70cb573fdc26d3 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Wed, 3 Apr 2024 11:36:32 +0200 Subject: [PATCH 13/25] chore(deps): update driver adapters 
directory (minor) (#4806) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- query-engine/driver-adapters/executor/package.json | 8 ++++---- query-engine/driver-adapters/package.json | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/query-engine/driver-adapters/executor/package.json b/query-engine/driver-adapters/executor/package.json index 492f118f407..d2fa9d7d23d 100644 --- a/query-engine/driver-adapters/executor/package.json +++ b/query-engine/driver-adapters/executor/package.json @@ -36,13 +36,13 @@ "query-engine-wasm-baseline": "npm:@prisma/query-engine-wasm@0.0.19", "query-engine-wasm-latest": "npm:@prisma/query-engine-wasm@latest", "ts-pattern": "5.1.0", - "undici": "6.10.1", - "wrangler": "3.41.0", + "undici": "6.11.1", + "wrangler": "3.44.0", "ws": "8.16.0" }, "devDependencies": { - "@cloudflare/workers-types": "4.20240329.0", - "@types/node": "20.11.30", + "@cloudflare/workers-types": "4.20240402.0", + "@types/node": "20.12.3", "tsup": "8.0.2", "tsx": "4.7.1", "typescript": "5.4.3" diff --git a/query-engine/driver-adapters/package.json b/query-engine/driver-adapters/package.json index 6c80c92a079..4d7b5a59e71 100644 --- a/query-engine/driver-adapters/package.json +++ b/query-engine/driver-adapters/package.json @@ -18,7 +18,7 @@ "keywords": [], "author": "", "devDependencies": { - "@types/node": "20.11.30", + "@types/node": "20.12.3", "esbuild": "0.20.2", "tsup": "8.0.2", "tsx": "4.7.1", From fc73af6cd8932d6a618a6c74cdaa015542d7adae Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Wed, 3 Apr 2024 11:42:21 +0200 Subject: [PATCH 14/25] fix(deps): update dependency @effect/schema to v0.64.18 (#4805) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- query-engine/driver-adapters/executor/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/query-engine/driver-adapters/executor/package.json 
b/query-engine/driver-adapters/executor/package.json index d2fa9d7d23d..e76453877a5 100644 --- a/query-engine/driver-adapters/executor/package.json +++ b/query-engine/driver-adapters/executor/package.json @@ -24,7 +24,7 @@ "sideEffects": false, "license": "Apache-2.0", "dependencies": { - "@effect/schema": "0.64.16", + "@effect/schema": "0.64.18", "@prisma/adapter-d1": "workspace:*", "@prisma/adapter-libsql": "workspace:*", "@prisma/adapter-neon": "workspace:*", From 3d9274825d2d30080cd9745910e305e8d38fdb76 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=C3=ABl=20Galeran?= Date: Thu, 4 Apr 2024 19:12:38 +0200 Subject: [PATCH 15/25] ci: update pnpm/action-setup to v3 (#4811) --- .github/workflows/test-query-engine-driver-adapters.yml | 2 +- .github/workflows/wasm-benchmarks.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/test-query-engine-driver-adapters.yml b/.github/workflows/test-query-engine-driver-adapters.yml index 8429a08acfc..ce600021b41 100644 --- a/.github/workflows/test-query-engine-driver-adapters.yml +++ b/.github/workflows/test-query-engine-driver-adapters.yml @@ -70,7 +70,7 @@ jobs: node-version: ${{ matrix.node_version }} - name: "Setup pnpm" - uses: pnpm/action-setup@v2 + uses: pnpm/action-setup@v3.0.0 with: version: 8 diff --git a/.github/workflows/wasm-benchmarks.yml b/.github/workflows/wasm-benchmarks.yml index 48aff148a69..0428eaa0518 100644 --- a/.github/workflows/wasm-benchmarks.yml +++ b/.github/workflows/wasm-benchmarks.yml @@ -30,7 +30,7 @@ jobs: uses: actions/setup-node@v4 - name: "Setup pnpm" - uses: pnpm/action-setup@v2 + uses: pnpm/action-setup@v3.0.0 with: version: 8 From e66d30dc00faf0c9d53c02f47a511bb304054f91 Mon Sep 17 00:00:00 2001 From: Pranaya Tomar Date: Fri, 5 Apr 2024 10:46:39 +0200 Subject: [PATCH 16/25] feat(psl): Recommend type in error message if schema validation fails because of case (#4137) * feat(psl): error for ignore case validation * feat(psl): fix formatting * feat(psl): 
ignore Source & Generator when recommending type in error * feat(psl): enclose recommended type in quotes Closes #15174 --------- Co-authored-by: Sophie <29753584+Druue@users.noreply.github.com> --- psl/diagnostics/src/error.rs | 7 +++ psl/parser-database/src/names.rs | 2 +- psl/parser-database/src/types.rs | 79 +++++++++++++++++++++++++------- psl/psl/tests/base/basic.rs | 64 ++++++++++++++++++++++++++ 4 files changed, 134 insertions(+), 18 deletions(-) diff --git a/psl/diagnostics/src/error.rs b/psl/diagnostics/src/error.rs index c6a16dffdba..6a11d461a13 100644 --- a/psl/diagnostics/src/error.rs +++ b/psl/diagnostics/src/error.rs @@ -293,6 +293,13 @@ impl DatamodelError { Self::new(msg, span) } + pub fn new_type_for_case_not_found_error(type_name: &str, suggestion: &str, span: Span) -> DatamodelError { + let msg = format!( + "Type \"{type_name}\" is neither a built-in type, nor refers to another model, custom type, or enum. Did you mean \"{suggestion}\"?" + ); + Self::new(msg, span) + } + pub fn new_scalar_type_not_found_error(type_name: &str, span: Span) -> DatamodelError { Self::new(format!("Type \"{type_name}\" is not a built-in type."), span) } diff --git a/psl/parser-database/src/names.rs b/psl/parser-database/src/names.rs index 9ed71f98742..3208c1c3bdb 100644 --- a/psl/parser-database/src/names.rs +++ b/psl/parser-database/src/names.rs @@ -159,7 +159,7 @@ fn duplicate_top_error(existing: &ast::Top, duplicate: &ast::Top) -> DatamodelEr } fn assert_is_not_a_reserved_scalar_type(ident: &ast::Identifier, ctx: &mut Context<'_>) { - if ScalarType::try_from_str(&ident.name).is_some() { + if ScalarType::try_from_str(&ident.name, false).is_some() { ctx.push_error(DatamodelError::new_reserved_scalar_type_error(&ident.name, ident.span)); } } diff --git a/psl/parser-database/src/types.rs b/psl/parser-database/src/types.rs index 1668243247b..c5f2d222ce1 100644 --- a/psl/parser-database/src/types.rs +++ b/psl/parser-database/src/types.rs @@ -648,10 +648,41 @@ fn 
visit_model<'db>(model_id: ast::ModelId, ast_model: &'db ast::Model, ctx: &mu native_type: None, }); } - Err(supported) => ctx.push_error(DatamodelError::new_type_not_found_error( - supported, - ast_field.field_type.span(), - )), + Err(supported) => { + let top_names: Vec<_> = ctx + .ast + .iter_tops() + .filter_map(|(_, top)| match top { + ast::Top::Source(_) | ast::Top::Generator(_) => None, + _ => Some(&top.identifier().name), + }) + .collect(); + + match top_names.iter().find(|&name| name.to_lowercase() == supported) { + Some(ignore_case_match) => { + ctx.push_error(DatamodelError::new_type_for_case_not_found_error( + supported, + ignore_case_match.as_str(), + ast_field.field_type.span(), + )); + } + None => match ScalarType::try_from_str(supported, true) { + Some(ignore_case_match) => { + ctx.push_error(DatamodelError::new_type_for_case_not_found_error( + supported, + ignore_case_match.as_str(), + ast_field.field_type.span(), + )); + } + None => { + ctx.push_error(DatamodelError::new_type_not_found_error( + supported, + ast_field.field_type.span(), + )); + } + }, + } + } } } } @@ -699,7 +730,7 @@ fn field_type<'db>(field: &'db ast::Field, ctx: &mut Context<'db>) -> Result Option { - match s { - "Int" => Some(ScalarType::Int), - "BigInt" => Some(ScalarType::BigInt), - "Float" => Some(ScalarType::Float), - "Boolean" => Some(ScalarType::Boolean), - "String" => Some(ScalarType::String), - "DateTime" => Some(ScalarType::DateTime), - "Json" => Some(ScalarType::Json), - "Bytes" => Some(ScalarType::Bytes), - "Decimal" => Some(ScalarType::Decimal), - _ => None, + pub(crate) fn try_from_str(s: &str, ignore_case: bool) -> Option { + match ignore_case { + true => match s.to_lowercase().as_str() { + "int" => Some(ScalarType::Int), + "bigint" => Some(ScalarType::BigInt), + "float" => Some(ScalarType::Float), + "boolean" => Some(ScalarType::Boolean), + "string" => Some(ScalarType::String), + "datetime" => Some(ScalarType::DateTime), + "json" => Some(ScalarType::Json), + 
"bytes" => Some(ScalarType::Bytes), + "decimal" => Some(ScalarType::Decimal), + _ => None, + }, + _ => match s { + "Int" => Some(ScalarType::Int), + "BigInt" => Some(ScalarType::BigInt), + "Float" => Some(ScalarType::Float), + "Boolean" => Some(ScalarType::Boolean), + "String" => Some(ScalarType::String), + "DateTime" => Some(ScalarType::DateTime), + "Json" => Some(ScalarType::Json), + "Bytes" => Some(ScalarType::Bytes), + "Decimal" => Some(ScalarType::Decimal), + _ => None, + }, } } } diff --git a/psl/psl/tests/base/basic.rs b/psl/psl/tests/base/basic.rs index ca806fb6f51..a8c47884c21 100644 --- a/psl/psl/tests/base/basic.rs +++ b/psl/psl/tests/base/basic.rs @@ -239,3 +239,67 @@ fn type_aliases_must_error() { expectation.assert_eq(&error); } + +#[test] +fn must_return_good_error_message_for_type_match() { + let dml = indoc! {r#" + model User { + firstName String + } + model B { + a datetime + b footime + c user + d DB + e JS + } + + datasource db { + provider = "postgresql" + url = env("TEST_DATABASE_URL") + extensions = [citext, pg_trgm] + } + + generator js { + provider = "prisma-client-js" + previewFeatures = ["postgresqlExtensions"] + } + "#}; + + let error = parse_unwrap_err(dml); + + let expected = expect![[r#" + error: Type "datetime" is neither a built-in type, nor refers to another model, custom type, or enum. Did you mean "DateTime"? + --> schema.prisma:5 +  |  +  4 | model B { +  5 |  a datetime +  |  + error: Type "footime" is neither a built-in type, nor refers to another model, custom type, or enum. + --> schema.prisma:6 +  |  +  5 |  a datetime +  6 |  b footime +  |  + error: Type "user" is neither a built-in type, nor refers to another model, custom type, or enum. Did you mean "User"? + --> schema.prisma:7 +  |  +  6 |  b footime +  7 |  c user +  |  + error: Type "DB" is neither a built-in type, nor refers to another model, custom type, or enum. 
+ --> schema.prisma:8 +  |  +  7 |  c user +  8 |  d DB +  |  + error: Type "JS" is neither a built-in type, nor refers to another model, custom type, or enum. + --> schema.prisma:9 +  |  +  8 |  d DB +  9 |  e JS +  |  + "#]]; + + expected.assert_eq(&error); +} From dcdb692a8946281c0c85b4a6f7081984eec92b92 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tom=20Houl=C3=A9?= <13155277+tomhoule@users.noreply.github.com> Date: Mon, 8 Apr 2024 10:42:49 +0200 Subject: [PATCH 17/25] Experiment with multi-file schema handling in PSL (#4243) * Implement multi-file schema handling in PSL This commit implements multi-file schema handling in the Prisma Schema Language. At a high level, instead of accepting a single string, `psl::validate_multi_file()` is an alternative to `psl::validate()` that accepts something morally equivalent to: ```json { "./prisma/schema/a.prisma": "datasource db { ... }", "./prisma/schema/nested/b.prisma": "model Test { ... }" } ``` There are tests for PSL validation with multiple schema files, but most of the rest of engines still consumes the single file version of `psl::validate()`. The implementation and the return type are shared between `psl::validate_multi_file()` and `psl::validate()`, so the change is completely transparent, other than the expectation of passing in a list of (file_name, file_contents) instead of a single string. The `psl::validate()` entry point should behave exactly the same as `psl::multi_schema()` with a single file named `schema.prisma`. In particular, it has the exact same return type. Implementation ============== This is achieved by extending `Span` to contain, in addition to a start and end offset, a `FileId`. The `FileId` is a unique identifier for a file and its parsed `SchemaAst` inside `ParserDatabase`. The identifier types for AST items in `ParserDatabase` are also extended to contain the `FileId`, so that they can be uniquely referred to in the context of the (multi-file) schema. 
After the analysis phase (the `parser_database` crate), consumers of the analyzed schema become multi-file aware completely transparently, no change is necessary in the other engines. The only changes that will be required at scattered points across the codebase are the `psl::validate()` call sites that will need to receive a `Vec<(String, SourceFile)>` instead of a single `SourceFile`. This PR does _not_ deal with that, but it makes these call sites obvious by the entry points they use: `psl::validate()`, `psl::parse_schema()` and the various `*_assert_single()` methods on `ParserDatabase`. The PR contains tests confirming that schema analysis, validation and displaying diagnostics across multiple files work as expected. Status of this PR ================= This is going to be directly mergeable after review, and it will not affect the current schema handling behaviour when dealing with a single schema file. Next steps ========== - Replace all calls to `psl::validate()` with calls to `psl::validate_multi_file()`. - The `*_assert_single()` calls should be progressively replaced with their multi-file counterparts across engines. - The language server should start sending multiple files to prisma-schema-wasm in all calls. This is not in the spirit of the language server spec, but that is the most immediate solution. We'll have to make `range_to_span()` in `prisma-fmt` multi-schema aware by taking a FileId param. Links ===== Relevant issue: https://github.com/prisma/prisma/issues/2377 Also see the [internal design doc](https://www.notion.so/prismaio/Multi-file-Schema-24d68fe8664048ad86252fe446caac24?d=68ef128f25974e619671a9855f65f44d#2889a038e68c4fe1ac9afe3cd34978bd). 
* chore(prisma-fmt): fix typo * chore(prisma-fmt): add comment * chore(prisma-fmt): fix compilation after https://github.com/prisma/prisma-engines/pull/4137 --------- Co-authored-by: Alberto Schiabel Co-authored-by: jkomyno --- prisma-fmt/src/code_actions.rs | 33 +++-- prisma-fmt/src/code_actions/multi_schema.rs | 2 +- prisma-fmt/src/lib.rs | 2 +- prisma-fmt/src/text_document_completion.rs | 6 +- .../text_document_completion/datasource.rs | 2 +- prisma-fmt/tests/code_actions/test_api.rs | 8 +- psl/diagnostics/src/lib.rs | 2 +- psl/diagnostics/src/span.rs | 26 +++- psl/parser-database/src/attributes.rs | 132 ++++++++++-------- psl/parser-database/src/attributes/default.rs | 26 ++-- psl/parser-database/src/attributes/id.rs | 22 +-- psl/parser-database/src/attributes/map.rs | 4 +- .../src/attributes/native_types.rs | 2 +- psl/parser-database/src/context.rs | 72 ++++++---- psl/parser-database/src/context/attributes.rs | 12 +- psl/parser-database/src/files.rs | 37 +++++ psl/parser-database/src/ids.rs | 23 +++ psl/parser-database/src/lib.rs | 85 ++++++++--- psl/parser-database/src/names.rs | 42 ++++-- psl/parser-database/src/relations.rs | 66 ++++++--- psl/parser-database/src/types.rs | 82 ++++++----- psl/parser-database/src/walkers.rs | 36 ++--- .../src/walkers/composite_type.rs | 17 ++- psl/parser-database/src/walkers/enum.rs | 9 +- psl/parser-database/src/walkers/field.rs | 14 +- psl/parser-database/src/walkers/index.rs | 4 +- psl/parser-database/src/walkers/model.rs | 16 +-- .../src/walkers/model/primary_key.rs | 4 +- psl/parser-database/src/walkers/relation.rs | 2 +- .../src/walkers/relation_field.rs | 7 +- .../src/walkers/scalar_field.rs | 10 +- .../postgres_datamodel_connector.rs | 2 +- .../src/configuration/configuration_struct.rs | 4 +- psl/psl-core/src/lib.rs | 53 ++++++- psl/psl-core/src/reformat.rs | 2 +- .../validations/composite_types.rs | 6 +- .../validations/constraint_namespace.rs | 31 ++-- .../validation_pipeline/validations/fields.rs | 4 +- 
.../validations/indexes.rs | 12 +- .../validation_pipeline/validations/models.rs | 14 +- .../validation_pipeline/validations/names.rs | 14 +- .../validations/relation_fields.rs | 2 +- .../validations/relations.rs | 2 +- psl/psl/src/lib.rs | 7 +- psl/psl/tests/common/asserts.rs | 6 +- psl/psl/tests/config/nice_warnings.rs | 2 +- psl/psl/tests/datamodel_tests.rs | 1 + psl/psl/tests/multi_file/basic.rs | 114 +++++++++++++++ psl/psl/tests/multi_file/mod.rs | 1 + psl/psl/tests/validation_tests.rs | 2 +- psl/schema-ast/src/ast/identifier.rs | 19 +-- psl/schema-ast/src/parser/parse_arguments.rs | 29 ++-- psl/schema-ast/src/parser/parse_attribute.rs | 13 +- .../src/parser/parse_composite_type.rs | 28 ++-- psl/schema-ast/src/parser/parse_enum.rs | 30 ++-- psl/schema-ast/src/parser/parse_expression.rs | 35 +++-- psl/schema-ast/src/parser/parse_field.rs | 17 +-- psl/schema-ast/src/parser/parse_model.rs | 20 ++- psl/schema-ast/src/parser/parse_schema.rs | 22 +-- .../src/parser/parse_source_and_generator.rs | 27 ++-- psl/schema-ast/src/parser/parse_types.rs | 21 +-- psl/schema-ast/src/parser/parse_view.rs | 18 ++- .../query-tests-setup/src/runner/mod.rs | 2 +- .../query-engine-node-api/src/engine.rs | 11 +- .../query-structure/src/composite_type.rs | 3 +- .../query-structure/src/field/composite.rs | 4 +- query-engine/query-structure/src/field/mod.rs | 6 +- .../query-structure/src/field/scalar.rs | 4 +- .../src/internal_data_model.rs | 6 +- .../query-structure/src/internal_enum.rs | 7 +- query-engine/query-structure/src/model.rs | 4 +- query-engine/schema/src/build.rs | 2 +- query-engine/schema/src/build/enum_types.rs | 3 +- query-engine/schema/src/output_types.rs | 8 +- query-engine/schema/src/query_schema.rs | 6 +- .../src/introspection_context.rs | 11 +- .../datamodel_calculator/context.rs | 6 +- .../src/introspection/introspection_map.rs | 16 +-- .../introspection_pair/enumerator.rs | 6 +- .../introspection/introspection_pair/model.rs | 6 +- 
.../introspection/introspection_pair/view.rs | 12 +- .../src/introspection/rendering/enums.rs | 4 +- .../src/sql_schema_calculator.rs | 22 +-- .../sql_schema_calculator_flavour/mssql.rs | 2 +- .../sql_schema_calculator_flavour/postgres.rs | 2 +- schema-engine/core/src/state.rs | 3 +- .../tests/referential_actions/mysql.rs | 2 +- 87 files changed, 967 insertions(+), 554 deletions(-) create mode 100644 psl/parser-database/src/files.rs create mode 100644 psl/parser-database/src/ids.rs create mode 100644 psl/psl/tests/multi_file/basic.rs create mode 100644 psl/psl/tests/multi_file/mod.rs diff --git a/prisma-fmt/src/code_actions.rs b/prisma-fmt/src/code_actions.rs index 4f072f60b41..371e791e49c 100644 --- a/prisma-fmt/src/code_actions.rs +++ b/prisma-fmt/src/code_actions.rs @@ -31,8 +31,13 @@ pub(crate) fn available_actions(schema: String, params: CodeActionParams) -> Vec let datasource = config.datasources.first(); - for source in validated_schema.db.ast().sources() { - relation_mode::edit_referential_integrity(&mut actions, ¶ms, validated_schema.db.source(), source) + for source in validated_schema.db.ast_assert_single().sources() { + relation_mode::edit_referential_integrity( + &mut actions, + ¶ms, + validated_schema.db.source_assert_single(), + source, + ) } // models AND views @@ -45,21 +50,27 @@ pub(crate) fn available_actions(schema: String, params: CodeActionParams) -> Vec multi_schema::add_schema_block_attribute_model( &mut actions, ¶ms, - validated_schema.db.source(), + validated_schema.db.source_assert_single(), config, model, ); - multi_schema::add_schema_to_schemas(&mut actions, ¶ms, validated_schema.db.source(), config, model); + multi_schema::add_schema_to_schemas( + &mut actions, + ¶ms, + validated_schema.db.source_assert_single(), + config, + model, + ); } if matches!(datasource, Some(ds) if ds.active_provider == "mongodb") { - mongodb::add_at_map_for_id(&mut actions, ¶ms, validated_schema.db.source(), model); + mongodb::add_at_map_for_id(&mut actions, 
¶ms, validated_schema.db.source_assert_single(), model); mongodb::add_native_for_auto_id( &mut actions, ¶ms, - validated_schema.db.source(), + validated_schema.db.source_assert_single(), model, datasource.unwrap(), ); @@ -71,7 +82,7 @@ pub(crate) fn available_actions(schema: String, params: CodeActionParams) -> Vec multi_schema::add_schema_block_attribute_enum( &mut actions, ¶ms, - validated_schema.db.source(), + validated_schema.db.source_assert_single(), config, enumerator, ) @@ -88,7 +99,7 @@ pub(crate) fn available_actions(schema: String, params: CodeActionParams) -> Vec relations::add_referenced_side_unique( &mut actions, ¶ms, - validated_schema.db.source(), + validated_schema.db.source_assert_single(), complete_relation, ); @@ -96,7 +107,7 @@ pub(crate) fn available_actions(schema: String, params: CodeActionParams) -> Vec relations::add_referencing_side_unique( &mut actions, ¶ms, - validated_schema.db.source(), + validated_schema.db.source_assert_single(), complete_relation, ); } @@ -105,7 +116,7 @@ pub(crate) fn available_actions(schema: String, params: CodeActionParams) -> Vec relations::add_index_for_relation_fields( &mut actions, ¶ms, - validated_schema.db.source(), + validated_schema.db.source_assert_single(), complete_relation.referencing_field(), ); } @@ -114,7 +125,7 @@ pub(crate) fn available_actions(schema: String, params: CodeActionParams) -> Vec relation_mode::replace_set_default_mysql( &mut actions, ¶ms, - validated_schema.db.source(), + validated_schema.db.source_assert_single(), complete_relation, config, ) diff --git a/prisma-fmt/src/code_actions/multi_schema.rs b/prisma-fmt/src/code_actions/multi_schema.rs index 0e47a008a91..7e6aa9ceaf8 100644 --- a/prisma-fmt/src/code_actions/multi_schema.rs +++ b/prisma-fmt/src/code_actions/multi_schema.rs @@ -142,7 +142,7 @@ pub(super) fn add_schema_to_schemas( formatted_attribute, true, // todo: update spans so that we can just append to the end of the _inside_ of the array. 
Instead of needing to re-append the `]` or taking the span end -1 - Span::new(span.start, span.end - 1), + Span::new(span.start, span.end - 1, psl::parser_database::FileId::ZERO), params, ) } diff --git a/prisma-fmt/src/lib.rs b/prisma-fmt/src/lib.rs index 0449faf5266..ada79cd7290 100644 --- a/prisma-fmt/src/lib.rs +++ b/prisma-fmt/src/lib.rs @@ -225,7 +225,7 @@ pub(crate) fn range_to_span(range: Range, document: &str) -> ast::Span { let start = position_to_offset(&range.start, document).unwrap(); let end = position_to_offset(&range.end, document).unwrap(); - ast::Span::new(start, end) + ast::Span::new(start, end, psl::parser_database::FileId::ZERO) } /// Gives the LSP position right after the given span. diff --git a/prisma-fmt/src/text_document_completion.rs b/prisma-fmt/src/text_document_completion.rs index 4df8f3e9147..caca887c6ac 100644 --- a/prisma-fmt/src/text_document_completion.rs +++ b/prisma-fmt/src/text_document_completion.rs @@ -41,7 +41,7 @@ pub(crate) fn completion(schema: String, params: CompletionParams) -> Completion let db = { let mut diag = Diagnostics::new(); - ParserDatabase::new(source_file, &mut diag) + ParserDatabase::new_single_file(source_file, &mut diag) }; let ctx = CompletionContext { @@ -91,7 +91,7 @@ impl<'a> CompletionContext<'a> { } fn push_ast_completions(ctx: CompletionContext<'_>, completion_list: &mut CompletionList) { - match ctx.db.ast().find_at_position(ctx.position) { + match ctx.db.ast_assert_single().find_at_position(ctx.position) { ast::SchemaPosition::Model( _model_id, ast::ModelPosition::Field(_, ast::FieldPosition::Attribute("relation", _, Some(attr_name))), @@ -190,7 +190,7 @@ fn ds_has_prop(ctx: CompletionContext<'_>, prop: &str) -> bool { fn push_namespaces(ctx: CompletionContext<'_>, completion_list: &mut CompletionList) { for (namespace, _) in ctx.namespaces() { - let insert_text = if add_quotes(ctx.params, ctx.db.source()) { + let insert_text = if add_quotes(ctx.params, ctx.db.source_assert_single()) { 
format!(r#""{namespace}""#) } else { namespace.to_string() diff --git a/prisma-fmt/src/text_document_completion/datasource.rs b/prisma-fmt/src/text_document_completion/datasource.rs index 02b7d9f4377..22da182868a 100644 --- a/prisma-fmt/src/text_document_completion/datasource.rs +++ b/prisma-fmt/src/text_document_completion/datasource.rs @@ -144,7 +144,7 @@ pub(super) fn url_env_db_completion(completion_list: &mut CompletionList, kind: _ => unreachable!(), }; - let insert_text = if add_quotes(ctx.params, ctx.db.source()) { + let insert_text = if add_quotes(ctx.params, ctx.db.source_assert_single()) { format!(r#""{text}""#) } else { text.to_owned() diff --git a/prisma-fmt/tests/code_actions/test_api.rs b/prisma-fmt/tests/code_actions/test_api.rs index 2be0c978aa8..ff874cf8699 100644 --- a/prisma-fmt/tests/code_actions/test_api.rs +++ b/prisma-fmt/tests/code_actions/test_api.rs @@ -19,8 +19,8 @@ fn parse_schema_diagnostics(file: impl Into) -> Option) -> Option Span { - Span { start, end } + pub fn new(start: usize, end: usize, file_id: FileId) -> Span { + Span { start, end, file_id } } /// Creates a new empty span. pub fn empty() -> Span { - Span { start: 0, end: 0 } + Span { + start: 0, + end: 0, + file_id: FileId::ZERO, + } } /// Is the given position inside the span? 
(boundaries included) @@ -27,11 +42,12 @@ impl Span { } } -impl From> for Span { - fn from(s: pest::Span<'_>) -> Self { +impl From<(FileId, pest::Span<'_>)> for Span { + fn from((file_id, s): (FileId, pest::Span<'_>)) -> Self { Span { start: s.start(), end: s.end(), + file_id, } } } diff --git a/psl/parser-database/src/attributes.rs b/psl/parser-database/src/attributes.rs index e944b2fdc8c..0d0bbfe786d 100644 --- a/psl/parser-database/src/attributes.rs +++ b/psl/parser-database/src/attributes.rs @@ -23,12 +23,16 @@ pub(super) fn resolve_attributes(ctx: &mut Context<'_>) { visit_relation_field_attributes(rfid, ctx); } - for top in ctx.ast.iter_tops() { + for top in ctx.iter_tops() { match top { - (ast::TopId::Model(model_id), ast::Top::Model(_)) => resolve_model_attributes(model_id, ctx), - (ast::TopId::Enum(enum_id), ast::Top::Enum(ast_enum)) => resolve_enum_attributes(enum_id, ast_enum, ctx), - (ast::TopId::CompositeType(ctid), ast::Top::CompositeType(ct)) => { - resolve_composite_type_attributes(ctid, ct, ctx) + ((file_id, ast::TopId::Model(model_id)), ast::Top::Model(_)) => { + resolve_model_attributes((file_id, model_id), ctx) + } + ((file_id, ast::TopId::Enum(enum_id)), ast::Top::Enum(ast_enum)) => { + resolve_enum_attributes((file_id, enum_id), ast_enum, ctx) + } + ((file_id, ast::TopId::CompositeType(ctid)), ast::Top::CompositeType(ct)) => { + resolve_composite_type_attributes((file_id, ctid), ct, ctx) } _ => (), } @@ -36,14 +40,14 @@ pub(super) fn resolve_attributes(ctx: &mut Context<'_>) { } fn resolve_composite_type_attributes<'db>( - ctid: ast::CompositeTypeId, + ctid: crate::CompositeTypeId, ct: &'db ast::CompositeType, ctx: &mut Context<'db>, ) { for (field_id, field) in ct.iter_fields() { let CompositeTypeField { r#type, .. 
} = ctx.types.composite_type_fields[&(ctid, field_id)]; - ctx.visit_attributes((ctid, field_id).into()); + ctx.visit_attributes((ctid.0, (ctid.1, field_id))); if let ScalarFieldType::BuiltInScalar(_scalar_type) = r#type { // native type attributes @@ -52,7 +56,7 @@ fn resolve_composite_type_attributes<'db>( (ctid, field_id), datasource_name, type_name, - &ctx.ast[args], + &ctx.asts[args], ctx, ) } @@ -74,11 +78,11 @@ fn resolve_composite_type_attributes<'db>( } } -fn resolve_enum_attributes<'db>(enum_id: ast::EnumId, ast_enum: &'db ast::Enum, ctx: &mut Context<'db>) { +fn resolve_enum_attributes<'db>(enum_id: crate::EnumId, ast_enum: &'db ast::Enum, ctx: &mut Context<'db>) { let mut enum_attributes = EnumAttributes::default(); for value_idx in 0..ast_enum.values.len() { - ctx.visit_attributes((enum_id, value_idx as u32).into()); + ctx.visit_attributes((enum_id.0, (enum_id.1, value_idx as u32))); // @map if ctx.visit_optional_single_attr("map") { if let Some(mapped_name) = map::visit_map_attribute(ctx) { @@ -93,7 +97,7 @@ fn resolve_enum_attributes<'db>(enum_id: ast::EnumId, ast_enum: &'db ast::Enum, // Now validate the enum attributes. - ctx.visit_attributes(enum_id.into()); + ctx.visit_attributes(enum_id); // @@map if ctx.visit_optional_single_attr("map") { @@ -114,7 +118,7 @@ fn resolve_enum_attributes<'db>(enum_id: ast::EnumId, ast_enum: &'db ast::Enum, ctx.validate_visited_attributes(); } -fn resolve_model_attributes(model_id: ast::ModelId, ctx: &mut Context<'_>) { +fn resolve_model_attributes(model_id: crate::ModelId, ctx: &mut Context<'_>) { let mut model_attributes = ModelAttributes::default(); // First resolve all the attributes defined on fields **in isolation**. @@ -123,7 +127,7 @@ fn resolve_model_attributes(model_id: ast::ModelId, ctx: &mut Context<'_>) { } // Resolve all the attributes defined on the model itself **in isolation**. 
- ctx.visit_attributes(model_id.into()); + ctx.visit_attributes(model_id); // @@ignore if ctx.visit_optional_single_attr("ignore") { @@ -185,7 +189,7 @@ fn visit_scalar_field_attributes( r#type, .. } = ctx.types[scalar_field_id]; - let ast_model = &ctx.ast[model_id]; + let ast_model = &ctx.asts[model_id]; let ast_field = &ast_model[field_id]; ctx.visit_scalar_field_attributes(model_id, field_id); @@ -240,7 +244,7 @@ fn visit_scalar_field_attributes( if let ScalarFieldType::BuiltInScalar(_scalar_type) = r#type { // native type attributes if let Some((datasource_name, type_name, attribute_id)) = ctx.visit_datasource_scoped() { - let attribute = &ctx.ast[attribute_id]; + let attribute = &ctx.asts[attribute_id]; native_types::visit_model_field_native_type_attribute( scalar_field_id, datasource_name, @@ -297,7 +301,7 @@ fn visit_field_unique(scalar_field_id: ScalarFieldId, model_data: &mut ModelAttr let attribute_id = ctx.current_attribute_id(); model_data.ast_indexes.push(( - attribute_id, + attribute_id.1, IndexAttribute { r#type: IndexType::Unique, fields: vec![FieldWithArgs { @@ -316,8 +320,8 @@ fn visit_field_unique(scalar_field_id: ScalarFieldId, model_data: &mut ModelAttr fn visit_relation_field_attributes(rfid: RelationFieldId, ctx: &mut Context<'_>) { let RelationField { model_id, field_id, .. } = ctx.types[rfid]; - let ast_field = &ctx.ast[model_id][field_id]; - ctx.visit_attributes((model_id, field_id).into()); + let ast_field = &ctx.asts[model_id][field_id]; + ctx.visit_attributes((model_id.0, (model_id.1, field_id))); // @relation // Relation attributes are not required at this stage. @@ -364,7 +368,7 @@ fn visit_relation_field_attributes(rfid: RelationFieldId, ctx: &mut Context<'_>) for underlying_field in ctx.types[rfid].fields.iter().flatten() { let ScalarField { model_id, field_id, .. 
} = ctx.types[*underlying_field]; - suggested_fields.push(ctx.ast[model_id][field_id].name()); + suggested_fields.push(ctx.asts[model_id][field_id].name()); } let suggestion = match suggested_fields.len() { @@ -391,7 +395,7 @@ fn visit_relation_field_attributes(rfid: RelationFieldId, ctx: &mut Context<'_>) ctx.validate_visited_attributes(); } -fn visit_model_ignore(model_id: ast::ModelId, model_data: &mut ModelAttributes, ctx: &mut Context<'_>) { +fn visit_model_ignore(model_id: crate::ModelId, model_data: &mut ModelAttributes, ctx: &mut Context<'_>) { let ignored_field_errors: Vec<_> = ctx .types .range_model_scalar_fields(model_id) @@ -400,7 +404,7 @@ fn visit_model_ignore(model_id: ast::ModelId, model_data: &mut ModelAttributes, DatamodelError::new_attribute_validation_error( "Fields on an already ignored Model do not need an `@ignore` annotation.", "@ignore", - ctx.ast[sf.model_id][sf.field_id].span(), + ctx.asts[sf.model_id][sf.field_id].span(), ) }) .collect(); @@ -413,7 +417,7 @@ fn visit_model_ignore(model_id: ast::ModelId, model_data: &mut ModelAttributes, } /// Validate @@fulltext on models -fn model_fulltext(data: &mut ModelAttributes, model_id: ast::ModelId, ctx: &mut Context<'_>) { +fn model_fulltext(data: &mut ModelAttributes, model_id: crate::ModelId, ctx: &mut Context<'_>) { let mut index_attribute = IndexAttribute { r#type: IndexType::Fulltext, ..Default::default() @@ -440,11 +444,11 @@ fn model_fulltext(data: &mut ModelAttributes, model_id: ast::ModelId, ctx: &mut index_attribute.mapped_name = mapped_name; - data.ast_indexes.push((ctx.current_attribute_id(), index_attribute)); + data.ast_indexes.push((ctx.current_attribute_id().1, index_attribute)); } /// Validate @@index on models. 
-fn model_index(data: &mut ModelAttributes, model_id: ast::ModelId, ctx: &mut Context<'_>) { +fn model_index(data: &mut ModelAttributes, model_id: crate::ModelId, ctx: &mut Context<'_>) { let mut index_attribute = IndexAttribute { r#type: IndexType::Normal, ..Default::default() @@ -514,11 +518,11 @@ fn model_index(data: &mut ModelAttributes, model_id: ast::ModelId, ctx: &mut Con index_attribute.algorithm = algo; index_attribute.clustered = validate_clustering_setting(ctx); - data.ast_indexes.push((ctx.current_attribute_id(), index_attribute)); + data.ast_indexes.push((ctx.current_attribute_id().1, index_attribute)); } /// Validate @@unique on models. -fn model_unique(data: &mut ModelAttributes, model_id: ast::ModelId, ctx: &mut Context<'_>) { +fn model_unique(data: &mut ModelAttributes, model_id: crate::ModelId, ctx: &mut Context<'_>) { let mut index_attribute = IndexAttribute { r#type: IndexType::Unique, ..Default::default() @@ -533,7 +537,7 @@ fn model_unique(data: &mut ModelAttributes, model_id: ast::ModelId, ctx: &mut Co let current_attribute = ctx.current_attribute(); let current_attribute_id = ctx.current_attribute_id(); - let ast_model = &ctx.ast[model_id]; + let ast_model = &ctx.asts[model_id]; let name = get_name_argument(ctx); let mapped_name = { @@ -570,12 +574,12 @@ fn model_unique(data: &mut ModelAttributes, model_id: ast::ModelId, ctx: &mut Co index_attribute.mapped_name = mapped_name; index_attribute.clustered = validate_clustering_setting(ctx); - data.ast_indexes.push((current_attribute_id, index_attribute)); + data.ast_indexes.push((current_attribute_id.1, index_attribute)); } fn common_index_validations( index_data: &mut IndexAttribute, - model_id: ast::ModelId, + model_id: crate::ModelId, resolving: FieldResolvingSetup, ctx: &mut Context<'_>, ) { @@ -599,9 +603,9 @@ fn common_index_validations( if !unresolvable_fields.is_empty() { let fields = unresolvable_fields .iter() - .map(|(top_id, field_name)| match top_id { + .map(|((file_id, top_id), 
field_name)| match top_id { ast::TopId::CompositeType(ctid) => { - let composite_type = &ctx.ast[*ctid].name(); + let composite_type = &ctx.asts[(*file_id, *ctid)].name(); Cow::from(format!("{field_name} in type {composite_type}")) } @@ -616,7 +620,7 @@ fn common_index_validations( if index_data.is_unique() { "unique " } else { "" }, fields.join(", "), ); - let model_name = ctx.ast[model_id].name(); + let model_name = ctx.asts[model_id].name(); DatamodelError::new_model_validation_error(message, "model", model_name, current_attribute.span) }); } @@ -636,7 +640,7 @@ fn common_index_validations( .flatten(); for underlying_field in fields { let ScalarField { model_id, field_id, .. } = ctx.types[*underlying_field]; - suggested_fields.push(ctx.ast[model_id][field_id].name()); + suggested_fields.push(ctx.asts[model_id][field_id].name()); } } @@ -658,7 +662,7 @@ fn common_index_validations( suggestion = suggestion ), "model", - ctx.ast[model_id].name(), + ctx.asts[model_id].name(), current_attribute.span, )); } @@ -667,9 +671,9 @@ fn common_index_validations( } /// @relation validation for relation fields. 
-fn visit_relation(model_id: ast::ModelId, relation_field_id: RelationFieldId, ctx: &mut Context<'_>) { +fn visit_relation(model_id: crate::ModelId, relation_field_id: RelationFieldId, ctx: &mut Context<'_>) { let attr = ctx.current_attribute(); - ctx.types[relation_field_id].relation_attribute = Some(ctx.current_attribute_id()); + ctx.types[relation_field_id].relation_attribute = Some(ctx.current_attribute_id().1); if let Some(fields) = ctx.visit_optional_arg("fields") { let fields = match resolve_field_array_without_args(fields, attr.span, model_id, ctx) { @@ -724,7 +728,7 @@ fn visit_relation(model_id: ast::ModelId, relation_field_id: RelationFieldId, ct unknown_fields, }) => { if !unknown_fields.is_empty() { - let model_name = ctx.ast[ctx.types[relation_field_id].referenced_model].name(); + let model_name = ctx.asts[ctx.types[relation_field_id].referenced_model].name(); let field_names = unknown_fields .into_iter() @@ -742,7 +746,7 @@ fn visit_relation(model_id: ast::ModelId, relation_field_id: RelationFieldId, ct if !relation_fields.is_empty() { let msg = format!( "The argument `references` must refer only to scalar fields in the related model `{}`. But it is referencing the following relation fields: {}", - ctx.ast[ctx.types[relation_field_id].referenced_model].name(), + ctx.asts[ctx.types[relation_field_id].referenced_model].name(), relation_fields.iter().map(|(f, _)| f.name()).collect::>().join(", "), ); ctx.push_error(DatamodelError::new_validation_error(&msg, attr.span)); @@ -806,7 +810,7 @@ enum FieldResolutionError<'ast> { AlreadyDealtWith, ProblematicFields { /// Fields that do not exist on the model. - unknown_fields: Vec<(ast::TopId, &'ast str)>, + unknown_fields: Vec<(crate::TopId, &'ast str)>, /// Fields that exist on the model but are relation fields. 
relation_fields: Vec<(&'ast ast::Field, ast::FieldId)>, }, @@ -818,9 +822,10 @@ enum FieldResolutionError<'ast> { fn resolve_field_array_without_args<'db>( values: &'db ast::Expression, attribute_span: ast::Span, - model_id: ast::ModelId, + model_id: crate::ModelId, ctx: &mut Context<'db>, ) -> Result, FieldResolutionError<'db>> { + let file_id = model_id.0; let constant_array = match coerce_array(values, &coerce::constant, ctx.diagnostics) { Some(values) => values, None => { @@ -831,11 +836,11 @@ fn resolve_field_array_without_args<'db>( let mut field_ids: Vec = Vec::with_capacity(constant_array.len()); let mut unknown_fields = Vec::new(); let mut relation_fields = Vec::new(); - let ast_model = &ctx.ast[model_id]; + let ast_model = &ctx.asts[model_id]; for field_name in constant_array { if field_name.contains('.') { - unknown_fields.push((ast::TopId::Model(model_id), field_name)); + unknown_fields.push(((file_id, ast::TopId::Model(model_id.1)), field_name)); continue; } @@ -843,7 +848,7 @@ fn resolve_field_array_without_args<'db>( let field_id = if let Some(field_id) = ctx.find_model_field(model_id, field_name) { field_id } else { - unknown_fields.push((ast::TopId::Model(model_id), field_name)); + unknown_fields.push(((file_id, ast::TopId::Model(model_id.1)), field_name)); continue; }; @@ -851,7 +856,7 @@ fn resolve_field_array_without_args<'db>( let sfid = if let Some(sfid) = ctx.types.find_model_scalar_field(model_id, field_id) { sfid } else { - relation_fields.push((&ctx.ast[model_id][field_id], field_id)); + relation_fields.push((&ctx.asts[model_id][field_id], field_id)); continue; }; @@ -900,10 +905,11 @@ impl FieldResolvingSetup { fn resolve_field_array_with_args<'db>( values: &'db ast::Expression, attribute_span: ast::Span, - model_id: ast::ModelId, + model_id: crate::ModelId, resolving: FieldResolvingSetup, ctx: &mut Context<'db>, ) -> Result, FieldResolutionError<'db>> { + let file_id = model_id.0; let constant_array = match 
crate::types::index_fields::coerce_field_array_with_args(values, ctx.diagnostics) { Some(values) => values, None => return Err(FieldResolutionError::AlreadyDealtWith), @@ -913,12 +919,12 @@ fn resolve_field_array_with_args<'db>( let mut unknown_fields = Vec::new(); let mut relation_fields = Vec::new(); - let ast_model = &ctx.ast[model_id]; + let ast_model = &ctx.asts[model_id]; 'fields: for attrs in &constant_array { let path = if attrs.field_name.contains('.') { if !resolving.follow_composites() { - unknown_fields.push((ast::TopId::Model(model_id), attrs.field_name)); + unknown_fields.push(((file_id, ast::TopId::Model(model_id.1)), attrs.field_name)); continue 'fields; } @@ -930,7 +936,7 @@ fn resolve_field_array_with_args<'db>( let field_id = match ctx.find_model_field(model_id, field_shard) { Some(field_id) => field_id, None => { - unknown_fields.push((ast::TopId::Model(model_id), field_shard)); + unknown_fields.push(((file_id, ast::TopId::Model(model_id.1)), field_shard)); continue 'fields; } }; @@ -938,14 +944,14 @@ fn resolve_field_array_with_args<'db>( let sfid = if let Some(sfid) = ctx.types.find_model_scalar_field(model_id, field_id) { sfid } else { - relation_fields.push((&ctx.ast[model_id][field_id], field_id)); + relation_fields.push((&ctx.asts[model_id][field_id], field_id)); continue 'fields; }; match &ctx.types[sfid].r#type { ScalarFieldType::CompositeType(ctid) => (IndexFieldPath::new(sfid), ctid), _ => { - unknown_fields.push((ast::TopId::Model(model_id), attrs.field_name)); + unknown_fields.push(((file_id, ast::TopId::Model(model_id.1)), attrs.field_name)); continue 'fields; } } @@ -961,7 +967,7 @@ fn resolve_field_array_with_args<'db>( let field_id = match ctx.find_composite_type_field(*next_type, field_shard) { Some(field_id) => field_id, None => { - unknown_fields.push((ast::TopId::CompositeType(*next_type), field_shard)); + unknown_fields.push(((next_type.0, ast::TopId::CompositeType(next_type.1)), field_shard)); continue 'fields; } }; @@ 
-973,7 +979,7 @@ fn resolve_field_array_with_args<'db>( next_type = ctid; } _ if i < field_count - 1 => { - unknown_fields.push((ast::TopId::Model(model_id), attrs.field_name)); + unknown_fields.push(((model_id.0, ast::TopId::Model(model_id.1)), attrs.field_name)); continue 'fields; } _ => (), @@ -986,12 +992,12 @@ fn resolve_field_array_with_args<'db>( match ctx.types.find_model_scalar_field(model_id, field_id) { Some(sfid) => IndexFieldPath::new(sfid), None => { - relation_fields.push((&ctx.ast[model_id][field_id], field_id)); + relation_fields.push((&ctx.asts[model_id][field_id], field_id)); continue; } } } else { - unknown_fields.push((ast::TopId::Model(model_id), attrs.field_name)); + unknown_fields.push(((model_id.0, ast::TopId::Model(model_id.1)), attrs.field_name)); continue; }; @@ -1000,8 +1006,8 @@ fn resolve_field_array_with_args<'db>( let path_str = match path.field_in_index() { either::Either::Left(_) => Cow::from(attrs.field_name), either::Either::Right((ctid, field_id)) => { - let field_name = &ctx.ast[ctid][field_id].name(); - let composite_type = &ctx.ast[ctid].name(); + let field_name = &ctx.asts[ctid][field_id].name(); + let composite_type = &ctx.asts[ctid].name(); Cow::from(format!("{field_name} in type {composite_type}")) } @@ -1097,13 +1103,17 @@ fn validate_clustering_setting(ctx: &mut Context<'_>) -> Option { /// access their corresponding entries in the attributes map in the database even in the presence /// of name and type resolution errors. This is useful for the language tools. 
pub(super) fn create_default_attributes(ctx: &mut Context<'_>) { - for top in ctx.ast.iter_tops() { + for ((file_id, top), _) in ctx.iter_tops() { match top { - (ast::TopId::Model(model_id), ast::Top::Model(_)) => { - ctx.types.model_attributes.insert(model_id, ModelAttributes::default()); + ast::TopId::Model(model_id) => { + ctx.types + .model_attributes + .insert((file_id, model_id), ModelAttributes::default()); } - (ast::TopId::Enum(enum_id), ast::Top::Enum(_)) => { - ctx.types.enum_attributes.insert(enum_id, EnumAttributes::default()); + ast::TopId::Enum(enum_id) => { + ctx.types + .enum_attributes + .insert((file_id, enum_id), EnumAttributes::default()); } _ => (), } diff --git a/psl/parser-database/src/attributes/default.rs b/psl/parser-database/src/attributes/default.rs index dcd22d31636..e2be240f152 100644 --- a/psl/parser-database/src/attributes/default.rs +++ b/psl/parser-database/src/attributes/default.rs @@ -9,7 +9,7 @@ use crate::{ /// @default on model scalar fields pub(super) fn visit_model_field_default( scalar_field_id: ScalarFieldId, - model_id: ast::ModelId, + model_id: crate::ModelId, field_id: ast::FieldId, r#type: ScalarFieldType, ctx: &mut Context<'_>, @@ -19,7 +19,7 @@ pub(super) fn visit_model_field_default( Err(err) => return ctx.push_error(err), }; - let ast_model = &ctx.ast[model_id]; + let ast_model = &ctx.asts[model_id]; let ast_field = &ast_model[field_id]; let mapped_name = default_attribute_mapped_name(ctx); @@ -74,7 +74,7 @@ pub(super) fn visit_model_field_default( /// @default on composite type fields pub(super) fn visit_composite_field_default( - ct_id: ast::CompositeTypeId, + ct_id: crate::CompositeTypeId, field_id: ast::FieldId, r#type: ScalarFieldType, ctx: &mut Context<'_>, @@ -84,7 +84,7 @@ pub(super) fn visit_composite_field_default( Err(err) => return ctx.push_error(err), }; - let ast_model = &ctx.ast[ct_id]; + let ast_model = &ctx.asts[ct_id]; let ast_field = &ast_model[field_id]; if 
ctx.visit_optional_arg("map").is_some() { @@ -181,10 +181,10 @@ fn validate_model_builtin_scalar_type_default( value: &ast::Expression, mapped_name: Option, accept: AcceptFn<'_>, - field_id: (ast::ModelId, ast::FieldId), + field_id: (crate::ModelId, ast::FieldId), ctx: &mut Context<'_>, ) { - let arity = ctx.ast[field_id.0][field_id.1].arity; + let arity = ctx.asts[field_id.0][field_id.1].arity; match (scalar_type, value) { // Functions (_, ast::Expression::Function(funcname, _, _)) if funcname == FN_AUTOINCREMENT && mapped_name.is_some() => { @@ -324,9 +324,13 @@ fn validate_invalid_function_default(fn_name: &str, scalar_type: ScalarType, ctx )); } -fn validate_default_value_on_composite_type(ctid: ast::CompositeTypeId, ast_field: &ast::Field, ctx: &mut Context<'_>) { +fn validate_default_value_on_composite_type( + ctid: crate::CompositeTypeId, + ast_field: &ast::Field, + ctx: &mut Context<'_>, +) { let attr = ctx.current_attribute(); - let ct_name = ctx.ast[ctid].name(); + let ct_name = ctx.asts[ctid].name(); ctx.push_error(DatamodelError::new_composite_type_field_validation_error( "Defaults on fields of type composite are not supported. 
Please remove the `@default` attribute.", @@ -395,13 +399,13 @@ fn validate_nanoid_args(args: &[ast::Argument], accept: AcceptFn<'_>, ctx: &mut fn validate_enum_default( found_value: &ast::Expression, - enum_id: ast::EnumId, + enum_id: crate::EnumId, accept: AcceptFn<'_>, ctx: &mut Context<'_>, ) { match found_value { ast::Expression::ConstantValue(enum_value, _) => { - if ctx.ast[enum_id].values.iter().any(|v| v.name() == enum_value) { + if ctx.asts[enum_id].values.iter().any(|v| v.name() == enum_value) { accept(ctx) } else { validate_invalid_default_enum_value(enum_value, ctx); @@ -413,7 +417,7 @@ fn validate_enum_default( fn validate_enum_list_default( found_value: &ast::Expression, - enum_id: ast::EnumId, + enum_id: crate::EnumId, accept: AcceptFn<'_>, ctx: &mut Context<'_>, ) { diff --git a/psl/parser-database/src/attributes/id.rs b/psl/parser-database/src/attributes/id.rs index 96892587c86..13618bbea73 100644 --- a/psl/parser-database/src/attributes/id.rs +++ b/psl/parser-database/src/attributes/id.rs @@ -10,7 +10,7 @@ use crate::{ use std::borrow::Cow; /// @@id on models -pub(super) fn model(model_data: &mut ModelAttributes, model_id: ast::ModelId, ctx: &mut Context<'_>) { +pub(super) fn model(model_data: &mut ModelAttributes, model_id: crate::ModelId, ctx: &mut Context<'_>) { let attr = ctx.current_attribute(); let fields = match ctx.visit_default_arg("fields") { Ok(value) => value, @@ -29,9 +29,9 @@ pub(super) fn model(model_data: &mut ModelAttributes, model_id: ast::ModelId, ct if !unresolvable_fields.is_empty() { let fields_str = unresolvable_fields .into_iter() - .map(|(top_id, field_name)| match top_id { + .map(|((file_id, top_id), field_name)| match top_id { ast::TopId::CompositeType(ctid) => { - let ct_name = &ctx.ast[ctid].name(); + let ct_name = ctx.asts[(file_id, ctid)].name(); Cow::from(format!("{field_name} in type {ct_name}")) } @@ -43,7 +43,7 @@ pub(super) fn model(model_data: &mut ModelAttributes, model_id: ast::ModelId, ct let msg = 
format!("The multi field id declaration refers to the unknown fields {fields_str}."); let error = - DatamodelError::new_model_validation_error(&msg, "model", ctx.ast[model_id].name(), fields.span()); + DatamodelError::new_model_validation_error(&msg, "model", ctx.asts[model_id].name(), fields.span()); ctx.push_error(error); } @@ -60,7 +60,7 @@ pub(super) fn model(model_data: &mut ModelAttributes, model_id: ast::ModelId, ct ctx.push_error(DatamodelError::new_model_validation_error( &msg, "model", - ctx.ast[model_id].name(), + ctx.asts[model_id].name(), attr.span, )); } @@ -69,7 +69,7 @@ pub(super) fn model(model_data: &mut ModelAttributes, model_id: ast::ModelId, ct } }; - let ast_model = &ctx.ast[model_id]; + let ast_model = &ctx.asts[model_id]; // ID attribute fields must reference only required fields. let fields_that_are_not_required: Vec<&str> = resolved_fields @@ -77,7 +77,7 @@ pub(super) fn model(model_data: &mut ModelAttributes, model_id: ast::ModelId, ct .filter_map(|field| match field.path.field_in_index() { either::Either::Left(id) => { let ScalarField { model_id, field_id, .. 
} = ctx.types[id]; - let field = &ctx.ast[model_id][field_id]; + let field = &ctx.asts[model_id][field_id]; if field.arity.is_required() { None @@ -86,7 +86,7 @@ pub(super) fn model(model_data: &mut ModelAttributes, model_id: ast::ModelId, ct } } either::Either::Right((ctid, field_id)) => { - let field = &ctx.ast[ctid][field_id]; + let field = &ctx.asts[ctid][field_id]; if field.arity.is_required() { None @@ -198,7 +198,7 @@ pub(super) fn field<'db>( } pub(super) fn validate_id_field_arities( - model_id: ast::ModelId, + model_id: crate::ModelId, model_attributes: &ModelAttributes, ctx: &mut Context<'_>, ) { @@ -213,7 +213,7 @@ pub(super) fn validate_id_field_arities( }; let ast_field = if let Some(field_id) = pk.source_field { - &ctx.ast[model_id][field_id] + &ctx.asts[model_id][field_id] } else { return; }; @@ -222,7 +222,7 @@ pub(super) fn validate_id_field_arities( ctx.push_error(DatamodelError::new_attribute_validation_error( "Fields that are marked as id must be required.", "@id", - ctx.ast[pk.source_attribute].span, + ctx.asts[pk.source_attribute].span, )) } } diff --git a/psl/parser-database/src/attributes/map.rs b/psl/parser-database/src/attributes/map.rs index b4bf82835eb..d910447f96c 100644 --- a/psl/parser-database/src/attributes/map.rs +++ b/psl/parser-database/src/attributes/map.rs @@ -19,7 +19,7 @@ pub(super) fn scalar_field( sfid: ScalarFieldId, ast_model: &ast::Model, ast_field: &ast::Field, - model_id: ast::ModelId, + model_id: crate::ModelId, field_id: ast::FieldId, ctx: &mut Context<'_>, ) { @@ -71,7 +71,7 @@ pub(super) fn scalar_field( pub(super) fn composite_type_field( ct: &ast::CompositeType, ast_field: &ast::Field, - ctid: ast::CompositeTypeId, + ctid: crate::CompositeTypeId, field_id: ast::FieldId, ctx: &mut Context<'_>, ) { diff --git a/psl/parser-database/src/attributes/native_types.rs b/psl/parser-database/src/attributes/native_types.rs index d9deccb99eb..704df89e23a 100644 --- a/psl/parser-database/src/attributes/native_types.rs +++ 
b/psl/parser-database/src/attributes/native_types.rs @@ -14,7 +14,7 @@ pub(super) fn visit_model_field_native_type_attribute( } pub(super) fn visit_composite_type_field_native_type_attribute( - id: (ast::CompositeTypeId, ast::FieldId), + id: (crate::CompositeTypeId, ast::FieldId), datasource_name: StringId, type_name: StringId, attr: &ast::Attribute, diff --git a/psl/parser-database/src/context.rs b/psl/parser-database/src/context.rs index 45014695302..6d4d7223982 100644 --- a/psl/parser-database/src/context.rs +++ b/psl/parser-database/src/context.rs @@ -3,7 +3,7 @@ mod attributes; use self::attributes::AttributesValidationState; use crate::{ ast, interner::StringInterner, names::Names, relations::Relations, types::Types, DatamodelError, Diagnostics, - StringId, + InFile, StringId, }; use schema_ast::ast::{Expression, WithName}; use std::collections::{HashMap, HashSet}; @@ -21,7 +21,7 @@ use std::collections::{HashMap, HashSet}; /// /// See `visit_attributes()`. pub(crate) struct Context<'db> { - pub(crate) ast: &'db ast::SchemaAst, + pub(crate) asts: &'db crate::Files, pub(crate) interner: &'db mut StringInterner, pub(crate) names: &'db mut Names, pub(crate) types: &'db mut Types, @@ -30,15 +30,15 @@ pub(crate) struct Context<'db> { attributes: AttributesValidationState, // state machine for attribute validation // @map'ed names indexes. These are not in the db because they are only used for validation. 
- pub(super) mapped_model_scalar_field_names: HashMap<(ast::ModelId, StringId), ast::FieldId>, - pub(super) mapped_composite_type_names: HashMap<(ast::CompositeTypeId, StringId), ast::FieldId>, - pub(super) mapped_enum_names: HashMap, - pub(super) mapped_enum_value_names: HashMap<(ast::EnumId, StringId), u32>, + pub(super) mapped_model_scalar_field_names: HashMap<(crate::ModelId, StringId), ast::FieldId>, + pub(super) mapped_composite_type_names: HashMap<(crate::CompositeTypeId, StringId), ast::FieldId>, + pub(super) mapped_enum_names: HashMap, + pub(super) mapped_enum_value_names: HashMap<(crate::EnumId, StringId), u32>, } impl<'db> Context<'db> { pub(super) fn new( - ast: &'db ast::SchemaAst, + asts: &'db crate::Files, interner: &'db mut StringInterner, names: &'db mut Names, types: &'db mut Types, @@ -46,7 +46,7 @@ impl<'db> Context<'db> { diagnostics: &'db mut Diagnostics, ) -> Self { Context { - ast, + asts, interner, names, types, @@ -68,7 +68,7 @@ impl<'db> Context<'db> { /// Return the attribute currently being validated. Panics if the context is not in the right /// state. #[track_caller] - pub(crate) fn current_attribute_id(&self) -> ast::AttributeId { + pub(crate) fn current_attribute_id(&self) -> crate::AttributeId { self.attributes.attribute.unwrap() } @@ -76,8 +76,7 @@ impl<'db> Context<'db> { /// state. #[track_caller] pub(crate) fn current_attribute(&self) -> &'db ast::Attribute { - let id = self.attributes.attribute.unwrap(); - &self.ast[id] + &self.asts[self.attributes.attribute.unwrap()] } /// Discard arguments without validation. @@ -102,8 +101,8 @@ impl<'db> Context<'db> { /// /// Other than for this peculiarity, this method is identical to /// `visit_attributes()`. 
- pub(super) fn visit_scalar_field_attributes(&mut self, model_id: ast::ModelId, field_id: ast::FieldId) { - self.visit_attributes((model_id, field_id).into()); + pub(super) fn visit_scalar_field_attributes(&mut self, model_id: crate::ModelId, field_id: ast::FieldId) { + self.visit_attributes((model_id.0, (model_id.1, field_id))); } /// All attribute validation should go through `visit_attributes()`. It lets @@ -116,7 +115,11 @@ impl<'db> Context<'db> { /// `validate_visited_arguments()`. Otherwise, Context will helpfully panic. /// - When you are done validating an attribute set, you must call /// `validate_visited_attributes()`. Otherwise, Context will helpfully panic. - pub(super) fn visit_attributes(&mut self, ast_attributes: ast::AttributeContainer) { + pub(super) fn visit_attributes(&mut self, ast_attributes: InFile) + where + T: Into, + { + let ast_attributes: crate::AttributeContainer = (ast_attributes.0, ast_attributes.1.into()); if self.attributes.attributes.is_some() || !self.attributes.unused_attributes.is_empty() { panic!( "`ctx.visit_attributes() called with {:?} while the Context is still validating previous attribute set on {:?}`", @@ -125,7 +128,8 @@ impl<'db> Context<'db> { ); } - self.attributes.set_attributes(ast_attributes, self.ast); + self.attributes + .set_attributes(ast_attributes, &self.asts[ast_attributes.0].2); } /// Look for an optional attribute with a name of the form @@ -136,8 +140,8 @@ impl<'db> Context<'db> { /// arguments to other attributes: everywhere else, attributes are named, /// with a default that can be first, but with native types, arguments are /// purely positional. 
- pub(crate) fn visit_datasource_scoped(&mut self) -> Option<(StringId, StringId, ast::AttributeId)> { - let attrs = iter_attributes(self.attributes.attributes.as_ref(), self.ast) + pub(crate) fn visit_datasource_scoped(&mut self) -> Option<(StringId, StringId, crate::AttributeId)> { + let attrs = iter_attributes(self.attributes.attributes.as_ref(), self.asts) .filter(|(_, attr)| attr.name.name.contains('.')); let mut native_type_attr = None; let diagnostics = &mut self.diagnostics; @@ -172,7 +176,7 @@ impl<'db> Context<'db> { #[must_use] pub(crate) fn visit_optional_single_attr(&mut self, name: &'static str) -> bool { let mut attrs = - iter_attributes(self.attributes.attributes.as_ref(), self.ast).filter(|(_, a)| a.name.name == name); + iter_attributes(self.attributes.attributes.as_ref(), self.asts).filter(|(_, a)| a.name.name == name); let (first_idx, first) = match attrs.next() { Some(first) => first, None => return false, @@ -181,7 +185,7 @@ impl<'db> Context<'db> { if attrs.next().is_some() { for (idx, attr) in - iter_attributes(self.attributes.attributes.as_ref(), self.ast).filter(|(_, a)| a.name.name == name) + iter_attributes(self.attributes.attributes.as_ref(), self.asts).filter(|(_, a)| a.name.name == name) { diagnostics.push_error(DatamodelError::new_duplicate_attribute_error( &attr.name.name, @@ -205,7 +209,7 @@ impl<'db> Context<'db> { let mut has_valid_attribute = false; while !has_valid_attribute { - let first_attr = iter_attributes(self.attributes.attributes.as_ref(), self.ast) + let first_attr = iter_attributes(self.attributes.attributes.as_ref(), self.asts) .filter(|(_, attr)| attr.name.name == name) .find(|(attr_id, _)| self.attributes.unused_attributes.contains(attr_id)); let (attr_id, attr) = if let Some(first_attr) = first_attr { @@ -267,7 +271,7 @@ impl<'db> Context<'db> { /// otherwise. 
pub(crate) fn validate_visited_arguments(&mut self) { let attr = if let Some(attrid) = self.attributes.attribute { - &self.ast[attrid] + &self.asts[attrid] } else { panic!("State error: missing attribute in validate_visited_arguments.") }; @@ -290,7 +294,7 @@ impl<'db> Context<'db> { let diagnostics = &mut self.diagnostics; for attribute_id in &self.attributes.unused_attributes { - let attribute = &self.ast[*attribute_id]; + let attribute = &self.asts[*attribute_id]; diagnostics.push_error(DatamodelError::new_attribute_not_known_error( &attribute.name.name, attribute.span, @@ -308,7 +312,7 @@ impl<'db> Context<'db> { } /// Find a specific field in a specific model. - pub(crate) fn find_model_field(&self, model_id: ast::ModelId, field_name: &str) -> Option { + pub(crate) fn find_model_field(&self, model_id: crate::ModelId, field_name: &str) -> Option { let name = self.interner.lookup(field_name)?; self.names.model_fields.get(&(model_id, name)).cloned() } @@ -316,7 +320,7 @@ impl<'db> Context<'db> { /// Find a specific field in a specific composite type. pub(crate) fn find_composite_type_field( &self, - composite_type_id: ast::CompositeTypeId, + composite_type_id: crate::CompositeTypeId, field_name: &str, ) -> Option { let name = self.interner.lookup(field_name)?; @@ -327,9 +331,15 @@ impl<'db> Context<'db> { .cloned() } + pub(crate) fn iter_tops(&self) -> impl Iterator + 'db { + self.asts + .iter() + .flat_map(|(file_id, _, _, ast)| ast.iter_tops().map(move |(top_id, top)| ((file_id, top_id), top))) + } + /// Starts validating the arguments for an attribute, checking for duplicate arguments in the /// process. Returns whether the attribute is valid enough to be usable. 
- fn set_attribute(&mut self, attribute_id: ast::AttributeId, attribute: &'db ast::Attribute) -> bool { + fn set_attribute(&mut self, attribute_id: crate::AttributeId, attribute: &'db ast::Attribute) -> bool { if self.attributes.attribute.is_some() || !self.attributes.args.is_empty() { panic!("State error: we cannot start validating new arguments before `validate_visited_arguments()` or `discard_arguments()` has been called.\n{:#?}", self.attributes); } @@ -430,13 +440,15 @@ impl<'db> Context<'db> { // Implementation detail. Used for arguments validation. fn iter_attributes<'a, 'ast: 'a>( - attrs: Option<&'a ast::AttributeContainer>, - ast: &'ast ast::SchemaAst, -) -> impl Iterator + 'a { + attrs: Option<&'a crate::AttributeContainer>, + asts: &'ast crate::Files, +) -> impl Iterator + 'a { attrs .into_iter() - .flat_map(move |container| ast[*container].iter().enumerate().map(|a| (a, *container))) - .map(|((idx, attr), container)| (ast::AttributeId::new_in_container(container, idx), attr)) + .flat_map(move |container| asts[*container].iter().enumerate().map(|a| (a, *container))) + .map(|((idx, attr), (file_id, container))| { + ((file_id, ast::AttributeId::new_in_container(container, idx)), attr) + }) } impl std::ops::Index for Context<'_> { diff --git a/psl/parser-database/src/context/attributes.rs b/psl/parser-database/src/context/attributes.rs index 9f35f5cc364..48b75756004 100644 --- a/psl/parser-database/src/context/attributes.rs +++ b/psl/parser-database/src/context/attributes.rs @@ -4,17 +4,19 @@ use crate::interner::StringId; #[derive(Default, Debug)] pub(super) struct AttributesValidationState { /// The attributes list being validated. - pub(super) attributes: Option, - pub(super) unused_attributes: HashSet, // the _remaining_ attributes + pub(super) attributes: Option, + pub(super) unused_attributes: HashSet, // the _remaining_ attributes /// The attribute being validated. 
- pub(super) attribute: Option, + pub(super) attribute: Option, pub(super) args: HashMap, usize>, // the _remaining_ arguments of `attribute` } impl AttributesValidationState { - pub(super) fn set_attributes(&mut self, attributes: ast::AttributeContainer, ast: &ast::SchemaAst) { - let attribute_ids = (0..ast[attributes].len()).map(|idx| ast::AttributeId::new_in_container(attributes, idx)); + pub(super) fn set_attributes(&mut self, attributes: crate::AttributeContainer, ast: &ast::SchemaAst) { + let file_id = attributes.0; + let attribute_ids = + (0..ast[attributes.1].len()).map(|idx| (file_id, ast::AttributeId::new_in_container(attributes.1, idx))); self.unused_attributes.clear(); self.unused_attributes.extend(attribute_ids); diff --git a/psl/parser-database/src/files.rs b/psl/parser-database/src/files.rs new file mode 100644 index 00000000000..f201c839eea --- /dev/null +++ b/psl/parser-database/src/files.rs @@ -0,0 +1,37 @@ +use crate::FileId; +use schema_ast::ast; +use std::ops::Index; + +/// The content is a list of (file path, file source text, file AST). +/// +/// The file path can be anything, the PSL implementation will only use it to display the file name +/// in errors. For example, files can come from nested directories. 
+pub(crate) struct Files(pub(super) Vec<(String, schema_ast::SourceFile, ast::SchemaAst)>); + +impl Files { + pub(crate) fn iter(&self) -> impl Iterator { + self.0 + .iter() + .enumerate() + .map(|(idx, (path, contents, ast))| (FileId(idx as u32), path, contents, ast)) + } +} + +impl Index for Files { + type Output = (String, schema_ast::SourceFile, ast::SchemaAst); + + fn index(&self, index: crate::FileId) -> &Self::Output { + &self.0[index.0 as usize] + } +} + +impl Index> for Files +where + ast::SchemaAst: Index, +{ + type Output = >::Output; + + fn index(&self, index: crate::InFile) -> &Self::Output { + &self[index.0].2[index.1] + } +} diff --git a/psl/parser-database/src/ids.rs b/psl/parser-database/src/ids.rs new file mode 100644 index 00000000000..55e5836f17f --- /dev/null +++ b/psl/parser-database/src/ids.rs @@ -0,0 +1,23 @@ +use diagnostics::FileId; +use schema_ast::ast; + +/// An AST identifier with the accompanying file ID. +pub type InFile = (FileId, Id); + +/// See [ast::ModelId] +pub type ModelId = InFile; + +/// See [ast::EnumId] +pub type EnumId = InFile; + +/// See [ast::CompositeTypeId] +pub type CompositeTypeId = InFile; + +/// See [ast::TopId] +pub type TopId = InFile; + +/// See [ast::AttributeId] +pub type AttributeId = InFile; + +/// See [ast::AttributeContainer] +pub type AttributeContainer = InFile; diff --git a/psl/parser-database/src/lib.rs b/psl/parser-database/src/lib.rs index d57ff8c98dd..e1dd7b72b25 100644 --- a/psl/parser-database/src/lib.rs +++ b/psl/parser-database/src/lib.rs @@ -31,12 +31,16 @@ pub mod walkers; mod attributes; mod coerce_expression; mod context; +mod files; +mod ids; mod interner; mod names; mod relations; mod types; pub use coerce_expression::{coerce, coerce_array, coerce_opt}; +pub use diagnostics::FileId; +pub use ids::*; pub use names::is_reserved_type_name; pub use relations::{ManyToManyRelationId, ReferentialAction, RelationId}; pub use schema_ast::{ast, SourceFile}; @@ -45,7 +49,7 @@ pub use types::{
ScalarType, SortOrder, }; -use self::{context::Context, interner::StringId, relations::Relations, types::Types}; +use self::{context::Context, files::Files, interner::StringId, relations::Relations, types::Types}; use diagnostics::{DatamodelError, Diagnostics}; use names::Names; @@ -69,8 +73,7 @@ use names::Names; /// - Global validations are then performed on the mostly validated schema. /// Currently only index name collisions. pub struct ParserDatabase { - ast: ast::SchemaAst, - file: schema_ast::SourceFile, + asts: Files, interner: interner::StringInterner, names: Names, types: Types, @@ -79,14 +82,35 @@ pub struct ParserDatabase { impl ParserDatabase { /// See the docs on [ParserDatabase](/struct.ParserDatabase.html). - pub fn new(file: schema_ast::SourceFile, diagnostics: &mut Diagnostics) -> Self { - let ast = schema_ast::parse_schema(file.as_str(), diagnostics); + pub fn new_single_file(file: SourceFile, diagnostics: &mut Diagnostics) -> Self { + Self::new(vec![("schema.prisma".to_owned(), file)], diagnostics) + } + + /// See the docs on [ParserDatabase](/struct.ParserDatabase.html). + pub fn new(schemas: Vec<(String, schema_ast::SourceFile)>, diagnostics: &mut Diagnostics) -> Self { + let asts = schemas + .into_iter() + .enumerate() + .map(|(file_idx, (path, source))| { + let id = FileId(file_idx as u32); + let ast = schema_ast::parse_schema(source.as_str(), diagnostics, id); + (path, source, ast) + }) + .collect(); + let asts = Files(asts); let mut interner = Default::default(); let mut names = Default::default(); let mut types = Default::default(); let mut relations = Default::default(); - let mut ctx = Context::new(&ast, &mut interner, &mut names, &mut types, &mut relations, diagnostics); + let mut ctx = Context::new( + &asts, + &mut interner, + &mut names, + &mut types, + &mut relations, + diagnostics, + ); // First pass: resolve names. 
names::resolve_names(&mut ctx); @@ -96,8 +120,7 @@ impl ParserDatabase { attributes::create_default_attributes(&mut ctx); return ParserDatabase { - ast, - file, + asts, interner, names, types, @@ -113,8 +136,7 @@ attributes::create_default_attributes(&mut ctx); return ParserDatabase { - ast, - file, + asts, interner, names, types, @@ -131,8 +153,7 @@ relations::infer_relations(&mut ctx); ParserDatabase { - ast, - file, + asts, interner, names, types, @@ -140,9 +161,23 @@ } } - /// The parsed AST. - pub fn ast(&self) -> &ast::SchemaAst { - &self.ast + /// The parsed AST. This method asserts that there is a single prisma schema file. As + /// multi-file schemas are implemented, calls to this method should be replaced with + /// `ParserDatabase::ast()` and `ParserDatabase::iter_asts()`. + /// TODO: consider removing once the `multiFileSchema` preview feature goes GA. + pub fn ast_assert_single(&self) -> &ast::SchemaAst { + assert_eq!(self.asts.0.len(), 1); + &self.asts.0.first().unwrap().2 + } + + /// Iterate all parsed ASTs. + pub fn iter_asts(&self) -> impl Iterator { + self.asts.iter().map(|(_, _, _, ast)| ast) + } + + /// A parsed AST. + pub fn ast(&self, file_id: FileId) -> &ast::SchemaAst { + &self.asts[file_id].2 + } /// The total number of enums in the schema. This is O(1). @@ -155,9 +190,25 @@ self.types.model_attributes.len() } + /// The source file contents. This method asserts that there is a single prisma schema file. + /// As multi-file schemas are implemented, calls to this method should be replaced with + /// `ParserDatabase::source()` and `ParserDatabase::iter_sources()`. + pub fn source_assert_single(&self) -> &str { + assert_eq!(self.asts.0.len(), 1); + self.asts.0[0].1.as_str() + } + /// The source file contents.
- pub fn source(&self) -> &str { - self.file.as_str() + pub(crate) fn source(&self, file_id: FileId) -> &str { + self.asts[file_id].1.as_str() + } +} + +impl std::ops::Index for ParserDatabase { + type Output = (String, SourceFile, ast::SchemaAst); + + fn index(&self, index: FileId) -> &Self::Output { + &self.asts[index] } } diff --git a/psl/parser-database/src/names.rs b/psl/parser-database/src/names.rs index 3208c1c3bdb..dff646ca510 100644 --- a/psl/parser-database/src/names.rs +++ b/psl/parser-database/src/names.rs @@ -5,7 +5,7 @@ pub use reserved_model_names::is_reserved_type_name; use crate::{ ast::{self, ConfigBlockProperty, TopId, WithAttributes, WithIdentifier, WithName, WithSpan}, types::ScalarType, - Context, DatamodelError, StringId, + Context, DatamodelError, FileId, StringId, }; use reserved_model_names::{validate_enum_name, validate_model_name}; use rustc_hash::{FxHashMap as HashMap, FxHashSet as HashSet}; @@ -14,13 +14,13 @@ use rustc_hash::{FxHashMap as HashMap, FxHashSet as HashSet}; #[derive(Default)] pub(super) struct Names { /// Models, enums, composite types and type aliases - pub(super) tops: HashMap, + pub(super) tops: HashMap, /// Generators have their own namespace. - pub(super) generators: HashMap, + pub(super) generators: HashMap, /// Datasources have their own namespace. 
- pub(super) datasources: HashMap, - pub(super) model_fields: HashMap<(ast::ModelId, StringId), ast::FieldId>, - pub(super) composite_type_fields: HashMap<(ast::CompositeTypeId, StringId), ast::FieldId>, + pub(super) datasources: HashMap, + pub(super) model_fields: HashMap<(crate::ModelId, StringId), ast::FieldId>, + pub(super) composite_type_fields: HashMap<(crate::CompositeTypeId, StringId), ast::FieldId>, } /// `resolve_names()` is responsible for populating `ParserDatabase.names` and @@ -35,7 +35,7 @@ pub(super) fn resolve_names(ctx: &mut Context<'_>) { let mut tmp_names: HashSet<&str> = HashSet::default(); // throwaway container for duplicate checking let mut names = Names::default(); - for (top_id, top) in ctx.ast.iter_tops() { + for ((file_id, top_id), top) in ctx.iter_tops() { assert_is_not_a_reserved_scalar_type(top.identifier(), ctx); let namespace = match (top_id, top) { @@ -70,7 +70,11 @@ pub(super) fn resolve_names(ctx: &mut Context<'_>) { validate_attribute_identifiers(field, ctx); let field_name_id = ctx.interner.intern(field.name()); - if names.model_fields.insert((model_id, field_name_id), field_id).is_some() { + if names + .model_fields + .insert(((file_id, model_id), field_name_id), field_id) + .is_some() + { ctx.push_error(DatamodelError::new_duplicate_field_error( model.name(), field.name(), @@ -92,7 +96,11 @@ pub(super) fn resolve_names(ctx: &mut Context<'_>) { validate_attribute_identifiers(field, ctx); let field_name_id = ctx.interner.intern(field.name()); - if names.model_fields.insert((model_id, field_name_id), field_id).is_some() { + if names + .model_fields + .insert(((file_id, model_id), field_name_id), field_id) + .is_some() + { ctx.push_error(DatamodelError::new_duplicate_field_error( model.name(), field.name(), @@ -112,7 +120,7 @@ pub(super) fn resolve_names(ctx: &mut Context<'_>) { // Check that there is no duplicate field on the composite type if names .composite_type_fields - .insert((ctid, field_name_id), field_id) + 
.insert(((file_id, ctid), field_name_id), field_id) .is_some() { ctx.push_error(DatamodelError::new_composite_type_duplicate_field_error( @@ -136,16 +144,22 @@ pub(super) fn resolve_names(ctx: &mut Context<'_>) { _ => unreachable!(), }; - insert_name(top_id, top, namespace, ctx) + insert_name(file_id, top_id, top, namespace, ctx) } let _ = std::mem::replace(ctx.names, names); } -fn insert_name(top_id: TopId, top: &ast::Top, namespace: &mut HashMap, ctx: &mut Context<'_>) { +fn insert_name( + file_id: FileId, + top_id: TopId, + top: &ast::Top, + namespace: &mut HashMap, + ctx: &mut Context<'_>, +) { let name = ctx.interner.intern(top.name()); - if let Some(existing) = namespace.insert(name, top_id) { - ctx.push_error(duplicate_top_error(&ctx.ast[existing], top)); + if let Some(existing_top) = namespace.insert(name, (file_id, top_id)) { + ctx.push_error(duplicate_top_error(&ctx.asts[existing_top], top)); } } diff --git a/psl/parser-database/src/relations.rs b/psl/parser-database/src/relations.rs index 33bc8236cff..0c1e0a454c6 100644 --- a/psl/parser-database/src/relations.rs +++ b/psl/parser-database/src/relations.rs @@ -2,7 +2,7 @@ use crate::{ ast::{self, WithName}, interner::StringId, walkers::RelationFieldId, - DatamodelError, Diagnostics, + DatamodelError, Diagnostics, FileId, {context::Context, types::RelationField}, }; use enumflags2::bitflags; @@ -75,11 +75,11 @@ pub(crate) struct Relations { /// (model_a, model_b, relation_idx) /// /// This can be interpreted as the relations _from_ a model. - forward: BTreeSet<(ast::ModelId, ast::ModelId, RelationId)>, + forward: BTreeSet<(crate::ModelId, crate::ModelId, RelationId)>, /// (model_b, model_a, relation_idx) /// /// This can be interpreted as the relations _to_ a model. 
- back: BTreeSet<(ast::ModelId, ast::ModelId, RelationId)>, + back: BTreeSet<(crate::ModelId, crate::ModelId, RelationId)>, } impl std::ops::Index for Relations { @@ -117,17 +117,23 @@ impl Relations { /// Iterator over relations where the provided model is model A, or the forward side of the /// relation. #[allow(clippy::wrong_self_convention)] // this is the name we want - pub(crate) fn from_model(&self, model_a_id: ast::ModelId) -> impl Iterator + '_ { + pub(crate) fn from_model(&self, model_a_id: crate::ModelId) -> impl Iterator + '_ { self.forward - .range((model_a_id, ast::ModelId::ZERO, RelationId::MIN)..(model_a_id, ast::ModelId::MAX, RelationId::MAX)) + .range( + (model_a_id, (FileId::ZERO, ast::ModelId::ZERO), RelationId::MIN) + ..(model_a_id, (FileId::MAX, ast::ModelId::MAX), RelationId::MAX), + ) .map(move |(_, _, relation_id)| *relation_id) } /// Iterator over relationss where the provided model is model B, or the backrelation side of /// the relation. - pub(crate) fn to_model(&self, model_a_id: ast::ModelId) -> impl Iterator + '_ { + pub(crate) fn to_model(&self, model_a_id: crate::ModelId) -> impl Iterator + '_ { self.back - .range((model_a_id, ast::ModelId::ZERO, RelationId::MIN)..(model_a_id, ast::ModelId::MAX, RelationId::MAX)) + .range( + (model_a_id, (FileId::ZERO, ast::ModelId::ZERO), RelationId::MIN) + ..(model_a_id, (FileId::MAX, ast::ModelId::MAX), RelationId::MAX), + ) .map(move |(_, _, relation_id)| *relation_id) } } @@ -180,8 +186,8 @@ pub(crate) struct Relation { /// The `name` argument in `@relation`. pub(super) relation_name: Option, pub(super) attributes: RelationAttributes, - pub(super) model_a: ast::ModelId, - pub(super) model_b: ast::ModelId, + pub(super) model_a: crate::ModelId, + pub(super) model_b: crate::ModelId, } impl Relation { @@ -209,7 +215,6 @@ impl Relation { // Implementation detail for this module. Should stay private. 
pub(super) struct RelationEvidence<'db> { pub(super) ast_model: &'db ast::Model, - pub(super) model_id: ast::ModelId, pub(super) ast_field: &'db ast::Field, pub(super) field_id: RelationFieldId, pub(super) is_self_relation: bool, @@ -219,14 +224,26 @@ pub(super) struct RelationEvidence<'db> { pub(super) opposite_relation_field: Option<(RelationFieldId, &'db ast::Field, &'db RelationField)>, } +impl RelationEvidence<'_> { + fn model_id(&self) -> crate::ModelId { + self.relation_field.model_id + } + + fn referenced_model_id(&self) -> crate::ModelId { + self.relation_field.referenced_model + } +} + pub(super) fn relation_evidence<'db>( (relation_field_id, relation_field): (RelationFieldId, &'db RelationField), ctx: &'db Context<'db>, ) -> RelationEvidence<'db> { - let ast = ctx.ast; - let ast_model = &ast[relation_field.model_id]; + let rf = &ctx.types[relation_field_id]; + let referencing_ast = &ctx.asts[rf.model_id.0].2; + let referenced_ast = &ctx.asts[rf.referenced_model.0].2; + let ast_model = &referencing_ast[relation_field.model_id.1]; let ast_field = &ast_model[relation_field.field_id]; - let opposite_model = &ast[relation_field.referenced_model]; + let opposite_model = &referenced_ast[relation_field.referenced_model.1]; let is_self_relation = relation_field.model_id == relation_field.referenced_model; let opposite_relation_field: Option<(RelationFieldId, &ast::Field, &'db RelationField)> = ctx .types @@ -238,7 +255,13 @@ pub(super) fn relation_evidence<'db>( !is_self_relation || opposite_relation_field.field_id != relation_field.field_id }) .find(|(_, opposite_relation_field)| opposite_relation_field.name == relation_field.name) - .map(|(opp_field_id, opp_rf)| (opp_field_id, &ast[opp_rf.model_id][opp_rf.field_id], opp_rf)); + .map(|(opp_field_id, opp_rf)| { + ( + opp_field_id, + &referenced_ast[opp_rf.model_id.1][opp_rf.field_id], + opp_rf, + ) + }); let is_two_way_embedded_many_to_many_relation = match (relation_field, opposite_relation_field) { (left, 
Some((_, _, right))) => left.fields.is_some() || right.fields.is_some(), @@ -247,7 +270,6 @@ pub(super) fn relation_evidence<'db>( RelationEvidence { ast_model, - model_id: relation_field.model_id, ast_field, field_id: relation_field_id, relation_field, @@ -359,7 +381,7 @@ pub(super) fn ingest_relation<'db>(evidence: RelationEvidence<'db>, relations: & match &evidence.relation_field.fields { Some(fields) => { let fields_are_unique = - ctx.types.model_attributes[&evidence.model_id] + ctx.types.model_attributes[&evidence.model_id()] .ast_indexes .iter() .any(|(_, idx)| { @@ -387,14 +409,14 @@ pub(super) fn ingest_relation<'db>(evidence: RelationEvidence<'db>, relations: & RelationAttributes::OneToMany(OneToManyRelationFields::Back(_)) => Relation { attributes: relation_type, relation_name: evidence.relation_field.name, - model_a: evidence.relation_field.referenced_model, - model_b: evidence.model_id, + model_a: evidence.referenced_model_id(), + model_b: evidence.model_id(), }, _ => Relation { attributes: relation_type, relation_name: evidence.relation_field.name, - model_a: evidence.model_id, - model_b: evidence.relation_field.referenced_model, + model_a: evidence.model_id(), + model_b: evidence.referenced_model_id(), }, }; @@ -408,11 +430,11 @@ pub(super) fn ingest_relation<'db>(evidence: RelationEvidence<'db>, relations: & relations .forward - .insert((evidence.model_id, evidence.relation_field.referenced_model, relation_id)); + .insert((evidence.model_id(), evidence.referenced_model_id(), relation_id)); relations .back - .insert((evidence.relation_field.referenced_model, evidence.model_id, relation_id)); + .insert((evidence.referenced_model_id(), evidence.model_id(), relation_id)); } /// An action describing the way referential integrity is managed in the system. 
diff --git a/psl/parser-database/src/types.rs b/psl/parser-database/src/types.rs index c5f2d222ce1..c7626e08649 100644 --- a/psl/parser-database/src/types.rs +++ b/psl/parser-database/src/types.rs @@ -8,11 +8,13 @@ use schema_ast::ast::{self, WithName}; use std::{collections::BTreeMap, fmt}; pub(super) fn resolve_types(ctx: &mut Context<'_>) { - for (top_id, top) in ctx.ast.iter_tops() { + for ((file_id, top_id), top) in ctx.iter_tops() { match (top_id, top) { - (ast::TopId::Model(model_id), ast::Top::Model(model)) => visit_model(model_id, model, ctx), + (ast::TopId::Model(model_id), ast::Top::Model(model)) => visit_model((file_id, model_id), model, ctx), (ast::TopId::Enum(_), ast::Top::Enum(enm)) => visit_enum(enm, ctx), - (ast::TopId::CompositeType(ct_id), ast::Top::CompositeType(ct)) => visit_composite_type(ct_id, ct, ctx), + (ast::TopId::CompositeType(ct_id), ast::Top::CompositeType(ct)) => { + visit_composite_type((file_id, ct_id), ct, ctx) + } (_, ast::Top::Source(_)) | (_, ast::Top::Generator(_)) => (), _ => unreachable!(), } @@ -21,13 +23,13 @@ pub(super) fn resolve_types(ctx: &mut Context<'_>) { #[derive(Debug, Default)] pub(super) struct Types { - pub(super) composite_type_fields: BTreeMap<(ast::CompositeTypeId, ast::FieldId), CompositeTypeField>, + pub(super) composite_type_fields: BTreeMap<(crate::CompositeTypeId, ast::FieldId), CompositeTypeField>, scalar_fields: Vec, /// This contains only the relation fields actually present in the schema /// source text. relation_fields: Vec, - pub(super) enum_attributes: HashMap, - pub(super) model_attributes: HashMap, + pub(super) enum_attributes: HashMap, + pub(super) model_attributes: HashMap, /// Sorted array of scalar fields that have an `@default()` attribute with a function that is /// not part of the base Prisma ones. This is meant for later validation in the datamodel /// connector. 
@@ -37,7 +39,7 @@ pub(super) struct Types { impl Types { pub(super) fn find_model_scalar_field( &self, - model_id: ast::ModelId, + model_id: crate::ModelId, field_id: ast::FieldId, ) -> Option { self.scalar_fields @@ -48,7 +50,7 @@ impl Types { pub(super) fn range_model_scalar_fields( &self, - model_id: ast::ModelId, + model_id: crate::ModelId, ) -> impl Iterator + Clone { let start = self.scalar_fields.partition_point(|sf| sf.model_id < model_id); self.scalar_fields[start..] @@ -71,7 +73,7 @@ impl Types { pub(super) fn range_model_scalar_field_ids( &self, - model_id: ast::ModelId, + model_id: crate::ModelId, ) -> impl Iterator + Clone { let end = self.scalar_fields.partition_point(|sf| sf.model_id <= model_id); let start = self.scalar_fields[..end].partition_point(|sf| sf.model_id < model_id); @@ -80,7 +82,7 @@ impl Types { pub(super) fn range_model_relation_fields( &self, - model_id: ast::ModelId, + model_id: crate::ModelId, ) -> impl Iterator + Clone { let first_relation_field_idx = self.relation_fields.partition_point(|rf| rf.model_id < model_id); self.relation_fields[first_relation_field_idx..] 
@@ -90,7 +92,7 @@ impl Types { .map(move |(idx, rf)| (RelationFieldId((first_relation_field_idx + idx) as u32), rf)) } - pub(super) fn refine_field(&self, id: (ast::ModelId, ast::FieldId)) -> Either { + pub(super) fn refine_field(&self, id: (crate::ModelId, ast::FieldId)) -> Either { self.relation_fields .binary_search_by_key(&id, |rf| (rf.model_id, rf.field_id)) .map(|idx| Either::Left(RelationFieldId(idx as u32))) @@ -158,7 +160,7 @@ pub(super) struct CompositeTypeField { #[derive(Debug)] enum FieldType { - Model(ast::ModelId), + Model(crate::ModelId), Scalar(ScalarFieldType), } @@ -177,9 +179,9 @@ impl UnsupportedType { #[derive(Debug, Clone, Copy, PartialEq)] pub enum ScalarFieldType { /// A composite type - CompositeType(ast::CompositeTypeId), + CompositeType(crate::CompositeTypeId), /// An enum - Enum(ast::EnumId), + Enum(crate::EnumId), /// A Prisma scalar type BuiltInScalar(ScalarType), /// An `Unsupported("...")` type @@ -196,7 +198,7 @@ impl ScalarFieldType { } /// Try to interpret this field type as a Composite Type. - pub fn as_composite_type(self) -> Option { + pub fn as_composite_type(self) -> Option { match self { ScalarFieldType::CompositeType(id) => Some(id), _ => None, @@ -204,7 +206,7 @@ impl ScalarFieldType { } /// Try to interpret this field type as an enum. 
- pub fn as_enum(self) -> Option { + pub fn as_enum(self) -> Option { match self { ScalarFieldType::Enum(id) => Some(id), _ => None, @@ -261,12 +263,12 @@ impl ScalarFieldType { pub(crate) struct DefaultAttribute { pub(crate) mapped_name: Option, pub(crate) argument_idx: usize, - pub(crate) default_attribute: ast::AttributeId, + pub(crate) default_attribute: crate::AttributeId, } #[derive(Debug)] pub(crate) struct ScalarField { - pub(crate) model_id: ast::ModelId, + pub(crate) model_id: crate::ModelId, pub(crate) field_id: ast::FieldId, pub(crate) r#type: ScalarFieldType, pub(crate) is_ignored: bool, @@ -284,9 +286,9 @@ pub(crate) struct ScalarField { #[derive(Debug)] pub(crate) struct RelationField { - pub(crate) model_id: ast::ModelId, + pub(crate) model_id: crate::ModelId, pub(crate) field_id: ast::FieldId, - pub(crate) referenced_model: ast::ModelId, + pub(crate) referenced_model: crate::ModelId, pub(crate) on_delete: Option<(crate::ReferentialAction, ast::Span)>, pub(crate) on_update: Option<(crate::ReferentialAction, ast::Span)>, /// The fields _explicitly present_ in the AST. @@ -302,7 +304,7 @@ pub(crate) struct RelationField { } impl RelationField { - fn new(model_id: ast::ModelId, field_id: ast::FieldId, referenced_model: ast::ModelId) -> Self { + fn new(model_id: crate::ModelId, field_id: ast::FieldId, referenced_model: crate::ModelId) -> Self { RelationField { model_id, field_id, @@ -491,7 +493,7 @@ impl IndexAttribute { pub(crate) struct IdAttribute { pub(crate) fields: Vec, pub(super) source_field: Option, - pub(super) source_attribute: ast::AttributeId, + pub(super) source_attribute: crate::AttributeId, pub(super) name: Option, pub(super) mapped_name: Option, pub(super) clustered: Option, @@ -545,7 +547,7 @@ pub struct IndexFieldPath { /// // ^this one is the path. 
in this case a vector of one element /// } /// ``` - path: Vec<(ast::CompositeTypeId, ast::FieldId)>, + path: Vec<(crate::CompositeTypeId, ast::FieldId)>, } impl IndexFieldPath { @@ -553,7 +555,7 @@ impl IndexFieldPath { Self { root, path: Vec::new() } } - pub(crate) fn push_field(&mut self, ctid: ast::CompositeTypeId, field_id: ast::FieldId) { + pub(crate) fn push_field(&mut self, ctid: crate::CompositeTypeId, field_id: ast::FieldId) { self.path.push((ctid, field_id)); } @@ -593,7 +595,7 @@ impl IndexFieldPath { /// @@index([a.field]) /// } /// ``` - pub fn path(&self) -> &[(ast::CompositeTypeId, ast::FieldId)] { + pub fn path(&self) -> &[(crate::CompositeTypeId, ast::FieldId)] { &self.path } @@ -601,10 +603,10 @@ impl IndexFieldPath { /// or in a composite type embedded in the model. Returns the same value as /// the [`root`](Self::root()) method if the field is in a model rather than in a /// composite type. - pub fn field_in_index(&self) -> Either { + pub fn field_in_index(&self) -> Either { self.path .last() - .map(|id| Either::Right(*id)) + .map(|(ct, field)| Either::Right((*ct, *field))) .unwrap_or(Either::Left(self.root)) } } @@ -629,7 +631,7 @@ pub(super) struct EnumAttributes { pub(crate) schema: Option<(StringId, ast::Span)>, } -fn visit_model<'db>(model_id: ast::ModelId, ast_model: &'db ast::Model, ctx: &mut Context<'db>) { +fn visit_model<'db>(model_id: crate::ModelId, ast_model: &'db ast::Model, ctx: &mut Context<'db>) { for (field_id, ast_field) in ast_model.iter_fields() { match field_type(ast_field, ctx) { Ok(FieldType::Model(referenced_model)) => { @@ -650,7 +652,6 @@ fn visit_model<'db>(model_id: ast::ModelId, ast_model: &'db ast::Model, ctx: &mu } Err(supported) => { let top_names: Vec<_> = ctx - .ast .iter_tops() .filter_map(|(_, top)| match top { ast::Top::Source(_) | ast::Top::Generator(_) => None, @@ -687,7 +688,7 @@ fn visit_model<'db>(model_id: ast::ModelId, ast_model: &'db ast::Model, ctx: &mu } } -fn visit_composite_type<'db>(ct_id: 
ast::CompositeTypeId, ct: &'db ast::CompositeType, ctx: &mut Context<'db>) { +fn visit_composite_type<'db>(ct_id: crate::CompositeTypeId, ct: &'db ast::CompositeType, ctx: &mut Context<'db>) { for (field_id, ast_field) in ct.iter_fields() { match field_type(ast_field, ctx) { Ok(FieldType::Scalar(scalar_type)) => { @@ -700,7 +701,7 @@ fn visit_composite_type<'db>(ct_id: ast::CompositeTypeId, ct: &'db ast::Composit ctx.types.composite_type_fields.insert((ct_id, field_id), field); } Ok(FieldType::Model(referenced_model_id)) => { - let referenced_model_name = ctx.ast[referenced_model_id].name(); + let referenced_model_name = ctx.asts[referenced_model_id].name(); ctx.push_error(DatamodelError::new_composite_type_validation_error(&format!("{referenced_model_name} refers to a model, making this a relation field. Relation fields inside composite types are not supported."), ct.name(), ast_field.field_type.span())) } Err(supported) => ctx.push_error(DatamodelError::new_type_not_found_error( @@ -734,13 +735,20 @@ fn field_type<'db>(field: &'db ast::Field, ctx: &mut Context<'db>) -> Result Ok(FieldType::Model(model_id)), - Some((ast::TopId::Enum(enum_id), ast::Top::Enum(_))) => Ok(FieldType::Scalar(ScalarFieldType::Enum(enum_id))), - Some((ast::TopId::CompositeType(ctid), ast::Top::CompositeType(_))) => { - Ok(FieldType::Scalar(ScalarFieldType::CompositeType(ctid))) + match ctx + .names + .tops + .get(&supported_string_id) + .map(|id| (id.0, id.1, &ctx.asts[*id])) + { + Some((file_id, ast::TopId::Model(model_id), ast::Top::Model(_))) => Ok(FieldType::Model((file_id, model_id))), + Some((file_id, ast::TopId::Enum(enum_id), ast::Top::Enum(_))) => { + Ok(FieldType::Scalar(ScalarFieldType::Enum((file_id, enum_id)))) + } + Some((file_id, ast::TopId::CompositeType(ctid), ast::Top::CompositeType(_))) => { + Ok(FieldType::Scalar(ScalarFieldType::CompositeType((file_id, ctid)))) } - Some((_, ast::Top::Generator(_))) | Some((_, ast::Top::Source(_))) => unreachable!(), + Some((_, _, 
ast::Top::Generator(_))) | Some((_, _, ast::Top::Source(_))) => unreachable!(), None => Err(supported), _ => unreachable!(), } diff --git a/psl/parser-database/src/walkers.rs b/psl/parser-database/src/walkers.rs index 7ee92e3e3f7..abfe290b5bd 100644 --- a/psl/parser-database/src/walkers.rs +++ b/psl/parser-database/src/walkers.rs @@ -25,6 +25,8 @@ pub use relation::*; pub use relation_field::*; pub use scalar_field::*; +use crate::{ast, FileId}; + /// A generic walker. Only walkers intantiated with a concrete ID type (`I`) are useful. #[derive(Clone, Copy)] pub struct Walker<'db, I> { @@ -52,12 +54,18 @@ where } impl crate::ParserDatabase { + fn iter_tops(&self) -> impl Iterator + '_ { + self.asts + .iter() + .flat_map(move |(file_id, _, _, ast)| ast.iter_tops().map(move |(top_id, top)| (file_id, top_id, top))) + } + /// Find an enum by name. pub fn find_enum<'db>(&'db self, name: &str) -> Option> { self.interner .lookup(name) .and_then(|name_id| self.names.tops.get(&name_id)) - .and_then(|top_id| top_id.as_enum_id()) + .and_then(|(file_id, top_id)| top_id.as_enum_id().map(|id| (*file_id, id))) .map(|enum_id| self.walk(enum_id)) } @@ -66,7 +74,7 @@ impl crate::ParserDatabase { self.interner .lookup(name) .and_then(|name_id| self.names.tops.get(&name_id)) - .and_then(|top_id| top_id.as_model_id()) + .and_then(|(file_id, top_id)| top_id.as_model_id().map(|id| (*file_id, id))) .map(|model_id| self.walk(model_id)) } @@ -77,35 +85,31 @@ impl crate::ParserDatabase { /// Walk all enums in the schema. pub fn walk_enums(&self) -> impl Iterator> { - self.ast() - .iter_tops() - .filter_map(|(top_id, _)| top_id.as_enum_id()) - .map(move |enum_id| Walker { db: self, id: enum_id }) + self.iter_tops() + .filter_map(|(file_id, top_id, _)| top_id.as_enum_id().map(|id| (file_id, id))) + .map(move |enum_id| self.walk(enum_id)) } /// Walk all the models in the schema. 
pub fn walk_models(&self) -> impl Iterator> + '_ { - self.ast() - .iter_tops() - .filter_map(|(top_id, _)| top_id.as_model_id()) - .map(move |model_id| self.walk(model_id)) + self.iter_tops() + .filter_map(|(file_id, top_id, _)| top_id.as_model_id().map(|id| (file_id, id))) + .map(move |(file_id, model_id)| self.walk((file_id, model_id))) .filter(|m| !m.ast_model().is_view()) } /// Walk all the views in the schema. pub fn walk_views(&self) -> impl Iterator> + '_ { - self.ast() - .iter_tops() - .filter_map(|(top_id, _)| top_id.as_model_id()) + self.iter_tops() + .filter_map(|(file_id, top_id, _)| top_id.as_model_id().map(|id| (file_id, id))) .map(move |model_id| self.walk(model_id)) .filter(|m| m.ast_model().is_view()) } /// Walk all the composite types in the schema. pub fn walk_composite_types(&self) -> impl Iterator> + '_ { - self.ast() - .iter_tops() - .filter_map(|(top_id, _)| top_id.as_composite_type_id()) + self.iter_tops() + .filter_map(|(file_id, top_id, _)| top_id.as_composite_type_id().map(|id| (file_id, id))) .map(|id| self.walk(id)) } diff --git a/psl/parser-database/src/walkers/composite_type.rs b/psl/parser-database/src/walkers/composite_type.rs index f22648e286e..af286e9d0f2 100644 --- a/psl/parser-database/src/walkers/composite_type.rs +++ b/psl/parser-database/src/walkers/composite_type.rs @@ -1,5 +1,5 @@ use super::Walker; -use crate::{ast, ScalarFieldType, ScalarType}; +use crate::{ast, FileId, ScalarFieldType, ScalarType}; use diagnostics::Span; use schema_ast::ast::{WithDocumentation, WithName}; @@ -17,20 +17,20 @@ use schema_ast::ast::{WithDocumentation, WithName}; /// countryCode String /// } /// ``` -pub type CompositeTypeWalker<'db> = Walker<'db, ast::CompositeTypeId>; +pub type CompositeTypeWalker<'db> = Walker<'db, crate::CompositeTypeId>; /// A field in a composite type. 
-pub type CompositeTypeFieldWalker<'db> = Walker<'db, (ast::CompositeTypeId, ast::FieldId)>; +pub type CompositeTypeFieldWalker<'db> = Walker<'db, (crate::CompositeTypeId, ast::FieldId)>; impl<'db> CompositeTypeWalker<'db> { /// The ID of the composite type node in the AST. - pub fn composite_type_id(self) -> ast::CompositeTypeId { + pub fn composite_type_id(self) -> (FileId, ast::CompositeTypeId) { self.id } /// The composite type node in the AST. pub fn ast_composite_type(self) -> &'db ast::CompositeType { - &self.db.ast()[self.id] + &self.db.asts[self.id] } /// The name of the composite type in the schema. @@ -53,7 +53,7 @@ impl<'db> CompositeTypeFieldWalker<'db> { /// The AST node for the field. pub fn ast_field(self) -> &'db ast::Field { - &self.db.ast[self.id.0][self.id.1] + &self.db.asts[self.id.0][self.id.1] } /// The composite type containing the field. @@ -101,7 +101,10 @@ impl<'db> CompositeTypeFieldWalker<'db> { /// The `@default()` AST attribute on the field, if any. pub fn default_attribute(self) -> Option<&'db ast::Attribute> { - self.field().default.as_ref().map(|d| &self.db.ast[d.default_attribute]) + self.field() + .default + .as_ref() + .map(|d| &self.db.asts[(self.id.0 .0, d.default_attribute.1)]) } /// (attribute scope, native type name, arguments, span) diff --git a/psl/parser-database/src/walkers/enum.rs b/psl/parser-database/src/walkers/enum.rs index c97b420a59f..07624527bb1 100644 --- a/psl/parser-database/src/walkers/enum.rs +++ b/psl/parser-database/src/walkers/enum.rs @@ -1,11 +1,10 @@ -use schema_ast::ast::{IndentationType, NewlineType}; - use crate::{ast, ast::WithDocumentation, types, walkers::Walker}; +use schema_ast::ast::{IndentationType, NewlineType}; /// An `enum` declaration in the schema. -pub type EnumWalker<'db> = Walker<'db, ast::EnumId>; +pub type EnumWalker<'db> = Walker<'db, crate::EnumId>; /// One value in an `enum` declaration in the schema. 
-pub type EnumValueWalker<'db> = Walker<'db, (ast::EnumId, usize)>; +pub type EnumValueWalker<'db> = Walker<'db, (crate::EnumId, usize)>; impl<'db> EnumWalker<'db> { fn attributes(self) -> &'db types::EnumAttributes { @@ -19,7 +18,7 @@ impl<'db> EnumWalker<'db> { /// The AST node. pub fn ast_enum(self) -> &'db ast::Enum { - &self.db.ast()[self.id] + &self.db.asts[self.id] } /// The database name of the enum. diff --git a/psl/parser-database/src/walkers/field.rs b/psl/parser-database/src/walkers/field.rs index d8babd99339..87bea656034 100644 --- a/psl/parser-database/src/walkers/field.rs +++ b/psl/parser-database/src/walkers/field.rs @@ -6,12 +6,12 @@ use crate::{ use schema_ast::ast; /// A model field, scalar or relation. -pub type FieldWalker<'db> = Walker<'db, (ast::ModelId, ast::FieldId)>; +pub type FieldWalker<'db> = Walker<'db, (crate::ModelId, ast::FieldId)>; impl<'db> FieldWalker<'db> { /// The AST node for the field. pub fn ast_field(self) -> &'db ast::Field { - &self.db.ast[self.id.0][self.id.1] + &self.db.asts[self.id.0][self.id.1] } /// The field name. @@ -45,20 +45,14 @@ pub enum RefinedFieldWalker<'db> { impl<'db> From> for FieldWalker<'db> { fn from(w: ScalarFieldWalker<'db>) -> Self { let ScalarField { model_id, field_id, .. } = w.db.types[w.id]; - Walker { - db: w.db, - id: (model_id, field_id), - } + w.db.walk((model_id, field_id)) } } impl<'db> From> for FieldWalker<'db> { fn from(w: RelationFieldWalker<'db>) -> Self { let RelationField { model_id, field_id, .. } = w.db.types[w.id]; - Walker { - db: w.db, - id: (model_id, field_id), - } + w.db.walk((model_id, field_id)) } } diff --git a/psl/parser-database/src/walkers/index.rs b/psl/parser-database/src/walkers/index.rs index e75c4c58fc8..63b6b30b7b4 100644 --- a/psl/parser-database/src/walkers/index.rs +++ b/psl/parser-database/src/walkers/index.rs @@ -11,7 +11,7 @@ use crate::{ /// An index, unique or fulltext attribute. 
#[derive(Copy, Clone)] pub struct IndexWalker<'db> { - pub(crate) model_id: ast::ModelId, + pub(crate) model_id: crate::ModelId, pub(crate) index: ast::AttributeId, pub(crate) db: &'db ParserDatabase, pub(crate) index_attribute: &'db IndexAttribute, @@ -69,7 +69,7 @@ impl<'db> IndexWalker<'db> { /// The AST node of the index/unique attribute. pub fn ast_attribute(self) -> &'db ast::Attribute { - &self.db.ast[self.index] + &self.db.asts[(self.model_id.0, self.index)] } pub(crate) fn attribute(self) -> &'db IndexAttribute { diff --git a/psl/parser-database/src/walkers/model.rs b/psl/parser-database/src/walkers/model.rs index 313efd0ca81..e4290a1a00f 100644 --- a/psl/parser-database/src/walkers/model.rs +++ b/psl/parser-database/src/walkers/model.rs @@ -12,11 +12,12 @@ use super::{ use crate::{ ast::{self, WithName}, types::ModelAttributes, + FileId, }; use schema_ast::ast::{IndentationType, NewlineType, WithSpan}; /// A `model` declaration in the Prisma schema. -pub type ModelWalker<'db> = super::Walker<'db, ast::ModelId>; +pub type ModelWalker<'db> = super::Walker<'db, (FileId, ast::ModelId)>; impl<'db> ModelWalker<'db> { /// The name of the model. @@ -59,14 +60,9 @@ impl<'db> ModelWalker<'db> { .is_some() } - /// The ID of the model in the db - pub fn model_id(self) -> ast::ModelId { - self.id - } - /// The AST node. pub fn ast_model(self) -> &'db ast::Model { - &self.db.ast[self.id] + &self.db.asts[self.id] } /// The parsed attributes. @@ -86,7 +82,7 @@ impl<'db> ModelWalker<'db> { self.attributes() .mapped_name .map(|id| &self.db[id]) - .unwrap_or_else(|| self.db.ast[self.id].name()) + .unwrap_or_else(|| self.ast_model().name()) } /// Used in validation. True only if the model has a single field id. 
@@ -216,7 +212,7 @@ impl<'db> ModelWalker<'db> { None => return IndentationType::default(), }; - let src = self.db.source(); + let src = self.db.source(self.id.0); let start = field.ast_field().span().start; let mut spaces = 0; @@ -241,7 +237,7 @@ impl<'db> ModelWalker<'db> { None => return NewlineType::default(), }; - let src = self.db.source(); + let src = self.db.source(self.id.0); let start = field.ast_field().span().end - 2; match src.chars().nth(start) { diff --git a/psl/parser-database/src/walkers/model/primary_key.rs b/psl/parser-database/src/walkers/model/primary_key.rs index ba3de30ea63..71792dce770 100644 --- a/psl/parser-database/src/walkers/model/primary_key.rs +++ b/psl/parser-database/src/walkers/model/primary_key.rs @@ -8,7 +8,7 @@ use crate::{ /// An `@(@)id` attribute in the schema. #[derive(Copy, Clone)] pub struct PrimaryKeyWalker<'db> { - pub(crate) model_id: ast::ModelId, + pub(crate) model_id: crate::ModelId, pub(crate) attribute: &'db IdAttribute, pub(crate) db: &'db ParserDatabase, } @@ -16,7 +16,7 @@ pub struct PrimaryKeyWalker<'db> { impl<'db> PrimaryKeyWalker<'db> { /// The `@(@)id` AST node. pub fn ast_attribute(self) -> &'db ast::Attribute { - &self.db.ast[self.attribute.source_attribute] + &self.db.asts[(self.model_id.0, self.attribute.source_attribute.1)] } /// The mapped name of the id. diff --git a/psl/parser-database/src/walkers/relation.rs b/psl/parser-database/src/walkers/relation.rs index 1557633fbc0..26e3ec61e05 100644 --- a/psl/parser-database/src/walkers/relation.rs +++ b/psl/parser-database/src/walkers/relation.rs @@ -14,7 +14,7 @@ pub type RelationWalker<'db> = Walker<'db, RelationId>; impl<'db> RelationWalker<'db> { /// The models at each end of the relation. [model A, model B]. Can be the same model twice. 
- pub fn models(self) -> [ast::ModelId; 2] { + pub fn models(self) -> [(FileId, ast::ModelId); 2] { let rel = self.get(); [rel.model_a, rel.model_b] } diff --git a/psl/parser-database/src/walkers/relation_field.rs b/psl/parser-database/src/walkers/relation_field.rs index b96380f03bf..7f6b2e8037a 100644 --- a/psl/parser-database/src/walkers/relation_field.rs +++ b/psl/parser-database/src/walkers/relation_field.rs @@ -28,7 +28,7 @@ impl<'db> RelationFieldWalker<'db> { /// The AST node of the field. pub fn ast_field(self) -> &'db ast::Field { let RelationField { model_id, field_id, .. } = self.db.types[self.id]; - &self.db.ast[model_id][field_id] + &self.db.asts[model_id][field_id] } pub(crate) fn attributes(self) -> &'db RelationField { @@ -83,11 +83,12 @@ impl<'db> RelationFieldWalker<'db> { /// The `@relation` attribute in the field AST. pub fn relation_attribute(self) -> Option<&'db ast::Attribute> { - self.attributes().relation_attribute.map(|id| &self.db.ast[id]) + let attrs = self.attributes(); + attrs.relation_attribute.map(|id| &self.db.asts[(attrs.model_id.0, id)]) } /// Does the relation field reference the passed in model? - pub fn references_model(self, other: ast::ModelId) -> bool { + pub fn references_model(self, other: crate::ModelId) -> bool { self.attributes().referenced_model == other } diff --git a/psl/parser-database/src/walkers/scalar_field.rs b/psl/parser-database/src/walkers/scalar_field.rs index 9cea79b8485..7a9a0984584 100644 --- a/psl/parser-database/src/walkers/scalar_field.rs +++ b/psl/parser-database/src/walkers/scalar_field.rs @@ -19,7 +19,7 @@ impl<'db> ScalarFieldWalker<'db> { /// The field node in the AST. pub fn ast_field(self) -> &'db ast::Field { let ScalarField { model_id, field_id, .. } = self.attributes(); - &self.db.ast[*model_id][*field_id] + &self.db.asts[*model_id][*field_id] } /// Is this field unique? 
This method will return true if: @@ -53,7 +53,7 @@ impl<'db> ScalarFieldWalker<'db> { .default .as_ref() .map(|d| d.default_attribute) - .map(|id| &self.db.ast[id]) + .map(|id| &self.db.asts[id]) } /// The final database name of the field. See crate docs for explanations on database names. @@ -169,7 +169,7 @@ pub struct DefaultValueWalker<'db> { impl<'db> DefaultValueWalker<'db> { /// The AST node of the attribute. pub fn ast_attribute(self) -> &'db ast::Attribute { - &self.db.ast[self.default.default_attribute] + &self.db.asts[self.default.default_attribute] } /// The value expression in the `@default` attribute. @@ -374,7 +374,7 @@ impl<'db> ScalarFieldAttributeWalker<'db> { let mut result = vec![(root_name, None)]; for (ctid, field_id) in path.path() { - let ct = &self.db.ast[*ctid]; + let ct = &self.db.asts[*ctid]; let field = ct[*field_id].name(); result.push((field, Some(ct.name()))); @@ -400,7 +400,7 @@ impl<'db> ScalarFieldAttributeWalker<'db> { let mut result = vec![(root, None)]; for (ctid, field_id) in path.path() { - let ct = &self.db.ast[*ctid]; + let ct = &self.db.asts[*ctid]; let field = &self.db.types.composite_type_fields[&(*ctid, *field_id)] .mapped_name diff --git a/psl/psl-core/src/builtin_connectors/postgres_datamodel_connector.rs b/psl/psl-core/src/builtin_connectors/postgres_datamodel_connector.rs index d5cebd189bc..3bb04eed451 100644 --- a/psl/psl-core/src/builtin_connectors/postgres_datamodel_connector.rs +++ b/psl/psl-core/src/builtin_connectors/postgres_datamodel_connector.rs @@ -497,7 +497,7 @@ impl Connector for PostgresDatamodelConnector { let index_field = db .walk_models() .chain(db.walk_views()) - .find(|model| model.model_id() == model_id) + .find(|model| model.id.1 == model_id) .and_then(|model| { model.indexes().find(|index| { index.attribute_id() diff --git a/psl/psl-core/src/configuration/configuration_struct.rs b/psl/psl-core/src/configuration/configuration_struct.rs index 3da58f6efde..41d3d6ebf41 100644 --- 
a/psl/psl-core/src/configuration/configuration_struct.rs +++ b/psl/psl-core/src/configuration/configuration_struct.rs @@ -6,7 +6,7 @@ use crate::{ }; use enumflags2::BitFlags; -#[derive(Debug)] +#[derive(Debug, Default)] pub struct Configuration { pub generators: Vec, pub datasources: Vec, @@ -18,7 +18,7 @@ impl Configuration { if self.datasources.is_empty() { Err(DatamodelError::new_validation_error( "You defined no datasource. You must define exactly one datasource.", - schema_ast::ast::Span::new(0, 0), + schema_ast::ast::Span::new(0, 0, diagnostics::FileId::ZERO), ) .into()) } else { diff --git a/psl/psl-core/src/lib.rs b/psl/psl-core/src/lib.rs index ca0ce37cc0f..9d1877bd26d 100644 --- a/psl/psl-core/src/lib.rs +++ b/psl/psl-core/src/lib.rs @@ -52,14 +52,57 @@ impl ValidatedSchema { pub fn relation_mode(&self) -> datamodel_connector::RelationMode { self.relation_mode } + + pub fn render_diagnostics(&self) -> String { + let mut out = Vec::new(); + + for error in self.diagnostics.errors() { + let (file_name, source, _) = &self.db[error.span().file_id]; + error.pretty_print(&mut out, file_name, source.as_str()).unwrap(); + } + + String::from_utf8(out).unwrap() + } } /// The most general API for dealing with Prisma schemas. It accumulates what analysis and /// validation information it can, and returns it along with any error and warning diagnostics. 
pub fn validate(file: SourceFile, connectors: ConnectorRegistry<'_>) -> ValidatedSchema { let mut diagnostics = Diagnostics::new(); - let db = ParserDatabase::new(file, &mut diagnostics); - let configuration = validate_configuration(db.ast(), &mut diagnostics, connectors); + let db = ParserDatabase::new_single_file(file, &mut diagnostics); + let configuration = validate_configuration(db.ast_assert_single(), &mut diagnostics, connectors); + let datasources = &configuration.datasources; + let out = validate::validate(db, datasources, configuration.preview_features(), diagnostics); + + ValidatedSchema { + diagnostics: out.diagnostics, + configuration, + connector: out.connector, + db: out.db, + relation_mode: out.relation_mode, + } +} + +/// The most general API for dealing with Prisma schemas. It accumulates what analysis and +/// validation information it can, and returns it along with any error and warning diagnostics. +pub fn validate_multi_file(files: Vec<(String, SourceFile)>, connectors: ConnectorRegistry<'_>) -> ValidatedSchema { + assert!( + !files.is_empty(), + "psl::validate_multi_file() must be called with at least one file" + ); + let mut diagnostics = Diagnostics::new(); + let db = ParserDatabase::new(files, &mut diagnostics); + + // TODO: the bulk of configuration block analysis should be part of ParserDatabase::new(). 
+ let mut configuration = Configuration::default(); + for ast in db.iter_asts() { + let new_config = validate_configuration(ast, &mut diagnostics, connectors); + + configuration.datasources.extend(new_config.datasources.into_iter()); + configuration.generators.extend(new_config.generators.into_iter()); + configuration.warnings.extend(new_config.warnings.into_iter()); + } + let datasources = &configuration.datasources; let out = validate::validate(db, datasources, configuration.preview_features(), diagnostics); @@ -77,8 +120,8 @@ pub fn validate(file: SourceFile, connectors: ConnectorRegistry<'_>) -> Validate /// computationally or in terms of bundle size (e.g., for `query-engine-wasm`). pub fn parse_without_validation(file: SourceFile, connectors: ConnectorRegistry<'_>) -> ValidatedSchema { let mut diagnostics = Diagnostics::new(); - let db = ParserDatabase::new(file, &mut diagnostics); - let configuration = validate_configuration(db.ast(), &mut diagnostics, connectors); + let db = ParserDatabase::new_single_file(file, &mut diagnostics); + let configuration = validate_configuration(db.ast_assert_single(), &mut diagnostics, connectors); let datasources = &configuration.datasources; let out = validate::parse_without_validation(db, datasources); @@ -97,7 +140,7 @@ pub fn parse_configuration( connectors: ConnectorRegistry<'_>, ) -> Result { let mut diagnostics = Diagnostics::default(); - let ast = schema_ast::parse_schema(schema, &mut diagnostics); + let ast = schema_ast::parse_schema(schema, &mut diagnostics, diagnostics::FileId::ZERO); let out = validate_configuration(&ast, &mut diagnostics, connectors); diagnostics.to_result().map(|_| out) } diff --git a/psl/psl-core/src/reformat.rs b/psl/psl-core/src/reformat.rs index eaf8aa5400b..09d21c731b3 100644 --- a/psl/psl-core/src/reformat.rs +++ b/psl/psl-core/src/reformat.rs @@ -9,7 +9,7 @@ pub fn reformat(source: &str, indent_width: usize) -> Option { let file = 
SourceFile::new_allocated(Arc::from(source.to_owned().into_boxed_str())); let mut diagnostics = diagnostics::Diagnostics::new(); - let db = parser_database::ParserDatabase::new(file, &mut diagnostics); + let db = parser_database::ParserDatabase::new_single_file(file, &mut diagnostics); let source_to_reformat = if diagnostics.has_errors() { Cow::Borrowed(source) diff --git a/psl/psl-core/src/validate/validation_pipeline/validations/composite_types.rs b/psl/psl-core/src/validate/validation_pipeline/validations/composite_types.rs index da0a3db3a51..fbaaa3525a4 100644 --- a/psl/psl-core/src/validate/validation_pipeline/validations/composite_types.rs +++ b/psl/psl-core/src/validate/validation_pipeline/validations/composite_types.rs @@ -2,7 +2,7 @@ use super::default_value; use crate::{datamodel_connector::ConnectorCapability, validate::validation_pipeline::context::Context}; use diagnostics::DatamodelError; use parser_database::{ - ast::{self, WithSpan}, + ast::WithSpan, walkers::{CompositeTypeFieldWalker, CompositeTypeWalker}, ScalarFieldType, }; @@ -11,8 +11,8 @@ use std::{fmt, rc::Rc}; /// Detect compound type chains that form a cycle, that is not broken with either an optional or an /// array type. 
pub(super) fn detect_composite_cycles(ctx: &mut Context<'_>) { - let mut visited: Vec = Vec::new(); - let mut errors: Vec<(ast::CompositeTypeId, DatamodelError)> = Vec::new(); + let mut visited: Vec = Vec::new(); + let mut errors: Vec<(parser_database::CompositeTypeId, DatamodelError)> = Vec::new(); let mut fields_to_traverse: Vec<(CompositeTypeFieldWalker<'_>, Option>>)> = ctx .db diff --git a/psl/psl-core/src/validate/validation_pipeline/validations/constraint_namespace.rs b/psl/psl-core/src/validate/validation_pipeline/validations/constraint_namespace.rs index e4b02ebc930..495aa9b4467 100644 --- a/psl/psl-core/src/validate/validation_pipeline/validations/constraint_namespace.rs +++ b/psl/psl-core/src/validate/validation_pipeline/validations/constraint_namespace.rs @@ -1,5 +1,4 @@ use crate::datamodel_connector::{walker_ext_traits::*, ConstraintScope}; -use parser_database::ast; use std::{borrow::Cow, collections::HashMap, ops::Deref}; /// A constraint namespace consists of two kinds of namespaces: @@ -10,8 +9,8 @@ use std::{borrow::Cow, collections::HashMap, ops::Deref}; pub(crate) struct ConstraintNamespace<'db> { // (ConstraintScope, schema name, name) -> occurrences global: HashMap<(ConstraintScope, Option<&'db str>, Cow<'db, str>), usize>, - local: HashMap<(ast::ModelId, ConstraintScope, Cow<'db, str>), usize>, - local_custom_name: HashMap<(ast::ModelId, Cow<'db, str>), usize>, + local: HashMap<(parser_database::ModelId, ConstraintScope, Cow<'db, str>), usize>, + local_custom_name: HashMap<(parser_database::ModelId, Cow<'db, str>), usize>, } impl<'db> ConstraintNamespace<'db> { @@ -19,7 +18,7 @@ impl<'db> ConstraintNamespace<'db> { /// local violations in the given model. 
pub(crate) fn constraint_name_scope_violations( &self, - model_id: ast::ModelId, + model_id: parser_database::ModelId, name: ConstraintName<'db>, ctx: &super::Context<'db>, ) -> impl Iterator + '_ { @@ -43,7 +42,7 @@ impl<'db> ConstraintNamespace<'db> { fn local_constraint_name_scope_violations( &self, - model_id: ast::ModelId, + model_id: parser_database::ModelId, name: ConstraintName<'db>, ) -> impl Iterator + '_ { name.possible_scopes().filter(move |scope| { @@ -54,7 +53,11 @@ impl<'db> ConstraintNamespace<'db> { }) } - pub(crate) fn local_custom_name_scope_violations(&self, model_id: ast::ModelId, name: &'db str) -> bool { + pub(crate) fn local_custom_name_scope_violations( + &self, + model_id: parser_database::ModelId, + name: &'db str, + ) -> bool { match self.local_custom_name.get(&(model_id, Cow::from(name))) { Some(count) => *count > 1, None => false, @@ -127,7 +130,7 @@ impl<'db> ConstraintNamespace<'db> { for index in model.indexes() { let counter = self .local - .entry((model.model_id(), scope, index.constraint_name(ctx.connector))) + .entry((model.id, scope, index.constraint_name(ctx.connector))) .or_default(); *counter += 1; @@ -139,7 +142,7 @@ impl<'db> ConstraintNamespace<'db> { pub(super) fn add_local_primary_keys(&mut self, scope: ConstraintScope, ctx: &super::Context<'db>) { for model in ctx.db.walk_models().chain(ctx.db.walk_views()) { if let Some(name) = model.primary_key().and_then(|pk| pk.constraint_name(ctx.connector)) { - let counter = self.local.entry((model.model_id(), scope, name)).or_default(); + let counter = self.local.entry((model.id, scope, name)).or_default(); *counter += 1; } } @@ -149,18 +152,12 @@ impl<'db> ConstraintNamespace<'db> { pub(super) fn add_local_custom_names_for_primary_keys_and_uniques(&mut self, ctx: &super::Context<'db>) { for model in ctx.db.walk_models().chain(ctx.db.walk_views()) { if let Some(name) = model.primary_key().and_then(|pk| pk.name()) { - let counter = self - .local_custom_name - 
.entry((model.model_id(), Cow::from(name))) - .or_default(); + let counter = self.local_custom_name.entry((model.id, Cow::from(name))).or_default(); *counter += 1; } for index in model.indexes() { if let Some(name) = index.name() { - let counter = self - .local_custom_name - .entry((model.model_id(), Cow::from(name))) - .or_default(); + let counter = self.local_custom_name.entry((model.id, Cow::from(name))).or_default(); *counter += 1; } } @@ -175,7 +172,7 @@ impl<'db> ConstraintNamespace<'db> { .filter_map(|r| r.refine().as_inline()) .map(|r| r.constraint_name(ctx.connector)) { - let counter = self.local.entry((model.model_id(), scope, name)).or_default(); + let counter = self.local.entry((model.id, scope, name)).or_default(); *counter += 1; } diff --git a/psl/psl-core/src/validate/validation_pipeline/validations/fields.rs b/psl/psl-core/src/validate/validation_pipeline/validations/fields.rs index 0613fda2a48..674d8e50d3b 100644 --- a/psl/psl-core/src/validate/validation_pipeline/validations/fields.rs +++ b/psl/psl-core/src/validate/validation_pipeline/validations/fields.rs @@ -21,7 +21,7 @@ pub(super) fn validate_client_name(field: FieldWalker<'_>, names: &Names<'_>, ct "model" }; - for taken in names.name_taken(model.model_id(), field.name()).into_iter() { + for taken in names.name_taken(model.id, field.name()).into_iter() { match taken { NameTaken::Index => { let message = format!( @@ -82,7 +82,7 @@ pub(super) fn has_a_unique_default_constraint_name( }; for violation in names.constraint_namespace.constraint_name_scope_violations( - field.model().model_id(), + field.model().id, ConstraintName::Default(name.as_ref()), ctx, ) { diff --git a/psl/psl-core/src/validate/validation_pipeline/validations/indexes.rs b/psl/psl-core/src/validate/validation_pipeline/validations/indexes.rs index 9a7ac919fff..e9bae626f37 100644 --- a/psl/psl-core/src/validate/validation_pipeline/validations/indexes.rs +++ b/psl/psl-core/src/validate/validation_pipeline/validations/indexes.rs 
@@ -14,11 +14,11 @@ pub(super) fn has_a_unique_constraint_name(index: IndexWalker<'_>, names: &super let name = index.constraint_name(ctx.connector); let model = index.model(); - for violation in names.constraint_namespace.constraint_name_scope_violations( - model.model_id(), - ConstraintName::Index(name.as_ref()), - ctx, - ) { + for violation in + names + .constraint_namespace + .constraint_name_scope_violations(model.id, ConstraintName::Index(name.as_ref()), ctx) + { let message = format!( "The given constraint name `{}` has to be unique in the following namespace: {}. Please provide a different name using the `map` argument.", name, @@ -52,7 +52,7 @@ pub(super) fn unique_index_has_a_unique_custom_name_per_model( if let Some(name) = index.name() { if names .constraint_namespace - .local_custom_name_scope_violations(model.model_id(), name.as_ref()) + .local_custom_name_scope_violations(model.id, name.as_ref()) { let message = format!( "The given custom name `{name}` has to be unique on the model. Please provide a different name for the `name` argument." 
diff --git a/psl/psl-core/src/validate/validation_pipeline/validations/models.rs b/psl/psl-core/src/validate/validation_pipeline/validations/models.rs index a8c222c9160..a53063624b2 100644 --- a/psl/psl-core/src/validate/validation_pipeline/validations/models.rs +++ b/psl/psl-core/src/validate/validation_pipeline/validations/models.rs @@ -1,6 +1,5 @@ use super::database_name::validate_db_name; use crate::{ - ast, datamodel_connector::{walker_ext_traits::*, ConnectorCapability}, diagnostics::DatamodelError, parser_database::ast::{WithName, WithSpan}, @@ -77,7 +76,7 @@ pub(super) fn has_a_unique_primary_key_name(model: ModelWalker<'_>, names: &supe ); for violation in names.constraint_namespace.constraint_name_scope_violations( - model.model_id(), + model.id, super::constraint_namespace::ConstraintName::PrimaryKey(name.as_ref()), ctx, ) { @@ -115,7 +114,7 @@ pub(super) fn has_a_unique_custom_primary_key_name_per_model( if let Some(name) = pk.name() { if names .constraint_namespace - .local_custom_name_scope_violations(model.model_id(), name.as_ref()) + .local_custom_name_scope_violations(model.id, name.as_ref()) { let message = format!( "The given custom name `{name}` has to be unique on the model. Please provide a different name for the `name` argument." 
@@ -362,15 +361,16 @@ pub(super) fn schema_attribute_missing(model: ModelWalker<'_>, ctx: &mut Context pub(super) fn database_name_clashes(ctx: &mut Context<'_>) { // (schema_name, model_database_name) -> ModelId - let mut database_names: HashMap<(Option<&str>, &str), ast::ModelId> = HashMap::with_capacity(ctx.db.models_count()); + let mut database_names: HashMap<(Option<&str>, &str), parser_database::ModelId> = + HashMap::with_capacity(ctx.db.models_count()); for model in ctx.db.walk_models().chain(ctx.db.walk_views()) { let key = (model.schema().map(|(name, _)| name), model.database_name()); - match database_names.insert(key, model.model_id()) { + match database_names.insert(key, model.id) { // Two branches because we want to put the error on the @@map attribute, and it can be // on either model. Some(existing) if model.mapped_name().is_some() => { - let existing_model_name = &ctx.db.ast()[existing].name(); + let existing_model_name = &ctx.db.ast(existing.0)[existing.1].name(); let attribute = model .ast_model() .attributes @@ -385,7 +385,7 @@ pub(super) fn database_name_clashes(ctx: &mut Context<'_>) { )); } Some(existing) => { - let existing_model = &ctx.db.ast()[existing]; + let existing_model = &ctx.db.ast(existing.0)[existing.1]; let attribute = existing_model .attributes .iter() diff --git a/psl/psl-core/src/validate/validation_pipeline/validations/names.rs b/psl/psl-core/src/validate/validation_pipeline/validations/names.rs index 0c818610f08..fdc0afaf7b8 100644 --- a/psl/psl-core/src/validate/validation_pipeline/validations/names.rs +++ b/psl/psl-core/src/validate/validation_pipeline/validations/names.rs @@ -1,6 +1,8 @@ use super::constraint_namespace::ConstraintNamespace; -use crate::ast::ModelId; -use parser_database::walkers::{RelationFieldId, RelationName}; +use parser_database::{ + walkers::{RelationFieldId, RelationName}, + ModelId, +}; use std::collections::{HashMap, HashSet}; type RelationIdentifier<'db> = (ModelId, ModelId, RelationName<'db>); @@ 
-28,11 +30,11 @@ impl<'db> Names<'db> { let mut primary_key_names: HashMap = HashMap::new(); for model in ctx.db.walk_models().chain(ctx.db.walk_views()) { - let model_id = model.model_id(); + let model_id = model.id; for field in model.relation_fields() { - let model_id = field.model().model_id(); - let related_model_id = field.related_model().model_id(); + let model_id = field.model().id; + let related_model_id = field.related_model().id; let identifier = (model_id, related_model_id, field.relation_name()); let field_ids = relation_names.entry(identifier).or_default(); @@ -51,7 +53,7 @@ impl<'db> Names<'db> { } if let Some(pk) = model.primary_key().and_then(|pk| pk.name()) { - primary_key_names.insert(model.model_id(), pk); + primary_key_names.insert(model.id, pk); } } diff --git a/psl/psl-core/src/validate/validation_pipeline/validations/relation_fields.rs b/psl/psl-core/src/validate/validation_pipeline/validations/relation_fields.rs index 146f119f149..6d1b9cb5166 100644 --- a/psl/psl-core/src/validate/validation_pipeline/validations/relation_fields.rs +++ b/psl/psl-core/src/validate/validation_pipeline/validations/relation_fields.rs @@ -59,7 +59,7 @@ pub(super) fn ambiguity(field: RelationFieldWalker<'_>, names: &Names<'_>) -> Re let model = field.model(); let related_model = field.related_model(); - let identifier = (model.model_id(), related_model.model_id(), field.relation_name()); + let identifier = (model.id, related_model.id, field.relation_name()); match names.relation_names.get(&identifier) { Some(fields) if fields.len() > 1 => { diff --git a/psl/psl-core/src/validate/validation_pipeline/validations/relations.rs b/psl/psl-core/src/validate/validation_pipeline/validations/relations.rs index ec78b9a61a3..e834fe3b54e 100644 --- a/psl/psl-core/src/validate/validation_pipeline/validations/relations.rs +++ b/psl/psl-core/src/validate/validation_pipeline/validations/relations.rs @@ -38,7 +38,7 @@ pub(super) fn has_a_unique_constraint_name( let model = 
relation.referencing_model(); for violation in names.constraint_namespace.constraint_name_scope_violations( - model.model_id(), + model.id, ConstraintName::Relation(name.as_ref()), ctx, ) { diff --git a/psl/psl/src/lib.rs b/psl/psl/src/lib.rs index 9d7fb8f2616..d1c38eaf433 100644 --- a/psl/psl/src/lib.rs +++ b/psl/psl/src/lib.rs @@ -44,7 +44,7 @@ pub fn parse_schema(file: impl Into) -> Result ValidatedSchema { pub fn parse_without_validation(file: SourceFile, connector_registry: ConnectorRegistry<'_>) -> ValidatedSchema { psl_core::parse_without_validation(file, connector_registry) } +/// The most general API for dealing with Prisma schemas. It accumulates what analysis and +/// validation information it can, and returns it along with any error and warning diagnostics. +pub fn validate_multi_file(files: Vec<(String, SourceFile)>) -> ValidatedSchema { + psl_core::validate_multi_file(files, builtin_connectors::BUILTIN_CONNECTORS) +} diff --git a/psl/psl/tests/common/asserts.rs b/psl/psl/tests/common/asserts.rs index 81d5472d4c1..4278f5cb77e 100644 --- a/psl/psl/tests/common/asserts.rs +++ b/psl/psl/tests/common/asserts.rs @@ -3,7 +3,7 @@ use std::fmt::Debug; use either::Either::{Left, Right}; use psl::datamodel_connector::Connector; use psl::diagnostics::DatamodelWarning; -use psl::parser_database::{walkers, IndexAlgorithm, OperatorClass, ReferentialAction, ScalarType, SortOrder}; +use psl::parser_database::{walkers, IndexAlgorithm, ModelId, OperatorClass, ReferentialAction, ScalarType, SortOrder}; use psl::schema_ast::ast::WithDocumentation; use psl::schema_ast::ast::{self, FieldArity}; use psl::{Diagnostics, StringFromEnvVar}; @@ -67,7 +67,7 @@ pub(crate) trait CompositeFieldAssert { pub(crate) trait RelationFieldAssert { fn assert_ignored(&self, ignored: bool) -> &Self; - fn assert_relation_to(&self, model_id: ast::ModelId) -> &Self; + fn assert_relation_to(&self, model_id: ModelId) -> &Self; fn assert_relation_delete_strategy(&self, action: ReferentialAction) -> 
&Self; fn assert_relation_update_strategy(&self, action: ReferentialAction) -> &Self; } @@ -151,7 +151,7 @@ impl<'a> DatamodelAssert<'a> for psl::ValidatedSchema { impl<'a> RelationFieldAssert for walkers::RelationFieldWalker<'a> { #[track_caller] - fn assert_relation_to(&self, model_id: ast::ModelId) -> &Self { + fn assert_relation_to(&self, model_id: ModelId) -> &Self { assert!(self.references_model(model_id)); self } diff --git a/psl/psl/tests/config/nice_warnings.rs b/psl/psl/tests/config/nice_warnings.rs index 4e1c7ed2bfa..955cbbd89fd 100644 --- a/psl/psl/tests/config/nice_warnings.rs +++ b/psl/psl/tests/config/nice_warnings.rs @@ -14,6 +14,6 @@ fn nice_warning_for_deprecated_generator_preview_feature() { res.warnings.assert_is(DatamodelWarning::new_feature_deprecated( "middlewares", - Span::new(88, 103), + Span::new(88, 103, psl_core::parser_database::FileId::ZERO), )); } diff --git a/psl/psl/tests/datamodel_tests.rs b/psl/psl/tests/datamodel_tests.rs index b950ff6fc2f..ba723194a4f 100644 --- a/psl/psl/tests/datamodel_tests.rs +++ b/psl/psl/tests/datamodel_tests.rs @@ -8,6 +8,7 @@ mod capabilities; mod common; mod config; mod functions; +mod multi_file; mod parsing; mod reformat; mod types; diff --git a/psl/psl/tests/multi_file/basic.rs b/psl/psl/tests/multi_file/basic.rs new file mode 100644 index 00000000000..fd1c2d0e4f9 --- /dev/null +++ b/psl/psl/tests/multi_file/basic.rs @@ -0,0 +1,114 @@ +use crate::common::expect; + +fn expect_errors(schemas: &[[&'static str; 2]], expectation: expect_test::Expect) { + let out = psl::validate_multi_file( + schemas + .iter() + .map(|[file_name, contents]| ((*file_name).into(), (*contents).into())) + .collect(), + ); + + let actual = out.render_diagnostics(); + expectation.assert_eq(&actual) +} + +#[test] +fn multi_file_errors_single_file() { + let files: &[[&'static str; 2]] = &[["a.prisma", "meow"]]; + + let expected = expect![[r#" + error: Error validating: This line is invalid. 
It does not start with any known Prisma schema keyword. + --> a.prisma:1 +  |  +  |  +  1 | meow +  |  + "#]]; + expect_errors(files, expected); +} + +#[test] +fn multi_file_errors_two_files() { + let files: &[[&'static str; 2]] = &[ + ["a.prisma", "meow"], + ["b.prisma", "woof woof"], + ["c.prisma", "choo choo"], + ]; + + let expected = expect![[r#" + error: Error validating: This line is invalid. It does not start with any known Prisma schema keyword. + --> a.prisma:1 +  |  +  |  +  1 | meow +  |  + error: Error validating: This line is invalid. It does not start with any known Prisma schema keyword. + --> b.prisma:1 +  |  +  |  +  1 | woof woof +  |  + error: Error validating: This line is invalid. It does not start with any known Prisma schema keyword. + --> c.prisma:1 +  |  +  |  +  1 | choo choo +  |  + "#]]; + expect_errors(files, expected); +} + +#[test] +fn multi_file_errors_relation() { + let files: &[[&'static str; 2]] = &[ + [ + "b.prisma", + r#" +generator client { + provider = "prisma-client-js" +} + +model Post { + id Int @id + test String @db.Text + user_id Int + user User @relation(fields: [user_id], references: [id]) +} +"#, + ], + [ + "a.prisma", + r#" +datasource db { + provider = "postgresql" + url = env("TEST_DATABASE_URL") +} + +model User { + id Int @id + test String @db.FunnyText + post_id Int @unique + post Post +} + +"#, + ], + ]; + + let expected = expect![[r#" + error: Native type FunnyText is not supported for postgresql connector. + --> a.prisma:9 +  |  +  8 |  id Int @id +  9 |  test String @db.FunnyText +  |  + error: Error parsing attribute "@relation": A one-to-one relation must use unique fields on the defining side. Either add an `@unique` attribute to the field `user_id`, or change the relation to one-to-many. 
+ --> b.prisma:10 +  |  +  9 |  user_id Int + 10 |  user User @relation(fields: [user_id], references: [id]) + 11 | } +  |  + "#]]; + expect_errors(files, expected); +} diff --git a/psl/psl/tests/multi_file/mod.rs b/psl/psl/tests/multi_file/mod.rs new file mode 100644 index 00000000000..1bca5f8cba7 --- /dev/null +++ b/psl/psl/tests/multi_file/mod.rs @@ -0,0 +1 @@ +mod basic; diff --git a/psl/psl/tests/validation_tests.rs b/psl/psl/tests/validation_tests.rs index b6efaa4215c..6d0120cf933 100644 --- a/psl/psl/tests/validation_tests.rs +++ b/psl/psl/tests/validation_tests.rs @@ -10,7 +10,7 @@ fn parse_schema_fail_on_diagnostics(file: impl Into) -> Result Ok(schema), diff --git a/psl/schema-ast/src/ast/identifier.rs b/psl/schema-ast/src/ast/identifier.rs index d1c72732a54..92eccefecf1 100644 --- a/psl/schema-ast/src/ast/identifier.rs +++ b/psl/schema-ast/src/ast/identifier.rs @@ -1,4 +1,5 @@ use super::{Span, WithSpan}; +use diagnostics::FileId; /// An identifier. #[derive(Debug, Clone, PartialEq)] @@ -9,17 +10,17 @@ pub struct Identifier { pub span: Span, } -impl WithSpan for Identifier { - fn span(&self) -> Span { - self.span - } -} - -impl From> for Identifier { - fn from(pair: pest::iterators::Pair<'_, T>) -> Self { +impl Identifier { + pub(crate) fn new(pair: pest::iterators::Pair<'_, T>, file_id: FileId) -> Self { Identifier { name: pair.as_str().to_owned(), - span: pair.as_span().into(), + span: (file_id, pair.as_span()).into(), } } } + +impl WithSpan for Identifier { + fn span(&self) -> Span { + self.span + } +} diff --git a/psl/schema-ast/src/parser/parse_arguments.rs b/psl/schema-ast/src/parser/parse_arguments.rs index 67b5d930f83..b2579c6e6cd 100644 --- a/psl/schema-ast/src/parser/parse_arguments.rs +++ b/psl/schema-ast/src/parser/parse_arguments.rs @@ -4,20 +4,25 @@ use super::{ Rule, }; use crate::ast; -use diagnostics::Diagnostics; +use diagnostics::{Diagnostics, FileId}; -pub(crate) fn parse_arguments_list(token: Pair<'_>, arguments: &mut 
ast::ArgumentsList, diagnostics: &mut Diagnostics) { +pub(crate) fn parse_arguments_list( + token: Pair<'_>, + arguments: &mut ast::ArgumentsList, + diagnostics: &mut Diagnostics, + file_id: FileId, +) { debug_assert_eq!(token.as_rule(), Rule::arguments_list); for current in token.into_inner() { let current_span = current.as_span(); match current.as_rule() { // This is a named arg. - Rule::named_argument => arguments.arguments.push(parse_named_arg(current, diagnostics)), + Rule::named_argument => arguments.arguments.push(parse_named_arg(current, diagnostics, file_id)), // This is an unnamed arg. Rule::expression => arguments.arguments.push(ast::Argument { name: None, - value: parse_expression(current, diagnostics), - span: ast::Span::from(current_span), + value: parse_expression(current, diagnostics, file_id), + span: ast::Span::from((file_id, current_span)), }), // This is an argument without a value. // It is not valid, but we parse it for autocompletion. @@ -26,17 +31,19 @@ pub(crate) fn parse_arguments_list(token: Pair<'_>, arguments: &mut ast::Argumen .into_inner() .find(|tok| tok.as_rule() == Rule::identifier) .unwrap(); - arguments.empty_arguments.push(ast::EmptyArgument { name: name.into() }) + arguments.empty_arguments.push(ast::EmptyArgument { + name: ast::Identifier::new(name, file_id), + }) } Rule::trailing_comma => { - arguments.trailing_comma = Some(current.as_span().into()); + arguments.trailing_comma = Some((file_id, current.as_span()).into()); } _ => parsing_catch_all(¤t, "attribute arguments"), } } } -fn parse_named_arg(pair: Pair<'_>, diagnostics: &mut Diagnostics) -> ast::Argument { +fn parse_named_arg(pair: Pair<'_>, diagnostics: &mut Diagnostics, file_id: FileId) -> ast::Argument { debug_assert_eq!(pair.as_rule(), Rule::named_argument); let mut name: Option = None; let mut argument: Option = None; @@ -44,8 +51,8 @@ fn parse_named_arg(pair: Pair<'_>, diagnostics: &mut Diagnostics) -> ast::Argume for current in pair.into_inner() { match 
current.as_rule() { - Rule::identifier => name = Some(current.into()), - Rule::expression => argument = Some(parse_expression(current, diagnostics)), + Rule::identifier => name = Some(ast::Identifier::new(current, file_id)), + Rule::expression => argument = Some(parse_expression(current, diagnostics, file_id)), _ => parsing_catch_all(¤t, "attribute argument"), } } @@ -54,7 +61,7 @@ fn parse_named_arg(pair: Pair<'_>, diagnostics: &mut Diagnostics) -> ast::Argume (Some(name), Some(value)) => ast::Argument { name: Some(name), value, - span: ast::Span::from(pair_span), + span: ast::Span::from((file_id, pair_span)), }, _ => panic!("Encountered impossible attribute arg during parsing: {pair_str:?}"), } diff --git a/psl/schema-ast/src/parser/parse_attribute.rs b/psl/schema-ast/src/parser/parse_attribute.rs index 16983303097..6420d796ad6 100644 --- a/psl/schema-ast/src/parser/parse_attribute.rs +++ b/psl/schema-ast/src/parser/parse_attribute.rs @@ -3,16 +3,21 @@ use super::{ Rule, }; use crate::{ast::*, parser::parse_arguments::parse_arguments_list}; +use diagnostics::FileId; -pub(crate) fn parse_attribute(pair: Pair<'_>, diagnostics: &mut diagnostics::Diagnostics) -> Attribute { - let span = Span::from(pair.as_span()); +pub(crate) fn parse_attribute( + pair: Pair<'_>, + diagnostics: &mut diagnostics::Diagnostics, + file_id: FileId, +) -> Attribute { + let span = Span::from((file_id, pair.as_span())); let mut name = None; let mut arguments: ArgumentsList = ArgumentsList::default(); for current in pair.into_inner() { match current.as_rule() { - Rule::path => name = Some(current.into()), - Rule::arguments_list => parse_arguments_list(current, &mut arguments, diagnostics), + Rule::path => name = Some(Identifier::new(current, file_id)), + Rule::arguments_list => parse_arguments_list(current, &mut arguments, diagnostics, file_id), _ => parsing_catch_all(¤t, "attribute"), } } diff --git a/psl/schema-ast/src/parser/parse_composite_type.rs 
b/psl/schema-ast/src/parser/parse_composite_type.rs index 6ada40e61e1..28873fbf701 100644 --- a/psl/schema-ast/src/parser/parse_composite_type.rs +++ b/psl/schema-ast/src/parser/parse_composite_type.rs @@ -6,12 +6,13 @@ use super::{ Rule, }; use crate::ast; -use diagnostics::{DatamodelError, Diagnostics, Span}; +use diagnostics::{DatamodelError, Diagnostics, FileId, Span}; pub(crate) fn parse_composite_type( pair: Pair<'_>, doc_comment: Option>, diagnostics: &mut Diagnostics, + file_id: FileId, ) -> ast::CompositeType { let pair_span = pair.as_span(); let mut name: Option = None; @@ -22,53 +23,53 @@ pub(crate) fn parse_composite_type( match current.as_rule() { Rule::BLOCK_OPEN | Rule::BLOCK_CLOSE => {} Rule::TYPE_KEYWORD => (), - Rule::identifier => name = Some(current.into()), + Rule::identifier => name = Some(ast::Identifier::new(current, file_id)), Rule::model_contents => { let mut pending_field_comment: Option> = None; - inner_span = Some(current.as_span().into()); + inner_span = Some((file_id, current.as_span()).into()); for item in current.into_inner() { let current_span = item.as_span(); match item.as_rule() { Rule::block_attribute => { - let attr = parse_attribute(item, diagnostics); + let attr = parse_attribute(item, diagnostics, file_id); let err = match attr.name.name.as_str() { "map" => { DatamodelError::new_validation_error( "The name of a composite type is not persisted in the database, therefore it does not need a mapped database name.", - current_span.into(), + (file_id, current_span).into(), ) } "unique" => { DatamodelError::new_validation_error( "A unique constraint should be defined in the model containing the embed.", - current_span.into(), + (file_id, current_span).into(), ) } "index" => { DatamodelError::new_validation_error( "An index should be defined in the model containing the embed.", - current_span.into(), + (file_id, current_span).into(), ) } "fulltext" => { DatamodelError::new_validation_error( "A fulltext index should be defined in 
the model containing the embed.", - current_span.into(), + (file_id, current_span).into(), ) } "id" => { DatamodelError::new_validation_error( "A composite type cannot define an id.", - current_span.into(), + (file_id, current_span).into(), ) } _ => { DatamodelError::new_validation_error( "A composite type cannot have block-level attributes.", - current_span.into(), + (file_id, current_span).into(), ) } }; @@ -81,6 +82,7 @@ pub(crate) fn parse_composite_type( item, pending_field_comment.take(), diagnostics, + file_id, ) { Ok(field) => { for attr in field.attributes.iter() { @@ -92,7 +94,7 @@ pub(crate) fn parse_composite_type( "Defining `@{name}` attribute for a field in a composite type is not allowed." ); - DatamodelError::new_validation_error(&msg, current_span.into()) + DatamodelError::new_validation_error(&msg, (file_id, current_span).into()) } _ => continue, }; @@ -107,7 +109,7 @@ pub(crate) fn parse_composite_type( Rule::comment_block => pending_field_comment = Some(item), Rule::BLOCK_LEVEL_CATCH_ALL => diagnostics.push_error(DatamodelError::new_validation_error( "This line is not a valid field or attribute definition.", - item.as_span().into(), + (file_id, item.as_span()).into(), )), _ => parsing_catch_all(&item, "composite type"), } @@ -122,7 +124,7 @@ pub(crate) fn parse_composite_type( name, fields, documentation: doc_comment.and_then(parse_comment_block), - span: ast::Span::from(pair_span), + span: ast::Span::from((file_id, pair_span)), inner_span: inner_span.unwrap(), }, _ => panic!("Encountered impossible model declaration during parsing",), diff --git a/psl/schema-ast/src/parser/parse_enum.rs b/psl/schema-ast/src/parser/parse_enum.rs index 5e5109de1a9..2dc1f8e7e3f 100644 --- a/psl/schema-ast/src/parser/parse_enum.rs +++ b/psl/schema-ast/src/parser/parse_enum.rs @@ -4,10 +4,15 @@ use super::{ parse_comments::*, Rule, }; -use crate::ast::{Attribute, Comment, Enum, EnumValue, Identifier}; -use diagnostics::{DatamodelError, Diagnostics, Span}; +use 
crate::ast::{self, Attribute, Comment, Enum, EnumValue, Identifier}; +use diagnostics::{DatamodelError, Diagnostics, FileId, Span}; -pub fn parse_enum(pair: Pair<'_>, doc_comment: Option>, diagnostics: &mut Diagnostics) -> Enum { +pub fn parse_enum( + pair: Pair<'_>, + doc_comment: Option>, + diagnostics: &mut Diagnostics, + file_id: FileId, +) -> Enum { let comment: Option = doc_comment.and_then(parse_comment_block); let pair_span = pair.as_span(); let mut name: Option = None; @@ -19,16 +24,16 @@ pub fn parse_enum(pair: Pair<'_>, doc_comment: Option>, diagnostics: &m for current in pairs { match current.as_rule() { Rule::BLOCK_OPEN | Rule::BLOCK_CLOSE | Rule::ENUM_KEYWORD => {} - Rule::identifier => name = Some(current.into()), + Rule::identifier => name = Some(ast::Identifier::new(current, file_id)), Rule::enum_contents => { let mut pending_value_comment = None; - inner_span = Some(current.as_span().into()); + inner_span = Some((file_id, current.as_span()).into()); for item in current.into_inner() { match item.as_rule() { - Rule::block_attribute => attributes.push(parse_attribute(item, diagnostics)), + Rule::block_attribute => attributes.push(parse_attribute(item, diagnostics, file_id)), Rule::enum_value_declaration => { - match parse_enum_value(item, pending_value_comment.take(), diagnostics) { + match parse_enum_value(item, pending_value_comment.take(), diagnostics, file_id) { Ok(enum_value) => values.push(enum_value), Err(err) => diagnostics.push_error(err), } @@ -36,7 +41,7 @@ pub fn parse_enum(pair: Pair<'_>, doc_comment: Option>, diagnostics: &m Rule::comment_block => pending_value_comment = Some(item), Rule::BLOCK_LEVEL_CATCH_ALL => diagnostics.push_error(DatamodelError::new_validation_error( "This line is not an enum value definition.", - item.as_span().into(), + (file_id, item.as_span()).into(), )), _ => parsing_catch_all(&item, "enum"), } @@ -52,7 +57,7 @@ pub fn parse_enum(pair: Pair<'_>, doc_comment: Option>, diagnostics: &m values, attributes, 
documentation: comment, - span: Span::from(pair_span), + span: Span::from((file_id, pair_span)), inner_span: inner_span.unwrap(), }, _ => panic!("Encountered impossible enum declaration during parsing, name is missing.",), @@ -63,6 +68,7 @@ fn parse_enum_value( pair: Pair<'_>, doc_comment: Option>, diagnostics: &mut Diagnostics, + file_id: FileId, ) -> Result { let (pair_str, pair_span) = (pair.as_str(), pair.as_span()); let mut name: Option = None; @@ -71,8 +77,8 @@ fn parse_enum_value( for current in pair.into_inner() { match current.as_rule() { - Rule::identifier => name = Some(current.into()), - Rule::field_attribute => attributes.push(parse_attribute(current, diagnostics)), + Rule::identifier => name = Some(ast::Identifier::new(current, file_id)), + Rule::field_attribute => attributes.push(parse_attribute(current, diagnostics, file_id)), Rule::trailing_comment => { comment = match (comment, parse_trailing_comment(current)) { (None, a) | (a, None) => a, @@ -93,7 +99,7 @@ fn parse_enum_value( name, attributes, documentation: comment, - span: Span::from(pair_span), + span: Span::from((file_id, pair_span)), }), _ => panic!("Encountered impossible enum value declaration during parsing, name is missing: {pair_str:?}",), } diff --git a/psl/schema-ast/src/parser/parse_expression.rs b/psl/schema-ast/src/parser/parse_expression.rs index c5a9b68b17f..f252bbbc41b 100644 --- a/psl/schema-ast/src/parser/parse_expression.rs +++ b/psl/schema-ast/src/parser/parse_expression.rs @@ -4,17 +4,21 @@ use super::{ Rule, }; use crate::ast::*; -use diagnostics::{DatamodelError, Diagnostics}; +use diagnostics::{DatamodelError, Diagnostics, FileId}; -pub(crate) fn parse_expression(token: Pair<'_>, diagnostics: &mut diagnostics::Diagnostics) -> Expression { +pub(crate) fn parse_expression( + token: Pair<'_>, + diagnostics: &mut diagnostics::Diagnostics, + file_id: FileId, +) -> Expression { let first_child = token.into_inner().next().unwrap(); - let span = 
Span::from(first_child.as_span()); + let span = Span::from((file_id, first_child.as_span())); match first_child.as_rule() { Rule::numeric_literal => Expression::NumericValue(first_child.as_str().to_string(), span), - Rule::string_literal => Expression::StringValue(parse_string_literal(first_child, diagnostics), span), + Rule::string_literal => Expression::StringValue(parse_string_literal(first_child, diagnostics, file_id), span), Rule::path => Expression::ConstantValue(first_child.as_str().to_string(), span), - Rule::function_call => parse_function(first_child, diagnostics), - Rule::array_expression => parse_array(first_child, diagnostics), + Rule::function_call => parse_function(first_child, diagnostics, file_id), + Rule::array_expression => parse_array(first_child, diagnostics, file_id), _ => unreachable!( "Encountered impossible literal during parsing: {:?}", first_child.tokens() @@ -22,7 +26,7 @@ pub(crate) fn parse_expression(token: Pair<'_>, diagnostics: &mut diagnostics::D } } -fn parse_function(pair: Pair<'_>, diagnostics: &mut Diagnostics) -> Expression { +fn parse_function(pair: Pair<'_>, diagnostics: &mut Diagnostics, file_id: FileId) -> Expression { let mut name: Option = None; let mut arguments = ArgumentsList::default(); let (pair_str, span) = (pair.as_str(), pair.as_span()); @@ -30,32 +34,32 @@ fn parse_function(pair: Pair<'_>, diagnostics: &mut Diagnostics) -> Expression { for current in pair.into_inner() { match current.as_rule() { Rule::path => name = Some(current.as_str().to_string()), - Rule::arguments_list => parse_arguments_list(current, &mut arguments, diagnostics), + Rule::arguments_list => parse_arguments_list(current, &mut arguments, diagnostics, file_id), _ => parsing_catch_all(¤t, "function"), } } match name { - Some(name) => Expression::Function(name, arguments, Span::from(span)), + Some(name) => Expression::Function(name, arguments, Span::from((file_id, span))), _ => unreachable!("Encountered impossible function during parsing: {:?}", 
pair_str), } } -fn parse_array(token: Pair<'_>, diagnostics: &mut Diagnostics) -> Expression { +fn parse_array(token: Pair<'_>, diagnostics: &mut Diagnostics, file_id: FileId) -> Expression { let mut elements: Vec = vec![]; let span = token.as_span(); for current in token.into_inner() { match current.as_rule() { - Rule::expression => elements.push(parse_expression(current, diagnostics)), + Rule::expression => elements.push(parse_expression(current, diagnostics, file_id)), _ => parsing_catch_all(¤t, "array"), } } - Expression::Array(elements, Span::from(span)) + Expression::Array(elements, Span::from((file_id, span))) } -fn parse_string_literal(token: Pair<'_>, diagnostics: &mut Diagnostics) -> String { +fn parse_string_literal(token: Pair<'_>, diagnostics: &mut Diagnostics, file_id: FileId) -> String { assert!(token.as_rule() == Rule::string_literal); let contents = token.clone().into_inner().next().unwrap(); let contents_str = contents.as_str(); @@ -98,6 +102,7 @@ fn parse_string_literal(token: Pair<'_>, diagnostics: &mut Diagnostics) -> Strin &contents_str[start..], contents.as_span().start() + start, diagnostics, + file_id, ); if let Some(char) = char { @@ -109,7 +114,7 @@ fn parse_string_literal(token: Pair<'_>, diagnostics: &mut Diagnostics) -> Strin } } (_, c) => { - let mut final_span: crate::ast::Span = contents.as_span().into(); + let mut final_span: crate::ast::Span = (file_id, contents.as_span()).into(); final_span.start += start; final_span.end = final_span.start + 1 + c.len_utf8(); diagnostics.push_error(DatamodelError::new_static( @@ -132,11 +137,13 @@ fn try_parse_unicode_codepoint( slice: &str, slice_offset: usize, diagnostics: &mut Diagnostics, + file_id: FileId, ) -> (usize, Option) { let unicode_sequence_error = |consumed| { let span = crate::ast::Span { start: slice_offset, end: (slice_offset + slice.len()).min(slice_offset + consumed), + file_id, }; DatamodelError::new_static("Invalid unicode escape sequence.", span) }; diff --git 
a/psl/schema-ast/src/parser/parse_field.rs b/psl/schema-ast/src/parser/parse_field.rs index 6f11da80aaf..488a315b66b 100644 --- a/psl/schema-ast/src/parser/parse_field.rs +++ b/psl/schema-ast/src/parser/parse_field.rs @@ -5,8 +5,8 @@ use super::{ parse_types::parse_field_type, Rule, }; -use crate::ast::*; -use diagnostics::{DatamodelError, Diagnostics}; +use crate::ast::{self, *}; +use diagnostics::{DatamodelError, Diagnostics, FileId}; pub(crate) fn parse_field( model_name: &str, @@ -14,6 +14,7 @@ pub(crate) fn parse_field( pair: Pair<'_>, block_comment: Option>, diagnostics: &mut Diagnostics, + file_id: FileId, ) -> Result { let pair_span = pair.as_span(); let mut name: Option = None; @@ -23,15 +24,15 @@ pub(crate) fn parse_field( for current in pair.into_inner() { match current.as_rule() { - Rule::identifier => name = Some(current.into()), - Rule::field_type => field_type = Some(parse_field_type(current, diagnostics)?), + Rule::identifier => name = Some(ast::Identifier::new(current, file_id)), + Rule::field_type => field_type = Some(parse_field_type(current, diagnostics, file_id)?), Rule::LEGACY_COLON => { return Err(DatamodelError::new_legacy_parser_error( "Field declarations don't require a `:`.", - current.as_span().into(), + (file_id, current.as_span()).into(), )) } - Rule::field_attribute => attributes.push(parse_attribute(current, diagnostics)), + Rule::field_attribute => attributes.push(parse_attribute(current, diagnostics, file_id)), Rule::trailing_comment => { comment = match (comment, parse_trailing_comment(current)) { (c, None) | (None, c) => c, @@ -51,13 +52,13 @@ pub(crate) fn parse_field( arity, attributes, documentation: comment, - span: Span::from(pair_span), + span: Span::from((file_id, pair_span)), }), _ => Err(DatamodelError::new_model_validation_error( "This field declaration is invalid. 
It is either missing a name or a type.", container_type, model_name, - pair_span.into(), + (file_id, pair_span).into(), )), } } diff --git a/psl/schema-ast/src/parser/parse_model.rs b/psl/schema-ast/src/parser/parse_model.rs index f2aec884d61..549ba52c532 100644 --- a/psl/schema-ast/src/parser/parse_model.rs +++ b/psl/schema-ast/src/parser/parse_model.rs @@ -5,10 +5,15 @@ use super::{ parse_field::parse_field, Rule, }; -use crate::ast::*; -use diagnostics::{DatamodelError, Diagnostics}; +use crate::ast::{self, *}; +use diagnostics::{DatamodelError, Diagnostics, FileId}; -pub(crate) fn parse_model(pair: Pair<'_>, doc_comment: Option>, diagnostics: &mut Diagnostics) -> Model { +pub(crate) fn parse_model( + pair: Pair<'_>, + doc_comment: Option>, + diagnostics: &mut Diagnostics, + file_id: FileId, +) -> Model { let pair_span = pair.as_span(); let mut name: Option = None; let mut attributes: Vec = Vec::new(); @@ -17,19 +22,20 @@ pub(crate) fn parse_model(pair: Pair<'_>, doc_comment: Option>, diagnos for current in pair.into_inner() { match current.as_rule() { Rule::MODEL_KEYWORD | Rule::BLOCK_OPEN | Rule::BLOCK_CLOSE => {} - Rule::identifier => name = Some(current.into()), + Rule::identifier => name = Some(ast::Identifier::new(current, file_id)), Rule::model_contents => { let mut pending_field_comment: Option> = None; for item in current.into_inner() { match item.as_rule() { - Rule::block_attribute => attributes.push(parse_attribute(item, diagnostics)), + Rule::block_attribute => attributes.push(parse_attribute(item, diagnostics, file_id)), Rule::field_declaration => match parse_field( &name.as_ref().unwrap().name, "model", item, pending_field_comment.take(), diagnostics, + file_id, ) { Ok(field) => fields.push(field), Err(err) => diagnostics.push_error(err), @@ -37,7 +43,7 @@ pub(crate) fn parse_model(pair: Pair<'_>, doc_comment: Option>, diagnos Rule::comment_block => pending_field_comment = Some(item), Rule::BLOCK_LEVEL_CATCH_ALL => 
diagnostics.push_error(DatamodelError::new_validation_error( "This line is not a valid field or attribute definition.", - item.as_span().into(), + (file_id, item.as_span()).into(), )), _ => parsing_catch_all(&item, "model"), } @@ -54,7 +60,7 @@ pub(crate) fn parse_model(pair: Pair<'_>, doc_comment: Option>, diagnos attributes, documentation: doc_comment.and_then(parse_comment_block), is_view: false, - span: Span::from(pair_span), + span: Span::from((file_id, pair_span)), }, _ => panic!("Encountered impossible model declaration during parsing",), } diff --git a/psl/schema-ast/src/parser/parse_schema.rs b/psl/schema-ast/src/parser/parse_schema.rs index 6782caab9e4..eb26a48478b 100644 --- a/psl/schema-ast/src/parser/parse_schema.rs +++ b/psl/schema-ast/src/parser/parse_schema.rs @@ -3,11 +3,11 @@ use super::{ parse_source_and_generator::parse_config_block, parse_view::parse_view, PrismaDatamodelParser, Rule, }; use crate::ast::*; -use diagnostics::{DatamodelError, Diagnostics}; +use diagnostics::{DatamodelError, Diagnostics, FileId}; use pest::Parser; /// Parse a PSL string and return its AST. 
-pub fn parse_schema(datamodel_string: &str, diagnostics: &mut Diagnostics) -> SchemaAst { +pub fn parse_schema(datamodel_string: &str, diagnostics: &mut Diagnostics, file_id: FileId) -> SchemaAst { let datamodel_result = PrismaDatamodelParser::parse(Rule::schema, datamodel_string); match datamodel_result { @@ -24,26 +24,26 @@ pub fn parse_schema(datamodel_string: &str, diagnostics: &mut Diagnostics) -> Sc match keyword.as_rule() { Rule::TYPE_KEYWORD => { - top_level_definitions.push(Top::CompositeType(parse_composite_type(current, pending_block_comment.take(), diagnostics))) + top_level_definitions.push(Top::CompositeType(parse_composite_type(current, pending_block_comment.take(), diagnostics, file_id))) } Rule::MODEL_KEYWORD => { - top_level_definitions.push(Top::Model(parse_model(current, pending_block_comment.take(), diagnostics))) + top_level_definitions.push(Top::Model(parse_model(current, pending_block_comment.take(), diagnostics, file_id))) } Rule::VIEW_KEYWORD => { - top_level_definitions.push(Top::Model(parse_view(current, pending_block_comment.take(), diagnostics))) + top_level_definitions.push(Top::Model(parse_view(current, pending_block_comment.take(), diagnostics, file_id))) } _ => unreachable!(), } }, - Rule::enum_declaration => top_level_definitions.push(Top::Enum(parse_enum(current,pending_block_comment.take(), diagnostics))), + Rule::enum_declaration => top_level_definitions.push(Top::Enum(parse_enum(current,pending_block_comment.take(), diagnostics, file_id))), Rule::config_block => { - top_level_definitions.push(parse_config_block(current, diagnostics)); + top_level_definitions.push(parse_config_block(current, diagnostics, file_id)); }, Rule::type_alias => { let error = DatamodelError::new_validation_error( "Invalid type definition. 
Please check the documentation in https://pris.ly/d/composite-types", - current.as_span().into() + (file_id, current.as_span()).into() ); diagnostics.push_error(error); @@ -62,12 +62,12 @@ pub fn parse_schema(datamodel_string: &str, diagnostics: &mut Diagnostics) -> Sc Rule::EOI => {} Rule::CATCH_ALL => diagnostics.push_error(DatamodelError::new_validation_error( "This line is invalid. It does not start with any known Prisma schema keyword.", - current.as_span().into(), + (file_id, current.as_span()).into(), )), // TODO: Add view when we want it to be more visible as a feature. Rule::arbitrary_block => diagnostics.push_error(DatamodelError::new_validation_error( "This block is invalid. It does not start with any known Prisma schema keyword. Valid keywords include \'model\', \'enum\', \'type\', \'datasource\' and \'generator\'.", - current.as_span().into(), + (file_id, current.as_span()).into(), )), Rule::empty_lines => (), _ => unreachable!(), @@ -89,7 +89,7 @@ pub fn parse_schema(datamodel_string: &str, diagnostics: &mut Diagnostics) -> Sc _ => panic!("Could not construct parsing error. 
This should never happend."), }; - diagnostics.push_error(DatamodelError::new_parser_error(expected, location.into())); + diagnostics.push_error(DatamodelError::new_parser_error(expected, (file_id, location).into())); SchemaAst { tops: Vec::new() } } diff --git a/psl/schema-ast/src/parser/parse_source_and_generator.rs b/psl/schema-ast/src/parser/parse_source_and_generator.rs index d5abb6935fc..4c8285e0b5f 100644 --- a/psl/schema-ast/src/parser/parse_source_and_generator.rs +++ b/psl/schema-ast/src/parser/parse_source_and_generator.rs @@ -4,11 +4,10 @@ use super::{ parse_expression::parse_expression, Rule, }; -use crate::ast::*; -use diagnostics::{DatamodelError, Diagnostics}; +use crate::ast::{self, *}; +use diagnostics::{DatamodelError, Diagnostics, FileId}; -#[track_caller] -pub(crate) fn parse_config_block(pair: Pair<'_>, diagnostics: &mut Diagnostics) -> Top { +pub(crate) fn parse_config_block(pair: Pair<'_>, diagnostics: &mut Diagnostics, file_id: FileId) -> Top { let pair_span = pair.as_span(); let mut name: Option = None; let mut properties = Vec::new(); @@ -19,10 +18,10 @@ pub(crate) fn parse_config_block(pair: Pair<'_>, diagnostics: &mut Diagnostics) for current in pair.into_inner() { match current.as_rule() { Rule::config_contents => { - inner_span = Some(current.as_span().into()); + inner_span = Some((file_id, current.as_span()).into()); for item in current.into_inner() { match item.as_rule() { - Rule::key_value => properties.push(parse_key_value(item, diagnostics)), + Rule::key_value => properties.push(parse_key_value(item, diagnostics, file_id)), Rule::comment_block => comment = parse_comment_block(item), Rule::BLOCK_LEVEL_CATCH_ALL => { let msg = format!( @@ -30,14 +29,14 @@ pub(crate) fn parse_config_block(pair: Pair<'_>, diagnostics: &mut Diagnostics) kw.unwrap_or("configuration block") ); - let err = DatamodelError::new_validation_error(&msg, item.as_span().into()); + let err = DatamodelError::new_validation_error(&msg, (file_id, 
item.as_span()).into()); diagnostics.push_error(err); } _ => parsing_catch_all(&item, "source"), } } } - Rule::identifier => name = Some(current.into()), + Rule::identifier => name = Some(ast::Identifier::new(current, file_id)), Rule::DATASOURCE_KEYWORD | Rule::GENERATOR_KEYWORD => kw = Some(current.as_str()), Rule::BLOCK_OPEN | Rule::BLOCK_CLOSE => {} @@ -50,28 +49,28 @@ pub(crate) fn parse_config_block(pair: Pair<'_>, diagnostics: &mut Diagnostics) name: name.unwrap(), properties, documentation: comment, - span: Span::from(pair_span), + span: Span::from((file_id, pair_span)), inner_span: inner_span.unwrap(), }), Some("generator") => Top::Generator(GeneratorConfig { name: name.unwrap(), properties, documentation: comment, - span: Span::from(pair_span), + span: Span::from((file_id, pair_span)), }), _ => unreachable!(), } } -fn parse_key_value(pair: Pair<'_>, diagnostics: &mut Diagnostics) -> ConfigBlockProperty { +fn parse_key_value(pair: Pair<'_>, diagnostics: &mut Diagnostics, file_id: FileId) -> ConfigBlockProperty { let mut name: Option = None; let mut value: Option = None; let (pair_span, pair_str) = (pair.as_span(), pair.as_str()); for current in pair.into_inner() { match current.as_rule() { - Rule::identifier => name = Some(current.into()), - Rule::expression => value = Some(parse_expression(current, diagnostics)), + Rule::identifier => name = Some(ast::Identifier::new(current, file_id)), + Rule::expression => value = Some(parse_expression(current, diagnostics, file_id)), Rule::trailing_comment => (), _ => unreachable!( "Encountered impossible source property declaration during parsing: {:?}", @@ -84,7 +83,7 @@ fn parse_key_value(pair: Pair<'_>, diagnostics: &mut Diagnostics) -> ConfigBlock (Some(name), value) => ConfigBlockProperty { name, value, - span: Span::from(pair_span), + span: Span::from((file_id, pair_span)), }, _ => unreachable!( "Encountered impossible source property declaration during parsing: {:?}", diff --git 
a/psl/schema-ast/src/parser/parse_types.rs b/psl/schema-ast/src/parser/parse_types.rs index 7629ae636f8..d22cfe986fd 100644 --- a/psl/schema-ast/src/parser/parse_types.rs +++ b/psl/schema-ast/src/parser/parse_types.rs @@ -1,47 +1,48 @@ use super::{helpers::Pair, Rule}; use crate::{ast::*, parser::parse_expression::parse_expression}; -use diagnostics::{DatamodelError, Diagnostics}; +use diagnostics::{DatamodelError, Diagnostics, FileId}; pub fn parse_field_type( pair: Pair<'_>, diagnostics: &mut Diagnostics, + file_id: FileId, ) -> Result<(FieldArity, FieldType), DatamodelError> { assert!(pair.as_rule() == Rule::field_type); let current = pair.into_inner().next().unwrap(); match current.as_rule() { Rule::optional_type => Ok(( FieldArity::Optional, - parse_base_type(current.into_inner().next().unwrap(), diagnostics), + parse_base_type(current.into_inner().next().unwrap(), diagnostics, file_id), )), - Rule::base_type => Ok((FieldArity::Required, parse_base_type(current, diagnostics))), + Rule::base_type => Ok((FieldArity::Required, parse_base_type(current, diagnostics, file_id))), Rule::list_type => Ok(( FieldArity::List, - parse_base_type(current.into_inner().next().unwrap(), diagnostics), + parse_base_type(current.into_inner().next().unwrap(), diagnostics, file_id), )), Rule::legacy_required_type => Err(DatamodelError::new_legacy_parser_error( "Fields are required by default, `!` is no longer required.", - current.as_span().into(), + (file_id, current.as_span()).into(), )), Rule::legacy_list_type => Err(DatamodelError::new_legacy_parser_error( "To specify a list, please use `Type[]` instead of `[Type]`.", - current.as_span().into(), + (file_id, current.as_span()).into(), )), Rule::unsupported_optional_list_type => Err(DatamodelError::new_legacy_parser_error( "Optional lists are not supported. 
Use either `Type[]` or `Type?`.", - current.as_span().into(), + (file_id, current.as_span()).into(), )), _ => unreachable!("Encountered impossible field during parsing: {:?}", current.tokens()), } } -fn parse_base_type(pair: Pair<'_>, diagnostics: &mut Diagnostics) -> FieldType { +fn parse_base_type(pair: Pair<'_>, diagnostics: &mut Diagnostics, file_id: FileId) -> FieldType { let current = pair.into_inner().next().unwrap(); match current.as_rule() { Rule::identifier => FieldType::Supported(Identifier { name: current.as_str().to_string(), - span: Span::from(current.as_span()), + span: Span::from((file_id, current.as_span())), }), - Rule::unsupported_type => match parse_expression(current, diagnostics) { + Rule::unsupported_type => match parse_expression(current, diagnostics, file_id) { Expression::StringValue(lit, span) => FieldType::Unsupported(lit, span), _ => unreachable!("Encountered impossible type during parsing"), }, diff --git a/psl/schema-ast/src/parser/parse_view.rs b/psl/schema-ast/src/parser/parse_view.rs index 38066067b7a..546c6e775c6 100644 --- a/psl/schema-ast/src/parser/parse_view.rs +++ b/psl/schema-ast/src/parser/parse_view.rs @@ -6,9 +6,14 @@ use super::{ Rule, }; use crate::ast::{self, Attribute}; -use diagnostics::{DatamodelError, Diagnostics}; +use diagnostics::{DatamodelError, Diagnostics, FileId}; -pub(crate) fn parse_view(pair: Pair<'_>, doc_comment: Option>, diagnostics: &mut Diagnostics) -> ast::Model { +pub(crate) fn parse_view( + pair: Pair<'_>, + doc_comment: Option>, + diagnostics: &mut Diagnostics, + file_id: FileId, +) -> ast::Model { let pair_span = pair.as_span(); let mut name: Option = None; let mut fields: Vec = vec![]; @@ -17,19 +22,20 @@ pub(crate) fn parse_view(pair: Pair<'_>, doc_comment: Option>, diagnost for current in pair.into_inner() { match current.as_rule() { Rule::VIEW_KEYWORD | Rule::BLOCK_OPEN | Rule::BLOCK_CLOSE => (), - Rule::identifier => name = Some(current.into()), + Rule::identifier => name = 
Some(ast::Identifier::new(current, file_id)), Rule::model_contents => { let mut pending_field_comment: Option> = None; for item in current.into_inner() { match item.as_rule() { - Rule::block_attribute => attributes.push(parse_attribute(item, diagnostics)), + Rule::block_attribute => attributes.push(parse_attribute(item, diagnostics, file_id)), Rule::field_declaration => match parse_field( &name.as_ref().unwrap().name, "view", item, pending_field_comment.take(), diagnostics, + file_id, ) { Ok(field) => fields.push(field), Err(err) => diagnostics.push_error(err), @@ -37,7 +43,7 @@ pub(crate) fn parse_view(pair: Pair<'_>, doc_comment: Option>, diagnost Rule::comment_block => pending_field_comment = Some(item), Rule::BLOCK_LEVEL_CATCH_ALL => diagnostics.push_error(DatamodelError::new_validation_error( "This line is not a valid field or attribute definition.", - item.as_span().into(), + (file_id, item.as_span()).into(), )), _ => parsing_catch_all(&item, "view"), } @@ -54,7 +60,7 @@ pub(crate) fn parse_view(pair: Pair<'_>, doc_comment: Option>, diagnost attributes, documentation: doc_comment.and_then(parse_comment_block), is_view: true, - span: ast::Span::from(pair_span), + span: ast::Span::from((file_id, pair_span)), }, _ => panic!("Encountered impossible model declaration during parsing",), } diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/runner/mod.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/runner/mod.rs index 4f0e9aea1f2..a5b376e4fb6 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/runner/mod.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/runner/mod.rs @@ -217,7 +217,7 @@ impl Runner { } pub fn prisma_dml(&self) -> &str { - self.query_schema.internal_data_model.schema.db.source() + self.query_schema.internal_data_model.schema.db.source_assert_single() } pub fn max_bind_values(&self) -> Option { diff --git a/query-engine/query-engine-node-api/src/engine.rs 
b/query-engine/query-engine-node-api/src/engine.rs index 4ca524af699..d9f5314e248 100644 --- a/query-engine/query-engine-node-api/src/engine.rs +++ b/query-engine/query-engine-node-api/src/engine.rs @@ -122,7 +122,7 @@ impl QueryEngine { schema .diagnostics .to_result() - .map_err(|err| ApiError::conversion(err, schema.db.source()))?; + .map_err(|err| ApiError::conversion(err, schema.db.source_assert_single()))?; config .resolve_datasource_urls_query_engine( @@ -130,11 +130,11 @@ impl QueryEngine { |key| env.get(key).map(ToString::to_string), ignore_env_var_errors, ) - .map_err(|err| ApiError::conversion(err, schema.db.source()))?; + .map_err(|err| ApiError::conversion(err, schema.db.source_assert_single()))?; config .validate_that_one_datasource_is_provided() - .map_err(|errors| ApiError::conversion(errors, schema.db.source()))?; + .map_err(|errors| ApiError::conversion(errors, schema.db.source_assert_single()))?; let enable_metrics = config.preview_features().contains(PreviewFeature::Metrics); let enable_tracing = config.preview_features().contains(PreviewFeature::Tracing); @@ -203,7 +203,10 @@ impl QueryEngine { builder.native.env.get(key).map(ToString::to_string) }) .map_err(|err| { - crate::error::ApiError::Conversion(err, builder.schema.db.source().to_owned()) + crate::error::ApiError::Conversion( + err, + builder.schema.db.source_assert_single().to_owned(), + ) })?; ConnectorKind::Rust { url, diff --git a/query-engine/query-structure/src/composite_type.rs b/query-engine/query-structure/src/composite_type.rs index 431c033dd19..9bbff74e129 100644 --- a/query-engine/query-structure/src/composite_type.rs +++ b/query-engine/query-structure/src/composite_type.rs @@ -1,6 +1,7 @@ use crate::{ast, Field}; +use psl::parser_database::CompositeTypeId; -pub type CompositeType = crate::Zipper; +pub type CompositeType = crate::Zipper; impl CompositeType { pub fn name(&self) -> &str { diff --git a/query-engine/query-structure/src/field/composite.rs 
b/query-engine/query-structure/src/field/composite.rs index 30564e5859b..aebe2b36aad 100644 --- a/query-engine/query-structure/src/field/composite.rs +++ b/query-engine/query-structure/src/field/composite.rs @@ -1,6 +1,6 @@ use crate::{parent_container::ParentContainer, CompositeType}; use psl::{ - parser_database::ScalarFieldId, + parser_database::{self as db, ScalarFieldId}, schema_ast::ast::{self, FieldArity}, }; use std::fmt::{Debug, Display}; @@ -8,7 +8,7 @@ use std::fmt::{Debug, Display}; #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub enum CompositeFieldId { InModel(ScalarFieldId), - InCompositeType((ast::CompositeTypeId, ast::FieldId)), + InCompositeType((db::CompositeTypeId, ast::FieldId)), } pub type CompositeField = crate::Zipper; diff --git a/query-engine/query-structure/src/field/mod.rs b/query-engine/query-structure/src/field/mod.rs index 39e43f186c1..d8faf404e66 100644 --- a/query-engine/query-structure/src/field/mod.rs +++ b/query-engine/query-structure/src/field/mod.rs @@ -6,8 +6,8 @@ pub use composite::*; pub use relation::*; pub use scalar::*; -use crate::{ast, parent_container::ParentContainer, Model}; -use psl::parser_database::{walkers, ScalarType}; +use crate::{parent_container::ParentContainer, Model}; +use psl::parser_database::{walkers, EnumId, ScalarType}; use std::{borrow::Cow, hash::Hash}; #[derive(Debug, Clone, PartialEq, Eq, Hash)] @@ -143,7 +143,7 @@ pub enum TypeIdentifier { Float, Decimal, Boolean, - Enum(ast::EnumId), + Enum(EnumId), UUID, Json, DateTime, diff --git a/query-engine/query-structure/src/field/scalar.rs b/query-engine/query-structure/src/field/scalar.rs index becd438db27..c03ada0a9b7 100644 --- a/query-engine/query-structure/src/field/scalar.rs +++ b/query-engine/query-structure/src/field/scalar.rs @@ -1,7 +1,7 @@ use crate::{ast, parent_container::ParentContainer, prelude::*, DefaultKind, NativeTypeInstance, ValueGenerator}; use chrono::{DateTime, FixedOffset}; use psl::{ - parser_database::{walkers, 
ScalarFieldType, ScalarType}, + parser_database::{self as db, walkers, ScalarFieldType, ScalarType}, schema_ast::ast::FieldArity, }; use std::fmt::{Debug, Display}; @@ -12,7 +12,7 @@ pub type ScalarFieldRef = ScalarField; #[derive(Debug, Clone, Copy, Hash, PartialEq, Eq)] pub enum ScalarFieldId { InModel(psl::parser_database::ScalarFieldId), - InCompositeType((ast::CompositeTypeId, ast::FieldId)), + InCompositeType((db::CompositeTypeId, ast::FieldId)), } impl ScalarField { diff --git a/query-engine/query-structure/src/internal_data_model.rs b/query-engine/query-structure/src/internal_data_model.rs index 70f8761cbdc..ce8dd059fa0 100644 --- a/query-engine/query-structure/src/internal_data_model.rs +++ b/query-engine/query-structure/src/internal_data_model.rs @@ -1,5 +1,5 @@ use crate::{prelude::*, CompositeType, InternalEnum}; -use psl::schema_ast::ast; +use psl::parser_database as db; use std::sync::Arc; pub(crate) type InternalDataModelRef = InternalDataModel; @@ -52,11 +52,11 @@ impl InternalDataModel { .ok_or_else(|| DomainError::ModelNotFound { name: name.to_string() }) } - pub fn find_composite_type_by_id(&self, ctid: ast::CompositeTypeId) -> CompositeType { + pub fn find_composite_type_by_id(&self, ctid: db::CompositeTypeId) -> CompositeType { self.clone().zip(ctid) } - pub fn find_model_by_id(&self, model_id: ast::ModelId) -> Model { + pub fn find_model_by_id(&self, model_id: db::ModelId) -> Model { self.clone().zip(model_id) } diff --git a/query-engine/query-structure/src/internal_enum.rs b/query-engine/query-structure/src/internal_enum.rs index 6467adcebf6..13dfd7206dc 100644 --- a/query-engine/query-structure/src/internal_enum.rs +++ b/query-engine/query-structure/src/internal_enum.rs @@ -1,9 +1,8 @@ use crate::Zipper; +use psl::{parser_database::EnumId, schema_ast::ast::EnumValueId}; -use psl::schema_ast::ast; - -pub type InternalEnum = Zipper; -pub type InternalEnumValue = Zipper; +pub type InternalEnum = Zipper; +pub type InternalEnumValue = Zipper; 
impl InternalEnum { pub fn name(&self) -> &str { diff --git a/query-engine/query-structure/src/model.rs b/query-engine/query-structure/src/model.rs index a2d9fa4ff46..310df1fbe6c 100644 --- a/query-engine/query-structure/src/model.rs +++ b/query-engine/query-structure/src/model.rs @@ -1,7 +1,7 @@ use crate::prelude::*; -use psl::{parser_database::walkers, schema_ast::ast}; +use psl::parser_database::{walkers, ModelId}; -pub type Model = crate::Zipper; +pub type Model = crate::Zipper; impl Model { pub fn name(&self) -> &str { diff --git a/query-engine/schema/src/build.rs b/query-engine/schema/src/build.rs index 2970be408b5..b4562757b98 100644 --- a/query-engine/schema/src/build.rs +++ b/query-engine/schema/src/build.rs @@ -16,7 +16,7 @@ pub(crate) use output_types::{mutation_type, query_type}; use self::{enum_types::*, utils::*}; use crate::*; use psl::{datamodel_connector::ConnectorCapability, PreviewFeatures}; -use query_structure::{ast, Field as ModelField, Model, RelationFieldRef, TypeIdentifier}; +use query_structure::{Field as ModelField, Model, RelationFieldRef, TypeIdentifier}; pub fn build(schema: Arc, enable_raw_queries: bool) -> QuerySchema { let preview_features = schema.configuration.preview_features(); diff --git a/query-engine/schema/src/build/enum_types.rs b/query-engine/schema/src/build/enum_types.rs index b0ddc66a638..7401732e939 100644 --- a/query-engine/schema/src/build/enum_types.rs +++ b/query-engine/schema/src/build/enum_types.rs @@ -1,6 +1,7 @@ use super::*; use crate::EnumType; use constants::{filters, itx, json_null, load_strategy, ordering}; +use psl::parser_database as db; use query_structure::prelude::ParentContainer; pub(crate) fn sort_order_enum() -> EnumType { @@ -16,7 +17,7 @@ pub(crate) fn nulls_order_enum() -> EnumType { ) } -pub(crate) fn map_schema_enum_type(ctx: &'_ QuerySchema, enum_id: ast::EnumId) -> EnumType { +pub(crate) fn map_schema_enum_type(ctx: &'_ QuerySchema, enum_id: db::EnumId) -> EnumType { let ident = 
Identifier::new_model(IdentifierType::Enum(ctx.internal_data_model.clone().zip(enum_id))); let schema_enum = ctx.internal_data_model.clone().zip(enum_id); diff --git a/query-engine/schema/src/output_types.rs b/query-engine/schema/src/output_types.rs index 32956d01d50..2b7a86dd516 100644 --- a/query-engine/schema/src/output_types.rs +++ b/query-engine/schema/src/output_types.rs @@ -1,7 +1,7 @@ use super::*; use fmt::Debug; use once_cell::sync::Lazy; -use query_structure::ast::ModelId; +use psl::parser_database as db; use std::{borrow::Cow, fmt}; #[derive(Debug, Clone)] @@ -120,8 +120,7 @@ pub struct ObjectType<'a> { pub(crate) fields: OutputObjectFields<'a>, // Object types can directly map to models. - pub(crate) model: Option, - _heh: (), + pub(crate) model: Option, } impl Debug for ObjectType<'_> { @@ -145,7 +144,6 @@ impl<'a> ObjectType<'a> { identifier, fields: Arc::new(lazy), model: None, - _heh: (), } } @@ -215,7 +213,7 @@ impl<'a> OutputField<'a> { } } - pub fn model(&self) -> Option { + pub fn model(&self) -> Option { self.query_info.as_ref().and_then(|info| info.model) } diff --git a/query-engine/schema/src/query_schema.rs b/query-engine/schema/src/query_schema.rs index e677b10e75a..ff25c17159f 100644 --- a/query-engine/schema/src/query_schema.rs +++ b/query-engine/schema/src/query_schema.rs @@ -2,9 +2,9 @@ use crate::{IdentifierType, ObjectType, OutputField}; use psl::{ can_support_relation_load_strategy, datamodel_connector::{Connector, ConnectorCapabilities, ConnectorCapability, JoinStrategySupport, RelationMode}, - has_capability, PreviewFeature, PreviewFeatures, + has_capability, parser_database as db, PreviewFeature, PreviewFeatures, }; -use query_structure::{ast, InternalDataModel}; +use query_structure::InternalDataModel; use std::{collections::HashMap, fmt}; #[derive(Clone, Debug, Hash, Eq, PartialEq)] @@ -218,7 +218,7 @@ impl QuerySchema { /// Designates a specific top-level operation on a corresponding model. 
#[derive(Debug, Clone, PartialEq, Hash, Eq)] pub struct QueryInfo { - pub model: Option, + pub model: Option, pub tag: QueryTag, } diff --git a/schema-engine/connectors/schema-connector/src/introspection_context.rs b/schema-engine/connectors/schema-connector/src/introspection_context.rs index 54f197935bd..62f116e5ca9 100644 --- a/schema-engine/connectors/schema-connector/src/introspection_context.rs +++ b/schema-engine/connectors/schema-connector/src/introspection_context.rs @@ -38,13 +38,14 @@ impl IntrospectionContext { ) -> Self { let mut config_blocks = String::new(); - for source in previous_schema.db.ast().sources() { - config_blocks.push_str(&previous_schema.db.source()[source.span.start..source.span.end]); + for source in previous_schema.db.ast_assert_single().sources() { + config_blocks.push_str(&previous_schema.db.source_assert_single()[source.span.start..source.span.end]); config_blocks.push('\n'); } - for generator in previous_schema.db.ast().generators() { - config_blocks.push_str(&previous_schema.db.source()[generator.span.start..generator.span.end]); + for generator in previous_schema.db.ast_assert_single().generators() { + config_blocks + .push_str(&previous_schema.db.source_assert_single()[generator.span.start..generator.span.end]); config_blocks.push('\n'); } @@ -70,7 +71,7 @@ impl IntrospectionContext { /// The string source of the PSL schema file. pub fn schema_string(&self) -> &str { - self.previous_schema.db.source() + self.previous_schema.db.source_assert_single() } /// The configuration block of the PSL schema file. 
diff --git a/schema-engine/connectors/sql-schema-connector/src/introspection/datamodel_calculator/context.rs b/schema-engine/connectors/sql-schema-connector/src/introspection/datamodel_calculator/context.rs index 32f2ed0a589..04dcfa7345d 100644 --- a/schema-engine/connectors/sql-schema-connector/src/introspection/datamodel_calculator/context.rs +++ b/schema-engine/connectors/sql-schema-connector/src/introspection/datamodel_calculator/context.rs @@ -11,7 +11,7 @@ use crate::introspection::{ use psl::{ builtin_connectors::*, datamodel_connector::Connector, - parser_database::{ast, walkers}, + parser_database::{self as db, walkers}, Configuration, PreviewFeature, }; use quaint::prelude::SqlFamily; @@ -363,11 +363,11 @@ impl<'a> DatamodelCalculatorContext<'a> { self.introspection_map.relation_names.m2m_relation_name(id) } - pub(crate) fn table_missing_for_model(&self, id: &ast::ModelId) -> bool { + pub(crate) fn table_missing_for_model(&self, id: &db::ModelId) -> bool { self.introspection_map.missing_tables_for_previous_models.contains(id) } - pub(crate) fn view_missing_for_model(&self, id: &ast::ModelId) -> bool { + pub(crate) fn view_missing_for_model(&self, id: &db::ModelId) -> bool { self.introspection_map.missing_views_for_previous_models.contains(id) } diff --git a/schema-engine/connectors/sql-schema-connector/src/introspection/introspection_map.rs b/schema-engine/connectors/sql-schema-connector/src/introspection/introspection_map.rs index 099408e1dcf..5fd5019213a 100644 --- a/schema-engine/connectors/sql-schema-connector/src/introspection/introspection_map.rs +++ b/schema-engine/connectors/sql-schema-connector/src/introspection/introspection_map.rs @@ -7,7 +7,7 @@ use crate::introspection::{ introspection_pair::RelationFieldDirection, sanitize_datamodel_names, }; use psl::{ - parser_database::{self, ast, ScalarFieldId}, + parser_database::{self as db, ScalarFieldId}, PreviewFeature, }; use relation_names::RelationNames; @@ -24,15 +24,15 @@ pub(crate) use 
relation_names::RelationName; /// schema. #[derive(Default)] pub(crate) struct IntrospectionMap<'a> { - pub(crate) existing_enums: HashMap, - pub(crate) existing_models: HashMap, - pub(crate) existing_views: HashMap, - pub(crate) missing_tables_for_previous_models: HashSet, - pub(crate) missing_views_for_previous_models: HashSet, + pub(crate) existing_enums: HashMap, + pub(crate) existing_models: HashMap, + pub(crate) existing_views: HashMap, + pub(crate) missing_tables_for_previous_models: HashSet, + pub(crate) missing_views_for_previous_models: HashSet, pub(crate) existing_model_scalar_fields: HashMap, pub(crate) existing_view_scalar_fields: HashMap, - pub(crate) existing_inline_relations: HashMap, - pub(crate) existing_m2m_relations: HashMap, + pub(crate) existing_inline_relations: HashMap, + pub(crate) existing_m2m_relations: HashMap, pub(crate) relation_names: RelationNames<'a>, pub(crate) inline_relation_positions: Vec<(sql::TableId, sql::ForeignKeyId, RelationFieldDirection)>, pub(crate) m2m_relation_positions: Vec<(sql::TableId, sql::ForeignKeyId, RelationFieldDirection)>, diff --git a/schema-engine/connectors/sql-schema-connector/src/introspection/introspection_pair/enumerator.rs b/schema-engine/connectors/sql-schema-connector/src/introspection/introspection_pair/enumerator.rs index b14c2c51ea3..29fff1f18c3 100644 --- a/schema-engine/connectors/sql-schema-connector/src/introspection/introspection_pair/enumerator.rs +++ b/schema-engine/connectors/sql-schema-connector/src/introspection/introspection_pair/enumerator.rs @@ -1,8 +1,8 @@ use super::IntrospectionPair; use crate::introspection::sanitize_datamodel_names::{EnumVariantName, ModelName}; use psl::{ - parser_database::walkers, - schema_ast::ast::{self, WithDocumentation}, + parser_database::{self as db, walkers}, + schema_ast::ast::WithDocumentation, }; use sql_schema_describer as sql; use std::borrow::Cow; @@ -51,7 +51,7 @@ impl<'a> EnumPair<'a> { /// The position of the enum from the PSL, if existing. 
Used for /// sorting the enums in the final introspected data model. - pub(crate) fn previous_position(self) -> Option { + pub(crate) fn previous_position(self) -> Option { self.previous.map(|e| e.id) } diff --git a/schema-engine/connectors/sql-schema-connector/src/introspection/introspection_pair/model.rs b/schema-engine/connectors/sql-schema-connector/src/introspection/introspection_pair/model.rs index 13f3b78f88e..0e907fdbefc 100644 --- a/schema-engine/connectors/sql-schema-connector/src/introspection/introspection_pair/model.rs +++ b/schema-engine/connectors/sql-schema-connector/src/introspection/introspection_pair/model.rs @@ -1,7 +1,7 @@ use psl::{ datamodel_connector::walker_ext_traits::IndexWalkerExt, - parser_database::walkers, - schema_ast::ast::{self, WithDocumentation}, + parser_database::{self as db, walkers}, + schema_ast::ast::WithDocumentation, }; use sql::postgres::PostgresSchemaExt; use sql_schema_describer as sql; @@ -18,7 +18,7 @@ pub(crate) type ModelPair<'a> = IntrospectionPair<'a, Option ModelPair<'a> { /// The position of the model from the PSL, if existing. Used for /// sorting the models in the final introspected data model. 
- pub(crate) fn previous_position(self) -> Option { + pub(crate) fn previous_position(self) -> Option { self.previous.map(|m| m.id) } diff --git a/schema-engine/connectors/sql-schema-connector/src/introspection/introspection_pair/view.rs b/schema-engine/connectors/sql-schema-connector/src/introspection/introspection_pair/view.rs index e5b58ebd3cf..ea7ac6cd30c 100644 --- a/schema-engine/connectors/sql-schema-connector/src/introspection/introspection_pair/view.rs +++ b/schema-engine/connectors/sql-schema-connector/src/introspection/introspection_pair/view.rs @@ -1,12 +1,10 @@ -use std::borrow::Cow; - +use super::{IdPair, IndexPair, IntrospectionPair, RelationFieldPair, ScalarFieldPair}; use psl::{ - parser_database::walkers, - schema_ast::ast::{self, WithDocumentation}, + parser_database::{self as db, walkers}, + schema_ast::ast::WithDocumentation, }; use sql_schema_describer as sql; - -use super::{IdPair, IndexPair, IntrospectionPair, RelationFieldPair, ScalarFieldPair}; +use std::borrow::Cow; /// Comparing a PSL view (which currently utilizes the /// model structure due to them being completely the same @@ -16,7 +14,7 @@ pub(crate) type ViewPair<'a> = IntrospectionPair<'a, Option ViewPair<'a> { /// The position of the view from the PSL, if existing. Used for /// sorting the views in the final introspected data model. 
- pub(crate) fn previous_position(self) -> Option { + pub(crate) fn previous_position(self) -> Option { self.previous.map(|m| m.id) } diff --git a/schema-engine/connectors/sql-schema-connector/src/introspection/rendering/enums.rs b/schema-engine/connectors/sql-schema-connector/src/introspection/rendering/enums.rs index fe8f2a96807..11c87ab7de0 100644 --- a/schema-engine/connectors/sql-schema-connector/src/introspection/rendering/enums.rs +++ b/schema-engine/connectors/sql-schema-connector/src/introspection/rendering/enums.rs @@ -5,11 +5,11 @@ use crate::introspection::{ sanitize_datamodel_names, }; use datamodel_renderer::datamodel as renderer; -use psl::parser_database::ast; +use psl::parser_database as db; /// Render all enums. pub(super) fn render<'a>(ctx: &'a DatamodelCalculatorContext<'a>, rendered: &mut renderer::Datamodel<'a>) { - let mut all_enums: Vec<(Option, renderer::Enum<'_>)> = Vec::new(); + let mut all_enums: Vec<(Option, renderer::Enum<'_>)> = Vec::new(); for pair in ctx.enum_pairs() { all_enums.push((pair.previous_position(), render_enum(pair))) diff --git a/schema-engine/connectors/sql-schema-connector/src/sql_schema_calculator.rs b/schema-engine/connectors/sql-schema-connector/src/sql_schema_calculator.rs index 3b36829cfcf..5ef3bb69529 100644 --- a/schema-engine/connectors/sql-schema-connector/src/sql_schema_calculator.rs +++ b/schema-engine/connectors/sql-schema-connector/src/sql_schema_calculator.rs @@ -6,7 +6,7 @@ use crate::{flavour::SqlFlavour, SqlDatabaseSchema}; use psl::{ datamodel_connector::walker_ext_traits::*, parser_database::{ - ast, + self as db, ast, walkers::{ModelWalker, ScalarFieldWalker}, ReferentialAction, ScalarFieldType, ScalarType, SortOrder, }, @@ -61,7 +61,7 @@ fn push_model_tables(ctx: &mut Context<'_>) { .schema .describer_schema .push_table(model.database_name().to_owned(), namespace_id, None); - ctx.model_id_to_table_id.insert(model.model_id(), table_id); + ctx.model_id_to_table_id.insert(model.id, table_id); for 
field in model.scalar_fields() { push_column_for_scalar_field(field, table_id, ctx); @@ -138,8 +138,8 @@ fn push_inline_relations(ctx: &mut Context<'_>) { let relation_field = relation .forward_relation_field() .expect("Expecting a complete relation in sql_schmea_calculator"); - let referencing_model = ctx.model_id_to_table_id[&relation_field.model().model_id()]; - let referenced_model = ctx.model_id_to_table_id[&relation.referenced_model().model_id()]; + let referencing_model = ctx.model_id_to_table_id[&relation_field.model().id]; + let referenced_model = ctx.model_id_to_table_id[&relation.referenced_model().id]; let on_delete_action = relation_field.explicit_on_delete().unwrap_or_else(|| { relation_field.default_on_delete_action( ctx.datamodel.configuration.relation_mode().unwrap_or_default(), @@ -193,9 +193,9 @@ fn push_relation_tables(ctx: &mut Context<'_>) { .take(datamodel.configuration.max_identifier_length()) .collect::(); let model_a = m2m.model_a(); - let model_a_table_id = ctx.model_id_to_table_id[&model_a.model_id()]; + let model_a_table_id = ctx.model_id_to_table_id[&model_a.id]; let model_b = m2m.model_b(); - let model_b_table_id = ctx.model_id_to_table_id[&model_b.model_id()]; + let model_b_table_id = ctx.model_id_to_table_id[&model_b.id]; let model_a_column = m2m.column_a_name(); let model_b_column = m2m.column_b_name(); let model_a_id = model_a.primary_key().unwrap().fields().next().unwrap(); @@ -300,7 +300,7 @@ fn push_relation_tables(ctx: &mut Context<'_>) { if ctx.datamodel.relation_mode().uses_foreign_keys() { let fkid = ctx.schema.describer_schema.push_foreign_key( Some(model_a_fk_name), - [table_id, ctx.model_id_to_table_id[&model_a.model_id()]], + [table_id, ctx.model_id_to_table_id[&model_a.id]], [flavour.m2m_foreign_key_action(model_a, model_b); 2], ); @@ -319,7 +319,7 @@ fn push_relation_tables(ctx: &mut Context<'_>) { let fkid = ctx.schema.describer_schema.push_foreign_key( Some(model_b_fk_name), - [table_id, 
ctx.model_id_to_table_id[&model_b.model_id()]], + [table_id, ctx.model_id_to_table_id[&model_b.id]], [flavour.m2m_foreign_key_action(model_a, model_b); 2], ); @@ -354,7 +354,7 @@ fn push_column_for_scalar_field(field: ScalarFieldWalker<'_>, table_id: sql::Tab fn push_column_for_model_enum_scalar_field( field: ScalarFieldWalker<'_>, - enum_id: ast::EnumId, + enum_id: db::EnumId, table_id: sql::TableId, ctx: &mut Context<'_>, ) { @@ -582,8 +582,8 @@ pub(crate) struct Context<'a> { schema: &'a mut SqlDatabaseSchema, flavour: &'a dyn SqlFlavour, schemas: HashMap<&'a str, sql::NamespaceId>, - model_id_to_table_id: HashMap, - enum_ids: HashMap, + model_id_to_table_id: HashMap, + enum_ids: HashMap, } impl Context<'_> { diff --git a/schema-engine/connectors/sql-schema-connector/src/sql_schema_calculator/sql_schema_calculator_flavour/mssql.rs b/schema-engine/connectors/sql-schema-connector/src/sql_schema_calculator/sql_schema_calculator_flavour/mssql.rs index 51a8f5ef54b..7e6b94a761a 100644 --- a/schema-engine/connectors/sql-schema-connector/src/sql_schema_calculator/sql_schema_calculator_flavour/mssql.rs +++ b/schema-engine/connectors/sql-schema-connector/src/sql_schema_calculator/sql_schema_calculator_flavour/mssql.rs @@ -27,7 +27,7 @@ impl SqlSchemaCalculatorFlavour for MssqlFlavour { let mut data = MssqlSchemaExt::default(); for model in context.datamodel.db.walk_models() { - let table_id = context.model_id_to_table_id[&model.model_id()]; + let table_id = context.model_id_to_table_id[&model.id]; let table = context.schema.walk(table_id); if model .primary_key() diff --git a/schema-engine/connectors/sql-schema-connector/src/sql_schema_calculator/sql_schema_calculator_flavour/postgres.rs b/schema-engine/connectors/sql-schema-connector/src/sql_schema_calculator/sql_schema_calculator_flavour/postgres.rs index 656fe432a97..c2193252be9 100644 --- a/schema-engine/connectors/sql-schema-connector/src/sql_schema_calculator/sql_schema_calculator_flavour/postgres.rs +++ 
b/schema-engine/connectors/sql-schema-connector/src/sql_schema_calculator/sql_schema_calculator_flavour/postgres.rs @@ -69,7 +69,7 @@ impl SqlSchemaCalculatorFlavour for PostgresFlavour { } for model in db.walk_models() { - let table_id = context.model_id_to_table_id[&model.model_id()]; + let table_id = context.model_id_to_table_id[&model.id]; // Add index algorithms and opclasses. for index in model.indexes() { diff --git a/schema-engine/core/src/state.rs b/schema-engine/core/src/state.rs index 9143ef1fb76..c376cb300fb 100644 --- a/schema-engine/core/src/state.rs +++ b/schema-engine/core/src/state.rs @@ -177,7 +177,8 @@ impl EngineState { return Err(ConnectorError::from_msg("Missing --datamodel".to_owned())); }; - self.with_connector_for_schema(schema.db.source(), None, f).await + self.with_connector_for_schema(schema.db.source_assert_single(), None, f) + .await } } diff --git a/schema-engine/sql-introspection-tests/tests/referential_actions/mysql.rs b/schema-engine/sql-introspection-tests/tests/referential_actions/mysql.rs index 7e184686c14..3f7ec20f542 100644 --- a/schema-engine/sql-introspection-tests/tests/referential_actions/mysql.rs +++ b/schema-engine/sql-introspection-tests/tests/referential_actions/mysql.rs @@ -55,7 +55,7 @@ async fn introspect_set_default_should_warn(api: &mut TestApi) -> TestResult { let warning_messages = schema .diagnostics - .warnings_to_pretty_string("schema.prisma", schema.db.source()); + .warnings_to_pretty_string("schema.prisma", schema.db.source_assert_single()); let expected_validation = expect![[r#" warning: MySQL does not actually support the `SetDefault` referential action, so using it may result in unexpected errors. 
Read more at https://pris.ly/d/mysql-set-default  From 87bc6b811aafa9b7bcdb787a2b8c8b5728cbbfb0 Mon Sep 17 00:00:00 2001 From: Serhii Tatarintsev Date: Mon, 8 Apr 2024 14:20:22 +0200 Subject: [PATCH 18/25] WIP(schema-wasm): support schema split into multiple files (#4787) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Implement multi-file schema handling in PSL This commit implements multi-file schema handling in the Prisma Schema Language. At a high level, instead of accepting a single string, `psl::validate_multi_file()` is an alternative to `psl::validate()` that accepts something morally equivalent to: ```json { "./prisma/schema/a.prisma": "datasource db { ... }", "./prisma/schema/nested/b.prisma": "model Test { ... }" } ``` There are tests for PSL validation with multiple schema files, but most of the rest of engines still consumes the single file version of `psl::validate()`. The implementation and the return type are shared between `psl::validate_multi_file()` and `psl::validate()`, so the change is completely transparent, other than the expectation of passing in a list of (file_name, file_contents) instead of a single string. The `psl::validate()` entry point should behave exactly the same as `psl::multi_schema()` with a single file named `schema.prisma`. In particular, it has the exact same return type. Implementation ============== This is achieved by extending `Span` to contain, in addition to a start and end offset, a `FileId`. The `FileId` is a unique identifier for a file and its parsed `SchemaAst` inside `ParserDatabase`. The identifier types for AST items in `ParserDatabase` are also extended to contain the `FileId`, so that they can be uniquely referred to in the context of the (multi-file) schema. After the analysis phase (the `parser_database` crate), consumers of the analyzed schema become multi-file aware completely transparently, no change is necessary in the other engines. 
The only changes that will be required at scattered points across the codebase are the `psl::validate()` call sites that will need to receive a `Vec, SourceFile>` instead of a single `SourceFile`. This PR does _not_ deal with that, but it makes where these call sites are obvious by what entry points they use: `psl::validate()`, `psl::parse_schema()` and the various `*_assert_single()` methods on `ParserDatabase`. The PR contains tests confirming that schema analysis, validation and displaying diagnostics across multiple files works as expected. Status of this PR ================= This is going to be directly mergeable after review, and it will not affect the current schema handling behaviour when dealing with a single schema file. Next steps ========== - Replace all calls to `psl::validate()` with calls to `psl::validate_multi_file()`. - The `*_assert_single()` calls should be progressively replaced with their multi-file counterparts across engines. - The language server should start sending multiple files to prisma-schema-wasm in all calls. This is not in the spirit of the language server spec, but that is the most immediate solution. We'll have to make `range_to_span()` in `prisma-fmt` multi-schema aware by taking a FileId param. Links ===== Relevant issue: https://github.com/prisma/prisma/issues/2377 Also see the [internal design doc](https://www.notion.so/prismaio/Multi-file-Schema-24d68fe8664048ad86252fe446caac24?d=68ef128f25974e619671a9855f65f44d#2889a038e68c4fe1ac9afe3cd34978bd). 
* WIP(schema-wasm): Support schema split into multiple files * Reformat support (psl crate) * Add multifile reformatting tests * Clippy * feat(prisma-fmt): addd support for mergeSchemas, expose functions to prisma-fmt-wasm * chore(prisma-fmt): removed unused function * chore: fix typo Co-authored-by: Serhii Tatarintsev * feat(prisma-fmt): apply validation to merge_schemas * chore(prisma-fmt): update unit test * chore: fix bad merge * chore: fix tests --------- Co-authored-by: Tom Houlé Co-authored-by: Alberto Schiabel Co-authored-by: jkomyno --- prisma-fmt/src/code_actions/multi_schema.rs | 3 +- prisma-fmt/src/get_config.rs | 49 +++--- prisma-fmt/src/get_dmmf.rs | 47 +++++- prisma-fmt/src/lib.rs | 10 ++ prisma-fmt/src/merge_schemas.rs | 127 +++++++++++++++ prisma-fmt/src/schema_file_input.rs | 26 +++ prisma-fmt/src/validate.rs | 93 ++++++++++- prisma-schema-wasm/src/lib.rs | 6 + psl/parser-database/src/files.rs | 7 + psl/parser-database/src/lib.rs | 22 ++- psl/psl-core/src/lib.rs | 10 +- psl/psl-core/src/reformat.rs | 150 +++++++++++++----- psl/psl/build.rs | 37 ++++- psl/psl/src/lib.rs | 2 + psl/psl/tests/multi_file/basic.rs | 2 +- psl/psl/tests/panic_with_diff/mod.rs | 7 +- psl/psl/tests/reformat_tests.rs | 71 ++++++++- .../align_blocks.reformatted/User.prisma | 5 + .../align_blocks.reformatted/db.prisma | 9 ++ .../align_blocks/User.prisma | 5 + .../align_blocks/db.prisma | 9 ++ .../relation_1_to_1.reformatted/Post.prisma | 5 + .../relation_1_to_1.reformatted/User.prisma | 7 + .../relation_1_to_1.reformatted/db.prisma | 9 ++ .../relation_1_to_1/Post.prisma | 4 + .../relation_1_to_1/User.prisma | 7 + .../relation_1_to_1/db.prisma | 9 ++ .../relation_list.reformatted/Post.prisma | 6 + .../relation_list.reformatted/User.prisma | 6 + .../relation_list.reformatted/db.prisma | 9 ++ .../relation_list/Post.prisma | 4 + .../relation_list/User.prisma | 6 + .../relation_list/db.prisma | 9 ++ .../relation_single.reformatted/Post.prisma | 5 + 
.../relation_single.reformatted/User.prisma | 7 + .../relation_single.reformatted/db.prisma | 9 ++ .../relation_single/Post.prisma | 4 + .../relation_single/User.prisma | 6 + .../relation_single/db.prisma | 9 ++ psl/psl/tests/validation_tests.rs | 2 +- psl/schema-ast/src/reformat.rs | 2 + psl/schema-ast/src/source_file.rs | 8 + query-engine/dmmf/src/lib.rs | 6 + 43 files changed, 750 insertions(+), 86 deletions(-) create mode 100644 prisma-fmt/src/merge_schemas.rs create mode 100644 prisma-fmt/src/schema_file_input.rs create mode 100644 psl/psl/tests/reformatter_multi_file/align_blocks.reformatted/User.prisma create mode 100644 psl/psl/tests/reformatter_multi_file/align_blocks.reformatted/db.prisma create mode 100644 psl/psl/tests/reformatter_multi_file/align_blocks/User.prisma create mode 100644 psl/psl/tests/reformatter_multi_file/align_blocks/db.prisma create mode 100644 psl/psl/tests/reformatter_multi_file/relation_1_to_1.reformatted/Post.prisma create mode 100644 psl/psl/tests/reformatter_multi_file/relation_1_to_1.reformatted/User.prisma create mode 100644 psl/psl/tests/reformatter_multi_file/relation_1_to_1.reformatted/db.prisma create mode 100644 psl/psl/tests/reformatter_multi_file/relation_1_to_1/Post.prisma create mode 100644 psl/psl/tests/reformatter_multi_file/relation_1_to_1/User.prisma create mode 100644 psl/psl/tests/reformatter_multi_file/relation_1_to_1/db.prisma create mode 100644 psl/psl/tests/reformatter_multi_file/relation_list.reformatted/Post.prisma create mode 100644 psl/psl/tests/reformatter_multi_file/relation_list.reformatted/User.prisma create mode 100644 psl/psl/tests/reformatter_multi_file/relation_list.reformatted/db.prisma create mode 100644 psl/psl/tests/reformatter_multi_file/relation_list/Post.prisma create mode 100644 psl/psl/tests/reformatter_multi_file/relation_list/User.prisma create mode 100644 psl/psl/tests/reformatter_multi_file/relation_list/db.prisma create mode 100644 
psl/psl/tests/reformatter_multi_file/relation_single.reformatted/Post.prisma create mode 100644 psl/psl/tests/reformatter_multi_file/relation_single.reformatted/User.prisma create mode 100644 psl/psl/tests/reformatter_multi_file/relation_single.reformatted/db.prisma create mode 100644 psl/psl/tests/reformatter_multi_file/relation_single/Post.prisma create mode 100644 psl/psl/tests/reformatter_multi_file/relation_single/User.prisma create mode 100644 psl/psl/tests/reformatter_multi_file/relation_single/db.prisma diff --git a/prisma-fmt/src/code_actions/multi_schema.rs b/prisma-fmt/src/code_actions/multi_schema.rs index 7e6aa9ceaf8..aa5aaad0517 100644 --- a/prisma-fmt/src/code_actions/multi_schema.rs +++ b/prisma-fmt/src/code_actions/multi_schema.rs @@ -147,8 +147,7 @@ pub(super) fn add_schema_to_schemas( ) } None => { - let has_properties = datasource.provider_defined() - || datasource.url_defined() + let has_properties = datasource.provider_defined() | datasource.url_defined() || datasource.direct_url_defined() || datasource.shadow_url_defined() || datasource.relation_mode_defined() diff --git a/prisma-fmt/src/get_config.rs b/prisma-fmt/src/get_config.rs index d6de194e1e8..97f714dc456 100644 --- a/prisma-fmt/src/get_config.rs +++ b/prisma-fmt/src/get_config.rs @@ -1,14 +1,14 @@ -use psl::Diagnostics; +use psl::{Diagnostics, ValidatedSchema}; use serde::Deserialize; use serde_json::json; use std::collections::HashMap; -use crate::validate::SCHEMA_PARSER_ERROR_CODE; +use crate::{schema_file_input::SchemaFileInput, validate::SCHEMA_PARSER_ERROR_CODE}; #[derive(Deserialize, Debug)] #[serde(rename_all = "camelCase")] struct GetConfigParams { - prisma_schema: String, + prisma_schema: SchemaFileInput, #[serde(default)] ignore_env_var_errors: bool, #[serde(default)] @@ -43,29 +43,38 @@ pub(crate) fn get_config(params: &str) -> Result { } fn get_config_impl(params: GetConfigParams) -> Result { - let wrap_get_config_err = |errors: Diagnostics| -> GetConfigError { - use 
std::fmt::Write as _; - - let mut full_error = errors.to_pretty_string("schema.prisma", ¶ms.prisma_schema); - write!(full_error, "\nValidation Error Count: {}", errors.errors().len()).unwrap(); - - GetConfigError { - // this mirrors user_facing_errors::common::SchemaParserError - error_code: Some(SCHEMA_PARSER_ERROR_CODE), - message: full_error, - } - }; - - let mut config = psl::parse_configuration(¶ms.prisma_schema).map_err(wrap_get_config_err)?; + let mut schema = psl::validate_multi_file(params.prisma_schema.into()); + if schema.diagnostics.has_errors() { + return Err(create_get_config_error(&schema, &schema.diagnostics)); + } if !params.ignore_env_var_errors { let overrides: Vec<(_, _)> = params.datasource_overrides.into_iter().collect(); - config + schema + .configuration .resolve_datasource_urls_prisma_fmt(&overrides, |key| params.env.get(key).map(String::from)) - .map_err(wrap_get_config_err)?; + .map_err(|diagnostics| create_get_config_error(&schema, &diagnostics))?; } - Ok(psl::get_config(&config)) + Ok(psl::get_config(&schema.configuration)) +} + +fn create_get_config_error(schema: &ValidatedSchema, diagnostics: &Diagnostics) -> GetConfigError { + use std::fmt::Write as _; + + let mut rendered_diagnostics = schema.render_diagnostics(diagnostics); + write!( + rendered_diagnostics, + "\nValidation Error Count: {}", + diagnostics.errors().len() + ) + .unwrap(); + + GetConfigError { + // this mirrors user_facing_errors::common::SchemaParserError + error_code: Some(SCHEMA_PARSER_ERROR_CODE), + message: rendered_diagnostics, + } } #[cfg(test)] diff --git a/prisma-fmt/src/get_dmmf.rs b/prisma-fmt/src/get_dmmf.rs index 02eec126d17..151cb7691ee 100644 --- a/prisma-fmt/src/get_dmmf.rs +++ b/prisma-fmt/src/get_dmmf.rs @@ -1,11 +1,11 @@ use serde::Deserialize; -use crate::validate; +use crate::{schema_file_input::SchemaFileInput, validate}; #[derive(Deserialize, Debug)] #[serde(rename_all = "camelCase")] struct GetDmmfParams { - prisma_schema: String, + 
prisma_schema: SchemaFileInput, #[serde(default)] no_color: bool, } @@ -18,7 +18,7 @@ pub(crate) fn get_dmmf(params: &str) -> Result { } }; - validate::run(¶ms.prisma_schema, params.no_color).map(|_| dmmf::dmmf_json_from_schema(¶ms.prisma_schema)) + validate::run(params.prisma_schema, params.no_color).map(dmmf::dmmf_json_from_validated_schema) } #[cfg(test)] @@ -90,6 +90,47 @@ mod tests { expected.assert_eq(&response); } + #[test] + fn get_dmmf_multiple_files() { + let schema = vec![ + ( + "a.prisma", + r#" + datasource thedb { + provider = "postgresql" + url = env("DBURL") + } + + model A { + id String @id + b_id String @unique + b B @relation(fields: [b_id], references: [id]) + } + "#, + ), + ( + "b.prisma", + r#" + model B { + id String @id + a A? + } + "#, + ), + ]; + + let request = json!({ + "prismaSchema": schema, + }); + + let expected = expect![[ + r#"{"datamodel":{"enums":[],"models":[{"name":"A","dbName":null,"fields":[{"name":"id","kind":"scalar","isList":false,"isRequired":true,"isUnique":false,"isId":true,"isReadOnly":false,"hasDefaultValue":false,"type":"String","isGenerated":false,"isUpdatedAt":false},{"name":"b_id","kind":"scalar","isList":false,"isRequired":true,"isUnique":true,"isId":false,"isReadOnly":true,"hasDefaultValue":false,"type":"String","isGenerated":false,"isUpdatedAt":false},{"name":"b","kind":"object","isList":false,"isRequired":true,"isUnique":false,"isId":false,"isReadOnly":false,"hasDefaultValue":false,"type":"B","relationName":"AToB","relationFromFields":["b_id"],"relationToFields":["id"],"isGenerated":false,"isUpdatedAt":false}],"primaryKey":null,"uniqueFields":[],"uniqueIndexes":[],"isGenerated":false},{"name":"B","dbName":null,"fields":[{"name":"id","kind":"scalar","isList":false,"isRequired":true,"isUnique":false,"isId":true,"isReadOnly":false,"hasDefaultValue":false,"type":"String","isGenerated":false,"isUpdatedAt":false},{"name":"a","kind":"object","isList":false,"isRequired":false,"isUnique":false,"isId":false,"isReadOnly":
false,"hasDefaultValue":false,"type":"A","relationName":"AToB","relationFromFields":[],"relationToFields":[],"isGenerated":false,"isUpdatedAt":false}],"primaryKey":null,"uniqueFields":[],"uniqueIndexes":[],"isGenerated":false}],"types":[]},"schema":{"inputObjectTypes":{"prisma":[{"name":"AWhereInput","meta":{"source":"A"},"constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"AND","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"AWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":true}]},{"name":"OR","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":true}]},{"name":"NOT","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"AWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":true}]},{"name":"id","isRequired":false,"isNullable":false,"inputTypes":[{"type":"StringFilter","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"String","location":"scalar","isList":false}]},{"name":"b_id","isRequired":false,"isNullable":false,"inputTypes":[{"type":"StringFilter","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"String","location":"scalar","isList":false}]},{"name":"b","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BRelationFilter","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"BWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}]},{"name":"AOrderByWithRelationInput","constraints":{"maxNumFields":1,"minNumFields":0},"fields":[{"name":"id","isRequired":false,"isNullable":false,"inputTypes":[{"type":"SortOrder","namespace":"prisma","location":"enumTypes","isList":false}]},{"name":"b_id","isRequired":false,"isNullable":fal
se,"inputTypes":[{"type":"SortOrder","namespace":"prisma","location":"enumTypes","isList":false}]},{"name":"b","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BOrderByWithRelationInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}]},{"name":"AWhereUniqueInput","meta":{"source":"A"},"constraints":{"maxNumFields":null,"minNumFields":1,"fields":["id","b_id"]},"fields":[{"name":"id","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false}]},{"name":"b_id","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false}]},{"name":"AND","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"AWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":true}]},{"name":"OR","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":true}]},{"name":"NOT","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"AWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":true}]},{"name":"b","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BRelationFilter","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"BWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}]},{"name":"AOrderByWithAggregationInput","constraints":{"maxNumFields":1,"minNumFields":0},"fields":[{"name":"id","isRequired":false,"isNullable":false,"inputTypes":[{"type":"SortOrder","namespace":"prisma","location":"enumTypes","isList":false}]},{"name":"b_id","isRequired":false,"isNullable":false,"inputTypes":[{"type":"SortOrder","namespace":"prisma","location":"enumTypes","isList":false}]},{"name":"_count","isRequired":false,"isNullable":false,"inputT
ypes":[{"type":"ACountOrderByAggregateInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"_max","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AMaxOrderByAggregateInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"_min","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AMinOrderByAggregateInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}]},{"name":"AScalarWhereWithAggregatesInput","meta":{"source":"A"},"constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"AND","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AScalarWhereWithAggregatesInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"AScalarWhereWithAggregatesInput","namespace":"prisma","location":"inputObjectTypes","isList":true}]},{"name":"OR","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AScalarWhereWithAggregatesInput","namespace":"prisma","location":"inputObjectTypes","isList":true}]},{"name":"NOT","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AScalarWhereWithAggregatesInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"AScalarWhereWithAggregatesInput","namespace":"prisma","location":"inputObjectTypes","isList":true}]},{"name":"id","isRequired":false,"isNullable":false,"inputTypes":[{"type":"StringWithAggregatesFilter","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"String","location":"scalar","isList":false}]},{"name":"b_id","isRequired":false,"isNullable":false,"inputTypes":[{"type":"StringWithAggregatesFilter","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"String","location":"scalar","isList":false}]}]},{"name":"BWhereInput","meta":{"source":"B"},"constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"AND","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BWhereInput","namespace":"pr
isma","location":"inputObjectTypes","isList":false},{"type":"BWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":true}]},{"name":"OR","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":true}]},{"name":"NOT","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"BWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":true}]},{"name":"id","isRequired":false,"isNullable":false,"inputTypes":[{"type":"StringFilter","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"String","location":"scalar","isList":false}]},{"name":"a","isRequired":false,"isNullable":true,"inputTypes":[{"type":"ANullableRelationFilter","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"AWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"Null","location":"scalar","isList":false}]}]},{"name":"BOrderByWithRelationInput","constraints":{"maxNumFields":1,"minNumFields":0},"fields":[{"name":"id","isRequired":false,"isNullable":false,"inputTypes":[{"type":"SortOrder","namespace":"prisma","location":"enumTypes","isList":false}]},{"name":"a","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AOrderByWithRelationInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}]},{"name":"BWhereUniqueInput","meta":{"source":"B"},"constraints":{"maxNumFields":null,"minNumFields":1,"fields":["id"]},"fields":[{"name":"id","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false}]},{"name":"AND","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"BWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":true}]},{"name":"OR","isRequired":false,
"isNullable":false,"inputTypes":[{"type":"BWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":true}]},{"name":"NOT","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"BWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":true}]},{"name":"a","isRequired":false,"isNullable":true,"inputTypes":[{"type":"ANullableRelationFilter","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"AWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"Null","location":"scalar","isList":false}]}]},{"name":"BOrderByWithAggregationInput","constraints":{"maxNumFields":1,"minNumFields":0},"fields":[{"name":"id","isRequired":false,"isNullable":false,"inputTypes":[{"type":"SortOrder","namespace":"prisma","location":"enumTypes","isList":false}]},{"name":"_count","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BCountOrderByAggregateInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"_max","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BMaxOrderByAggregateInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"_min","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BMinOrderByAggregateInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}]},{"name":"BScalarWhereWithAggregatesInput","meta":{"source":"B"},"constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"AND","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BScalarWhereWithAggregatesInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"BScalarWhereWithAggregatesInput","namespace":"prisma","location":"inputObjectTypes","isList":true}]},{"name":"OR","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BScalarWhereWithAggregatesInput","namespace":"prisma","location":"i
nputObjectTypes","isList":true}]},{"name":"NOT","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BScalarWhereWithAggregatesInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"BScalarWhereWithAggregatesInput","namespace":"prisma","location":"inputObjectTypes","isList":true}]},{"name":"id","isRequired":false,"isNullable":false,"inputTypes":[{"type":"StringWithAggregatesFilter","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"String","location":"scalar","isList":false}]}]},{"name":"ACreateInput","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"id","isRequired":true,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false}]},{"name":"b","isRequired":true,"isNullable":false,"inputTypes":[{"type":"BCreateNestedOneWithoutAInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}]},{"name":"AUncheckedCreateInput","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"id","isRequired":true,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false}]},{"name":"b_id","isRequired":true,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false}]}]},{"name":"AUpdateInput","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"id","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldUpdateOperationsInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"b","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BUpdateOneRequiredWithoutANestedInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}]},{"name":"AUncheckedUpdateInput","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"id","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"String
FieldUpdateOperationsInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"b_id","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldUpdateOperationsInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}]},{"name":"ACreateManyInput","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"id","isRequired":true,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false}]},{"name":"b_id","isRequired":true,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false}]}]},{"name":"AUpdateManyMutationInput","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"id","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldUpdateOperationsInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}]},{"name":"AUncheckedUpdateManyInput","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"id","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldUpdateOperationsInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"b_id","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldUpdateOperationsInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}]},{"name":"BCreateInput","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"id","isRequired":true,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false}]},{"name":"a","isRequired":false,"isNullable":false,"inputTypes":[{"type":"ACreateNestedOneWithoutBInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}]},{"name":"BUncheckedCreateInput","constraints":{"maxN
umFields":null,"minNumFields":null},"fields":[{"name":"id","isRequired":true,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false}]},{"name":"a","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AUncheckedCreateNestedOneWithoutBInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}]},{"name":"BUpdateInput","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"id","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldUpdateOperationsInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"a","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AUpdateOneWithoutBNestedInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}]},{"name":"BUncheckedUpdateInput","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"id","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldUpdateOperationsInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"a","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AUncheckedUpdateOneWithoutBNestedInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}]},{"name":"BCreateManyInput","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"id","isRequired":true,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false}]}]},{"name":"BUpdateManyMutationInput","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"id","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldUpdateOperationsInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}]},{"name":"BUncheckedUpdateManyInput","constraints":{"maxNumFields":null,"minNumFields":null},"field
s":[{"name":"id","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldUpdateOperationsInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}]},{"name":"StringFilter","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"equals","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"in","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":true},{"type":"ListStringFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"notIn","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":true},{"type":"ListStringFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"lt","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"lte","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"gt","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"gte","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"contains","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldRefInput","namespa
ce":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"startsWith","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"endsWith","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"mode","isRequired":false,"isNullable":false,"inputTypes":[{"type":"QueryMode","namespace":"prisma","location":"enumTypes","isList":false}]},{"name":"not","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"NestedStringFilter","namespace":"prisma","location":"inputObjectTypes","isList":false}]}]},{"name":"BRelationFilter","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"is","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"isNot","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}]},{"name":"ACountOrderByAggregateInput","constraints":{"maxNumFields":1,"minNumFields":1},"fields":[{"name":"id","isRequired":false,"isNullable":false,"inputTypes":[{"type":"SortOrder","namespace":"prisma","location":"enumTypes","isList":false}]},{"name":"b_id","isRequired":false,"isNullable":false,"inputTypes":[{"type":"SortOrder","namespace":"prisma","location":"enumTypes","isList":false}]}]},{"name":"AMaxOrderByAggregateInput","constraints":{"maxNumFields":1,"minNumFields":1},"fields":[{"name":"id","isRequired":false,"isNullable":false,"inputTypes":[{"type":"SortOrder","namespace":"prisma","location":"enumTypes","isList":false}]},{"name":"b_id","isRequired":false,"isNullable":false,"inputTypes":[{"type":"SortOrde
r","namespace":"prisma","location":"enumTypes","isList":false}]}]},{"name":"AMinOrderByAggregateInput","constraints":{"maxNumFields":1,"minNumFields":1},"fields":[{"name":"id","isRequired":false,"isNullable":false,"inputTypes":[{"type":"SortOrder","namespace":"prisma","location":"enumTypes","isList":false}]},{"name":"b_id","isRequired":false,"isNullable":false,"inputTypes":[{"type":"SortOrder","namespace":"prisma","location":"enumTypes","isList":false}]}]},{"name":"StringWithAggregatesFilter","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"equals","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"in","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":true},{"type":"ListStringFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"notIn","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":true},{"type":"ListStringFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"lt","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"lte","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"gt","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"gte","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFiel
dRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"contains","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"startsWith","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"endsWith","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"mode","isRequired":false,"isNullable":false,"inputTypes":[{"type":"QueryMode","namespace":"prisma","location":"enumTypes","isList":false}]},{"name":"not","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"NestedStringWithAggregatesFilter","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"_count","isRequired":false,"isNullable":false,"inputTypes":[{"type":"NestedIntFilter","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"_min","isRequired":false,"isNullable":false,"inputTypes":[{"type":"NestedStringFilter","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"_max","isRequired":false,"isNullable":false,"inputTypes":[{"type":"NestedStringFilter","namespace":"prisma","location":"inputObjectTypes","isList":false}]}]},{"name":"ANullableRelationFilter","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"is","isRequired":false,"isNullable":true,"inputTypes":[{"type":"AWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"Null","location":"scalar","isList":false}]},{"name":"isNot","isRequired":false,"isNullable":true,"inputTypes":[{"type":"AWh
ereInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"Null","location":"scalar","isList":false}]}]},{"name":"BCountOrderByAggregateInput","constraints":{"maxNumFields":1,"minNumFields":1},"fields":[{"name":"id","isRequired":false,"isNullable":false,"inputTypes":[{"type":"SortOrder","namespace":"prisma","location":"enumTypes","isList":false}]}]},{"name":"BMaxOrderByAggregateInput","constraints":{"maxNumFields":1,"minNumFields":1},"fields":[{"name":"id","isRequired":false,"isNullable":false,"inputTypes":[{"type":"SortOrder","namespace":"prisma","location":"enumTypes","isList":false}]}]},{"name":"BMinOrderByAggregateInput","constraints":{"maxNumFields":1,"minNumFields":1},"fields":[{"name":"id","isRequired":false,"isNullable":false,"inputTypes":[{"type":"SortOrder","namespace":"prisma","location":"enumTypes","isList":false}]}]},{"name":"BCreateNestedOneWithoutAInput","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"create","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BCreateWithoutAInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"BUncheckedCreateWithoutAInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"connectOrCreate","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BCreateOrConnectWithoutAInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"connect","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BWhereUniqueInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}]},{"name":"StringFieldUpdateOperationsInput","constraints":{"maxNumFields":1,"minNumFields":1},"fields":[{"name":"set","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false}]}]},{"name":"BUpdateOneRequiredWithoutANestedInput","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"create","isRequired":false,"isNullable":fals
e,"inputTypes":[{"type":"BCreateWithoutAInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"BUncheckedCreateWithoutAInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"connectOrCreate","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BCreateOrConnectWithoutAInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"upsert","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BUpsertWithoutAInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"connect","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BWhereUniqueInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"update","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BUpdateToOneWithWhereWithoutAInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"BUpdateWithoutAInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"BUncheckedUpdateWithoutAInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}]},{"name":"ACreateNestedOneWithoutBInput","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"create","isRequired":false,"isNullable":false,"inputTypes":[{"type":"ACreateWithoutBInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"AUncheckedCreateWithoutBInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"connectOrCreate","isRequired":false,"isNullable":false,"inputTypes":[{"type":"ACreateOrConnectWithoutBInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"connect","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AWhereUniqueInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}]},{"name":"AUncheckedCreateNestedOneWithoutBInput","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"cr
eate","isRequired":false,"isNullable":false,"inputTypes":[{"type":"ACreateWithoutBInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"AUncheckedCreateWithoutBInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"connectOrCreate","isRequired":false,"isNullable":false,"inputTypes":[{"type":"ACreateOrConnectWithoutBInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"connect","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AWhereUniqueInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}]},{"name":"AUpdateOneWithoutBNestedInput","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"create","isRequired":false,"isNullable":false,"inputTypes":[{"type":"ACreateWithoutBInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"AUncheckedCreateWithoutBInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"connectOrCreate","isRequired":false,"isNullable":false,"inputTypes":[{"type":"ACreateOrConnectWithoutBInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"upsert","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AUpsertWithoutBInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"disconnect","isRequired":false,"isNullable":false,"inputTypes":[{"type":"Boolean","location":"scalar","isList":false},{"type":"AWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"delete","isRequired":false,"isNullable":false,"inputTypes":[{"type":"Boolean","location":"scalar","isList":false},{"type":"AWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"connect","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AWhereUniqueInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"update","isRequired":false,"isNul
lable":false,"inputTypes":[{"type":"AUpdateToOneWithWhereWithoutBInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"AUpdateWithoutBInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"AUncheckedUpdateWithoutBInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}]},{"name":"AUncheckedUpdateOneWithoutBNestedInput","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"create","isRequired":false,"isNullable":false,"inputTypes":[{"type":"ACreateWithoutBInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"AUncheckedCreateWithoutBInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"connectOrCreate","isRequired":false,"isNullable":false,"inputTypes":[{"type":"ACreateOrConnectWithoutBInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"upsert","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AUpsertWithoutBInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"disconnect","isRequired":false,"isNullable":false,"inputTypes":[{"type":"Boolean","location":"scalar","isList":false},{"type":"AWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"delete","isRequired":false,"isNullable":false,"inputTypes":[{"type":"Boolean","location":"scalar","isList":false},{"type":"AWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"connect","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AWhereUniqueInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"update","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AUpdateToOneWithWhereWithoutBInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"AUpdateWithoutBInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"AUncheckedUpdateWi
thoutBInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}]},{"name":"NestedStringFilter","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"equals","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"in","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":true},{"type":"ListStringFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"notIn","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":true},{"type":"ListStringFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"lt","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"lte","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"gt","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"gte","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"contains","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"startsWith","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String
","location":"scalar","isList":false},{"type":"StringFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"endsWith","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"not","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"NestedStringFilter","namespace":"prisma","location":"inputObjectTypes","isList":false}]}]},{"name":"NestedStringWithAggregatesFilter","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"equals","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"in","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":true},{"type":"ListStringFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"notIn","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":true},{"type":"ListStringFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"lt","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"lte","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"gt","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"gte","isReq
uired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"contains","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"startsWith","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"endsWith","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"not","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"NestedStringWithAggregatesFilter","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"_count","isRequired":false,"isNullable":false,"inputTypes":[{"type":"NestedIntFilter","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"_min","isRequired":false,"isNullable":false,"inputTypes":[{"type":"NestedStringFilter","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"_max","isRequired":false,"isNullable":false,"inputTypes":[{"type":"NestedStringFilter","namespace":"prisma","location":"inputObjectTypes","isList":false}]}]},{"name":"NestedIntFilter","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"equals","isRequired":false,"isNullable":false,"inputTypes":[{"type":"Int","location":"scalar","isList":false},{"type":"IntFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"in","isRequired":false,"isNullable":false,"inputTypes":[{"type":"Int","location":"scalar","isList":true
},{"type":"ListIntFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"notIn","isRequired":false,"isNullable":false,"inputTypes":[{"type":"Int","location":"scalar","isList":true},{"type":"ListIntFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"lt","isRequired":false,"isNullable":false,"inputTypes":[{"type":"Int","location":"scalar","isList":false},{"type":"IntFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"lte","isRequired":false,"isNullable":false,"inputTypes":[{"type":"Int","location":"scalar","isList":false},{"type":"IntFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"gt","isRequired":false,"isNullable":false,"inputTypes":[{"type":"Int","location":"scalar","isList":false},{"type":"IntFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"gte","isRequired":false,"isNullable":false,"inputTypes":[{"type":"Int","location":"scalar","isList":false},{"type":"IntFieldRefInput","namespace":"prisma","location":"fieldRefTypes","isList":false}]},{"name":"not","isRequired":false,"isNullable":false,"inputTypes":[{"type":"Int","location":"scalar","isList":false},{"type":"NestedIntFilter","namespace":"prisma","location":"inputObjectTypes","isList":false}]}]},{"name":"BCreateWithoutAInput","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"id","isRequired":true,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false}]}]},{"name":"BUncheckedCreateWithoutAInput","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"id","isRequired":true,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false}]}]},{"name":"BCreateOrConnectWithoutAInput","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"where","isRequired":true,"isNullable":false,"inputTypes":[{"type":"BWhereUniqueI
nput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"create","isRequired":true,"isNullable":false,"inputTypes":[{"type":"BCreateWithoutAInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"BUncheckedCreateWithoutAInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}]},{"name":"BUpsertWithoutAInput","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"update","isRequired":true,"isNullable":false,"inputTypes":[{"type":"BUpdateWithoutAInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"BUncheckedUpdateWithoutAInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"create","isRequired":true,"isNullable":false,"inputTypes":[{"type":"BCreateWithoutAInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"BUncheckedCreateWithoutAInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"where","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}]},{"name":"BUpdateToOneWithWhereWithoutAInput","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"where","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"data","isRequired":true,"isNullable":false,"inputTypes":[{"type":"BUpdateWithoutAInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"BUncheckedUpdateWithoutAInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}]},{"name":"BUpdateWithoutAInput","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"id","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldUpdateOperationsInput","namespace":"prisma","location":"inpu
tObjectTypes","isList":false}]}]},{"name":"BUncheckedUpdateWithoutAInput","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"id","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldUpdateOperationsInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}]},{"name":"ACreateWithoutBInput","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"id","isRequired":true,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false}]}]},{"name":"AUncheckedCreateWithoutBInput","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"id","isRequired":true,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false}]}]},{"name":"ACreateOrConnectWithoutBInput","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"where","isRequired":true,"isNullable":false,"inputTypes":[{"type":"AWhereUniqueInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"create","isRequired":true,"isNullable":false,"inputTypes":[{"type":"ACreateWithoutBInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"AUncheckedCreateWithoutBInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}]},{"name":"AUpsertWithoutBInput","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"update","isRequired":true,"isNullable":false,"inputTypes":[{"type":"AUpdateWithoutBInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"AUncheckedUpdateWithoutBInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"create","isRequired":true,"isNullable":false,"inputTypes":[{"type":"ACreateWithoutBInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"AUncheckedCreateWithoutBInput","namespace":"prisma","location":"inputObjectTypes","isList
":false}]},{"name":"where","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}]},{"name":"AUpdateToOneWithWhereWithoutBInput","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"where","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"data","isRequired":true,"isNullable":false,"inputTypes":[{"type":"AUpdateWithoutBInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"AUncheckedUpdateWithoutBInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}]},{"name":"AUpdateWithoutBInput","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"id","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldUpdateOperationsInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}]},{"name":"AUncheckedUpdateWithoutBInput","constraints":{"maxNumFields":null,"minNumFields":null},"fields":[{"name":"id","isRequired":false,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false},{"type":"StringFieldUpdateOperationsInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}]}]},"outputObjectTypes":{"prisma":[{"name":"Query","fields":[{"name":"findFirstA","args":[{"name":"where","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"orderBy","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AOrderByWithRelationInput","namespace":"prisma","location":"inputObjectTypes","isList":true},{"type":"AOrderByWithRelationInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"cursor","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AWhereUniqueIn
put","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"take","isRequired":false,"isNullable":false,"inputTypes":[{"type":"Int","location":"scalar","isList":false}]},{"name":"skip","isRequired":false,"isNullable":false,"inputTypes":[{"type":"Int","location":"scalar","isList":false}]},{"name":"distinct","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AScalarFieldEnum","namespace":"prisma","location":"enumTypes","isList":false},{"type":"AScalarFieldEnum","namespace":"prisma","location":"enumTypes","isList":true}]}],"isNullable":true,"outputType":{"type":"A","namespace":"model","location":"outputObjectTypes","isList":false}},{"name":"findFirstAOrThrow","args":[{"name":"where","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"orderBy","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AOrderByWithRelationInput","namespace":"prisma","location":"inputObjectTypes","isList":true},{"type":"AOrderByWithRelationInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"cursor","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AWhereUniqueInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"take","isRequired":false,"isNullable":false,"inputTypes":[{"type":"Int","location":"scalar","isList":false}]},{"name":"skip","isRequired":false,"isNullable":false,"inputTypes":[{"type":"Int","location":"scalar","isList":false}]},{"name":"distinct","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AScalarFieldEnum","namespace":"prisma","location":"enumTypes","isList":false},{"type":"AScalarFieldEnum","namespace":"prisma","location":"enumTypes","isList":true}]}],"isNullable":true,"outputType":{"type":"A","namespace":"model","location":"outputObjectTypes","isList":false}},{"name":"findManyA","args":[{"name":"where","isRequired":false,"isNullable":false,"inputTypes":[
{"type":"AWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"orderBy","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AOrderByWithRelationInput","namespace":"prisma","location":"inputObjectTypes","isList":true},{"type":"AOrderByWithRelationInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"cursor","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AWhereUniqueInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"take","isRequired":false,"isNullable":false,"inputTypes":[{"type":"Int","location":"scalar","isList":false}]},{"name":"skip","isRequired":false,"isNullable":false,"inputTypes":[{"type":"Int","location":"scalar","isList":false}]},{"name":"distinct","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AScalarFieldEnum","namespace":"prisma","location":"enumTypes","isList":false},{"type":"AScalarFieldEnum","namespace":"prisma","location":"enumTypes","isList":true}]}],"isNullable":false,"outputType":{"type":"A","namespace":"model","location":"outputObjectTypes","isList":true}},{"name":"aggregateA","args":[{"name":"where","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"orderBy","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AOrderByWithRelationInput","namespace":"prisma","location":"inputObjectTypes","isList":true},{"type":"AOrderByWithRelationInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"cursor","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AWhereUniqueInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"take","isRequired":false,"isNullable":false,"inputTypes":[{"type":"Int","location":"scalar","isList":false}]},{"name":"skip","isRequired":false,"isNullable":false,"inputTypes":[{"type":"Int","location":"scalar","isList":false}]}],
"isNullable":false,"outputType":{"type":"AggregateA","namespace":"prisma","location":"outputObjectTypes","isList":false}},{"name":"groupByA","args":[{"name":"where","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"orderBy","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AOrderByWithAggregationInput","namespace":"prisma","location":"inputObjectTypes","isList":true},{"type":"AOrderByWithAggregationInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"by","isRequired":true,"isNullable":false,"inputTypes":[{"type":"AScalarFieldEnum","namespace":"prisma","location":"enumTypes","isList":true},{"type":"AScalarFieldEnum","namespace":"prisma","location":"enumTypes","isList":false}]},{"name":"having","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AScalarWhereWithAggregatesInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"take","isRequired":false,"isNullable":false,"inputTypes":[{"type":"Int","location":"scalar","isList":false}]},{"name":"skip","isRequired":false,"isNullable":false,"inputTypes":[{"type":"Int","location":"scalar","isList":false}]}],"isNullable":false,"outputType":{"type":"AGroupByOutputType","namespace":"prisma","location":"outputObjectTypes","isList":true}},{"name":"findUniqueA","args":[{"name":"where","isRequired":true,"isNullable":false,"inputTypes":[{"type":"AWhereUniqueInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}],"isNullable":true,"outputType":{"type":"A","namespace":"model","location":"outputObjectTypes","isList":false}},{"name":"findUniqueAOrThrow","args":[{"name":"where","isRequired":true,"isNullable":false,"inputTypes":[{"type":"AWhereUniqueInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}],"isNullable":true,"outputType":{"type":"A","namespace":"model","location":"outputObjectTypes","isList":false}},{"n
ame":"findFirstB","args":[{"name":"where","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"orderBy","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BOrderByWithRelationInput","namespace":"prisma","location":"inputObjectTypes","isList":true},{"type":"BOrderByWithRelationInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"cursor","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BWhereUniqueInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"take","isRequired":false,"isNullable":false,"inputTypes":[{"type":"Int","location":"scalar","isList":false}]},{"name":"skip","isRequired":false,"isNullable":false,"inputTypes":[{"type":"Int","location":"scalar","isList":false}]},{"name":"distinct","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BScalarFieldEnum","namespace":"prisma","location":"enumTypes","isList":false},{"type":"BScalarFieldEnum","namespace":"prisma","location":"enumTypes","isList":true}]}],"isNullable":true,"outputType":{"type":"B","namespace":"model","location":"outputObjectTypes","isList":false}},{"name":"findFirstBOrThrow","args":[{"name":"where","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"orderBy","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BOrderByWithRelationInput","namespace":"prisma","location":"inputObjectTypes","isList":true},{"type":"BOrderByWithRelationInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"cursor","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BWhereUniqueInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"take","isRequired":false,"isNullable":false,"inputTypes":[{"type":"Int","location":"scalar","isList":false}]},{"name":"skip","isR
equired":false,"isNullable":false,"inputTypes":[{"type":"Int","location":"scalar","isList":false}]},{"name":"distinct","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BScalarFieldEnum","namespace":"prisma","location":"enumTypes","isList":false},{"type":"BScalarFieldEnum","namespace":"prisma","location":"enumTypes","isList":true}]}],"isNullable":true,"outputType":{"type":"B","namespace":"model","location":"outputObjectTypes","isList":false}},{"name":"findManyB","args":[{"name":"where","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"orderBy","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BOrderByWithRelationInput","namespace":"prisma","location":"inputObjectTypes","isList":true},{"type":"BOrderByWithRelationInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"cursor","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BWhereUniqueInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"take","isRequired":false,"isNullable":false,"inputTypes":[{"type":"Int","location":"scalar","isList":false}]},{"name":"skip","isRequired":false,"isNullable":false,"inputTypes":[{"type":"Int","location":"scalar","isList":false}]},{"name":"distinct","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BScalarFieldEnum","namespace":"prisma","location":"enumTypes","isList":false},{"type":"BScalarFieldEnum","namespace":"prisma","location":"enumTypes","isList":true}]}],"isNullable":false,"outputType":{"type":"B","namespace":"model","location":"outputObjectTypes","isList":true}},{"name":"aggregateB","args":[{"name":"where","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"orderBy","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BOrderByWithRelationInput","namespace":"prisma","
location":"inputObjectTypes","isList":true},{"type":"BOrderByWithRelationInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"cursor","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BWhereUniqueInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"take","isRequired":false,"isNullable":false,"inputTypes":[{"type":"Int","location":"scalar","isList":false}]},{"name":"skip","isRequired":false,"isNullable":false,"inputTypes":[{"type":"Int","location":"scalar","isList":false}]}],"isNullable":false,"outputType":{"type":"AggregateB","namespace":"prisma","location":"outputObjectTypes","isList":false}},{"name":"groupByB","args":[{"name":"where","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"orderBy","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BOrderByWithAggregationInput","namespace":"prisma","location":"inputObjectTypes","isList":true},{"type":"BOrderByWithAggregationInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"by","isRequired":true,"isNullable":false,"inputTypes":[{"type":"BScalarFieldEnum","namespace":"prisma","location":"enumTypes","isList":true},{"type":"BScalarFieldEnum","namespace":"prisma","location":"enumTypes","isList":false}]},{"name":"having","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BScalarWhereWithAggregatesInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"take","isRequired":false,"isNullable":false,"inputTypes":[{"type":"Int","location":"scalar","isList":false}]},{"name":"skip","isRequired":false,"isNullable":false,"inputTypes":[{"type":"Int","location":"scalar","isList":false}]}],"isNullable":false,"outputType":{"type":"BGroupByOutputType","namespace":"prisma","location":"outputObjectTypes","isList":true}},{"name":"findUniqueB","args":[{"name":"where","isRequired":true,"isN
ullable":false,"inputTypes":[{"type":"BWhereUniqueInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}],"isNullable":true,"outputType":{"type":"B","namespace":"model","location":"outputObjectTypes","isList":false}},{"name":"findUniqueBOrThrow","args":[{"name":"where","isRequired":true,"isNullable":false,"inputTypes":[{"type":"BWhereUniqueInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}],"isNullable":true,"outputType":{"type":"B","namespace":"model","location":"outputObjectTypes","isList":false}}]},{"name":"Mutation","fields":[{"name":"createOneA","args":[{"name":"data","isRequired":true,"isNullable":false,"inputTypes":[{"type":"ACreateInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"AUncheckedCreateInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}],"isNullable":false,"outputType":{"type":"A","namespace":"model","location":"outputObjectTypes","isList":false}},{"name":"upsertOneA","args":[{"name":"where","isRequired":true,"isNullable":false,"inputTypes":[{"type":"AWhereUniqueInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"create","isRequired":true,"isNullable":false,"inputTypes":[{"type":"ACreateInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"AUncheckedCreateInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"update","isRequired":true,"isNullable":false,"inputTypes":[{"type":"AUpdateInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"AUncheckedUpdateInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}],"isNullable":false,"outputType":{"type":"A","namespace":"model","location":"outputObjectTypes","isList":false}},{"name":"createManyA","args":[{"name":"data","isRequired":true,"isNullable":false,"inputTypes":[{"type":"ACreateManyInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"typ
e":"ACreateManyInput","namespace":"prisma","location":"inputObjectTypes","isList":true}]},{"name":"skipDuplicates","isRequired":false,"isNullable":false,"inputTypes":[{"type":"Boolean","location":"scalar","isList":false}]}],"isNullable":false,"outputType":{"type":"AffectedRowsOutput","namespace":"prisma","location":"outputObjectTypes","isList":false}},{"name":"deleteOneA","args":[{"name":"where","isRequired":true,"isNullable":false,"inputTypes":[{"type":"AWhereUniqueInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}],"isNullable":true,"outputType":{"type":"A","namespace":"model","location":"outputObjectTypes","isList":false}},{"name":"updateOneA","args":[{"name":"data","isRequired":true,"isNullable":false,"inputTypes":[{"type":"AUpdateInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"AUncheckedUpdateInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"where","isRequired":true,"isNullable":false,"inputTypes":[{"type":"AWhereUniqueInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}],"isNullable":true,"outputType":{"type":"A","namespace":"model","location":"outputObjectTypes","isList":false}},{"name":"updateManyA","args":[{"name":"data","isRequired":true,"isNullable":false,"inputTypes":[{"type":"AUpdateManyMutationInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"AUncheckedUpdateManyInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"where","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}],"isNullable":false,"outputType":{"type":"AffectedRowsOutput","namespace":"prisma","location":"outputObjectTypes","isList":false}},{"name":"deleteManyA","args":[{"name":"where","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":false}
]}],"isNullable":false,"outputType":{"type":"AffectedRowsOutput","namespace":"prisma","location":"outputObjectTypes","isList":false}},{"name":"createOneB","args":[{"name":"data","isRequired":true,"isNullable":false,"inputTypes":[{"type":"BCreateInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"BUncheckedCreateInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}],"isNullable":false,"outputType":{"type":"B","namespace":"model","location":"outputObjectTypes","isList":false}},{"name":"upsertOneB","args":[{"name":"where","isRequired":true,"isNullable":false,"inputTypes":[{"type":"BWhereUniqueInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"create","isRequired":true,"isNullable":false,"inputTypes":[{"type":"BCreateInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"BUncheckedCreateInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"update","isRequired":true,"isNullable":false,"inputTypes":[{"type":"BUpdateInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"BUncheckedUpdateInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}],"isNullable":false,"outputType":{"type":"B","namespace":"model","location":"outputObjectTypes","isList":false}},{"name":"createManyB","args":[{"name":"data","isRequired":true,"isNullable":false,"inputTypes":[{"type":"BCreateManyInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"BCreateManyInput","namespace":"prisma","location":"inputObjectTypes","isList":true}]},{"name":"skipDuplicates","isRequired":false,"isNullable":false,"inputTypes":[{"type":"Boolean","location":"scalar","isList":false}]}],"isNullable":false,"outputType":{"type":"AffectedRowsOutput","namespace":"prisma","location":"outputObjectTypes","isList":false}},{"name":"deleteOneB","args":[{"name":"where","isRequired":true,"isNullable":false,"inputTypes"
:[{"type":"BWhereUniqueInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}],"isNullable":true,"outputType":{"type":"B","namespace":"model","location":"outputObjectTypes","isList":false}},{"name":"updateOneB","args":[{"name":"data","isRequired":true,"isNullable":false,"inputTypes":[{"type":"BUpdateInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"BUncheckedUpdateInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"where","isRequired":true,"isNullable":false,"inputTypes":[{"type":"BWhereUniqueInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}],"isNullable":true,"outputType":{"type":"B","namespace":"model","location":"outputObjectTypes","isList":false}},{"name":"updateManyB","args":[{"name":"data","isRequired":true,"isNullable":false,"inputTypes":[{"type":"BUpdateManyMutationInput","namespace":"prisma","location":"inputObjectTypes","isList":false},{"type":"BUncheckedUpdateManyInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]},{"name":"where","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}],"isNullable":false,"outputType":{"type":"AffectedRowsOutput","namespace":"prisma","location":"outputObjectTypes","isList":false}},{"name":"deleteManyB","args":[{"name":"where","isRequired":false,"isNullable":false,"inputTypes":[{"type":"BWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}],"isNullable":false,"outputType":{"type":"AffectedRowsOutput","namespace":"prisma","location":"outputObjectTypes","isList":false}},{"name":"executeRaw","args":[{"name":"query","isRequired":true,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false}]},{"name":"parameters","isRequired":false,"isNullable":false,"inputTypes":[{"type":"Json","location":"scalar","isList":false}]}],"isNullable":false,"outputType":{"ty
pe":"Json","location":"scalar","isList":false}},{"name":"queryRaw","args":[{"name":"query","isRequired":true,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false}]},{"name":"parameters","isRequired":false,"isNullable":false,"inputTypes":[{"type":"Json","location":"scalar","isList":false}]}],"isNullable":false,"outputType":{"type":"Json","location":"scalar","isList":false}}]},{"name":"AggregateA","fields":[{"name":"_count","args":[],"isNullable":true,"outputType":{"type":"ACountAggregateOutputType","namespace":"prisma","location":"outputObjectTypes","isList":false}},{"name":"_min","args":[],"isNullable":true,"outputType":{"type":"AMinAggregateOutputType","namespace":"prisma","location":"outputObjectTypes","isList":false}},{"name":"_max","args":[],"isNullable":true,"outputType":{"type":"AMaxAggregateOutputType","namespace":"prisma","location":"outputObjectTypes","isList":false}}]},{"name":"AGroupByOutputType","fields":[{"name":"id","args":[],"isNullable":false,"outputType":{"type":"String","location":"scalar","isList":false}},{"name":"b_id","args":[],"isNullable":false,"outputType":{"type":"String","location":"scalar","isList":false}},{"name":"_count","args":[],"isNullable":true,"outputType":{"type":"ACountAggregateOutputType","namespace":"prisma","location":"outputObjectTypes","isList":false}},{"name":"_min","args":[],"isNullable":true,"outputType":{"type":"AMinAggregateOutputType","namespace":"prisma","location":"outputObjectTypes","isList":false}},{"name":"_max","args":[],"isNullable":true,"outputType":{"type":"AMaxAggregateOutputType","namespace":"prisma","location":"outputObjectTypes","isList":false}}]},{"name":"AggregateB","fields":[{"name":"_count","args":[],"isNullable":true,"outputType":{"type":"BCountAggregateOutputType","namespace":"prisma","location":"outputObjectTypes","isList":false}},{"name":"_min","args":[],"isNullable":true,"outputType":{"type":"BMinAggregateOutputType","namespace":"prisma","location":"outputObjectTypes
","isList":false}},{"name":"_max","args":[],"isNullable":true,"outputType":{"type":"BMaxAggregateOutputType","namespace":"prisma","location":"outputObjectTypes","isList":false}}]},{"name":"BGroupByOutputType","fields":[{"name":"id","args":[],"isNullable":false,"outputType":{"type":"String","location":"scalar","isList":false}},{"name":"_count","args":[],"isNullable":true,"outputType":{"type":"BCountAggregateOutputType","namespace":"prisma","location":"outputObjectTypes","isList":false}},{"name":"_min","args":[],"isNullable":true,"outputType":{"type":"BMinAggregateOutputType","namespace":"prisma","location":"outputObjectTypes","isList":false}},{"name":"_max","args":[],"isNullable":true,"outputType":{"type":"BMaxAggregateOutputType","namespace":"prisma","location":"outputObjectTypes","isList":false}}]},{"name":"AffectedRowsOutput","fields":[{"name":"count","args":[],"isNullable":false,"outputType":{"type":"Int","location":"scalar","isList":false}}]},{"name":"ACountAggregateOutputType","fields":[{"name":"id","args":[],"isNullable":false,"outputType":{"type":"Int","location":"scalar","isList":false}},{"name":"b_id","args":[],"isNullable":false,"outputType":{"type":"Int","location":"scalar","isList":false}},{"name":"_all","args":[],"isNullable":false,"outputType":{"type":"Int","location":"scalar","isList":false}}]},{"name":"AMinAggregateOutputType","fields":[{"name":"id","args":[],"isNullable":true,"outputType":{"type":"String","location":"scalar","isList":false}},{"name":"b_id","args":[],"isNullable":true,"outputType":{"type":"String","location":"scalar","isList":false}}]},{"name":"AMaxAggregateOutputType","fields":[{"name":"id","args":[],"isNullable":true,"outputType":{"type":"String","location":"scalar","isList":false}},{"name":"b_id","args":[],"isNullable":true,"outputType":{"type":"String","location":"scalar","isList":false}}]},{"name":"BCountAggregateOutputType","fields":[{"name":"id","args":[],"isNullable":false,"outputType":{"type":"Int","location":"scalar","isLis
t":false}},{"name":"_all","args":[],"isNullable":false,"outputType":{"type":"Int","location":"scalar","isList":false}}]},{"name":"BMinAggregateOutputType","fields":[{"name":"id","args":[],"isNullable":true,"outputType":{"type":"String","location":"scalar","isList":false}}]},{"name":"BMaxAggregateOutputType","fields":[{"name":"id","args":[],"isNullable":true,"outputType":{"type":"String","location":"scalar","isList":false}}]}],"model":[{"name":"A","fields":[{"name":"id","args":[],"isNullable":false,"outputType":{"type":"String","location":"scalar","isList":false}},{"name":"b_id","args":[],"isNullable":false,"outputType":{"type":"String","location":"scalar","isList":false}},{"name":"b","args":[],"isNullable":false,"outputType":{"type":"B","namespace":"model","location":"outputObjectTypes","isList":false}}]},{"name":"B","fields":[{"name":"id","args":[],"isNullable":false,"outputType":{"type":"String","location":"scalar","isList":false}},{"name":"a","args":[{"name":"where","isRequired":false,"isNullable":false,"inputTypes":[{"type":"AWhereInput","namespace":"prisma","location":"inputObjectTypes","isList":false}]}],"isNullable":true,"outputType":{"type":"A","namespace":"model","location":"outputObjectTypes","isList":false}}]}]},"enumTypes":{"prisma":[{"name":"TransactionIsolationLevel","values":["ReadUncommitted","ReadCommitted","RepeatableRead","Serializable"]},{"name":"AScalarFieldEnum","values":["id","b_id"]},{"name":"BScalarFieldEnum","values":["id"]},{"name":"SortOrder","values":["asc","desc"]},{"name":"QueryMode","values":["default","insensitive"]}]},"fieldRefTypes":{"prisma":[{"name":"StringFieldRefInput","allowTypes":[{"type":"String","location":"scalar","isList":false}],"fields":[{"name":"_ref","isRequired":true,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false}]},{"name":"_container","isRequired":true,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false}]}]},{"name":"ListStringFieldRefInput","al
lowTypes":[{"type":"String","location":"scalar","isList":true}],"fields":[{"name":"_ref","isRequired":true,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false}]},{"name":"_container","isRequired":true,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false}]}]},{"name":"IntFieldRefInput","allowTypes":[{"type":"Int","location":"scalar","isList":false}],"fields":[{"name":"_ref","isRequired":true,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false}]},{"name":"_container","isRequired":true,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false}]}]},{"name":"ListIntFieldRefInput","allowTypes":[{"type":"Int","location":"scalar","isList":true}],"fields":[{"name":"_ref","isRequired":true,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false}]},{"name":"_container","isRequired":true,"isNullable":false,"inputTypes":[{"type":"String","location":"scalar","isList":false}]}]}]}},"mappings":{"modelOperations":[{"model":"A","aggregate":"aggregateA","createMany":"createManyA","createOne":"createOneA","deleteMany":"deleteManyA","deleteOne":"deleteOneA","findFirst":"findFirstA","findFirstOrThrow":"findFirstAOrThrow","findMany":"findManyA","findUnique":"findUniqueA","findUniqueOrThrow":"findUniqueAOrThrow","groupBy":"groupByA","updateMany":"updateManyA","updateOne":"updateOneA","upsertOne":"upsertOneA"},{"model":"B","aggregate":"aggregateB","createMany":"createManyB","createOne":"createOneB","deleteMany":"deleteManyB","deleteOne":"deleteOneB","findFirst":"findFirstB","findFirstOrThrow":"findFirstBOrThrow","findMany":"findManyB","findUnique":"findUniqueB","findUniqueOrThrow":"findUniqueBOrThrow","groupBy":"groupByB","updateMany":"updateManyB","updateOne":"updateOneB","upsertOne":"upsertOneB"}],"otherOperations":{"read":[],"write":["executeRaw","queryRaw"]}}}"# + ]]; + + let response = get_dmmf(&request.to_string()).unwrap(); + 
expected.assert_eq(&response); + } + #[test] fn get_dmmf_using_both_relation_mode_and_referential_integrity() { let schema = r#" diff --git a/prisma-fmt/src/lib.rs b/prisma-fmt/src/lib.rs index ada79cd7290..c1449b3b205 100644 --- a/prisma-fmt/src/lib.rs +++ b/prisma-fmt/src/lib.rs @@ -3,8 +3,10 @@ mod code_actions; mod get_config; mod get_dmmf; mod lint; +mod merge_schemas; mod native; mod preview; +mod schema_file_input; mod text_document_completion; mod validate; @@ -89,6 +91,14 @@ pub fn validate(validate_params: String) -> Result<(), String> { validate::validate(&validate_params) } +/// Given a list of Prisma schema files (and their locations), returns the merged schema. +/// This is useful for `@prisma/client` generation, where the client needs a single - potentially large - schema, +/// while still allowing the user to split their schema copies into multiple files. +/// Internally, it uses `[validate]`. +pub fn merge_schemas(params: String) -> Result { + merge_schemas::merge_schemas(¶ms) +} + pub fn native_types(schema: String) -> String { native::run(&schema) } diff --git a/prisma-fmt/src/merge_schemas.rs b/prisma-fmt/src/merge_schemas.rs new file mode 100644 index 00000000000..bcb37922b68 --- /dev/null +++ b/prisma-fmt/src/merge_schemas.rs @@ -0,0 +1,127 @@ +use psl::reformat_validated_schema_into_single; +use serde::Deserialize; + +use crate::schema_file_input::SchemaFileInput; + +#[derive(Debug, Deserialize)] +pub struct MergeSchemasParams { + schema: SchemaFileInput, +} + +pub(crate) fn merge_schemas(params: &str) -> Result { + let params: MergeSchemasParams = match serde_json::from_str(params) { + Ok(params) => params, + Err(serde_err) => { + panic!("Failed to deserialize MergeSchemasParams: {serde_err}"); + } + }; + + let validated_schema = crate::validate::run(params.schema, false)?; + + let indent_width = 2usize; + let merged_schema = reformat_validated_schema_into_single(validated_schema, indent_width).unwrap(); + + Ok(merged_schema) +} + 
+#[cfg(test)] +mod tests { + use super::*; + use expect_test::expect; + use serde_json::json; + + #[test] + fn merge_two_valid_schemas_succeeds() { + let schema = vec![ + ( + "b.prisma", + r#" + model B { + id String @id + a A? + } + "#, + ), + ( + "a.prisma", + r#" + datasource db { + provider = "postgresql" + url = env("DBURL") + } + + model A { + id String @id + b_id String @unique + b B @relation(fields: [b_id], references: [id]) + } + "#, + ), + ]; + + let request = json!({ + "schema": schema, + }); + + let expected = expect![[r#" + model B { + id String @id + a A? + } + + datasource db { + provider = "postgresql" + url = env("DBURL") + } + + model A { + id String @id + b_id String @unique + b B @relation(fields: [b_id], references: [id]) + } + "#]]; + + let response = merge_schemas(&request.to_string()).unwrap(); + expected.assert_eq(&response); + } + + #[test] + fn merge_two_invalid_schemas_panics() { + let schema = vec![ + ( + "b.prisma", + r#" + model B { + id String @id + a A? + } + "#, + ), + ( + "a.prisma", + r#" + datasource db { + provider = "postgresql" + url = env("DBURL") + } + + model A { + id String @id + b_id String @unique + } + "#, + ), + ]; + + let request = json!({ + "schema": schema, + }); + + let expected = expect![[ + r#"{"error_code":"P1012","message":"\u001b[1;91merror\u001b[0m: \u001b[1mError validating field `a` in model `B`: The relation field `a` on model `B` is missing an opposite relation field on the model `A`. 
Either run `prisma format` or add it manually.\u001b[0m\n \u001b[1;94m-->\u001b[0m \u001b[4mb.prisma:4\u001b[0m\n\u001b[1;94m | \u001b[0m\n\u001b[1;94m 3 | \u001b[0m id String @id\n\u001b[1;94m 4 | \u001b[0m \u001b[1;91ma A?\u001b[0m\n\u001b[1;94m 5 | \u001b[0m }\n\u001b[1;94m | \u001b[0m\n\nValidation Error Count: 1"}"# + ]]; + + let response = merge_schemas(&request.to_string()).unwrap_err(); + expected.assert_eq(&response); + } +} diff --git a/prisma-fmt/src/schema_file_input.rs b/prisma-fmt/src/schema_file_input.rs new file mode 100644 index 00000000000..a7204510ed8 --- /dev/null +++ b/prisma-fmt/src/schema_file_input.rs @@ -0,0 +1,26 @@ +use psl::SourceFile; +use serde::Deserialize; + +/// Struct for supporting multiple files +/// in a backward-compatible way: can either accept +/// a single file contents or vector of (filePath, content) tuples. +/// Can be converted to the input for `psl::validate_multi_file` from +/// any of the variants. +#[derive(Deserialize, Debug)] +#[serde(untagged)] +pub(crate) enum SchemaFileInput { + Single(String), + Multiple(Vec<(String, String)>), +} + +impl From for Vec<(String, SourceFile)> { + fn from(value: SchemaFileInput) -> Self { + match value { + SchemaFileInput::Single(content) => vec![("schema.prisma".to_owned(), content.into())], + SchemaFileInput::Multiple(file_list) => file_list + .into_iter() + .map(|(filename, content)| (filename, content.into())) + .collect(), + } + } +} diff --git a/prisma-fmt/src/validate.rs b/prisma-fmt/src/validate.rs index 4cc9f88bf8b..7bbce19e425 100644 --- a/prisma-fmt/src/validate.rs +++ b/prisma-fmt/src/validate.rs @@ -1,14 +1,17 @@ +use psl::ValidatedSchema; use serde::Deserialize; use serde_json::json; use std::fmt::Write as _; +use crate::schema_file_input::SchemaFileInput; + // this mirrors user_facing_errors::common::SchemaParserError pub(crate) static SCHEMA_PARSER_ERROR_CODE: &str = "P1012"; #[derive(Deserialize, Debug)] #[serde(rename_all = "camelCase")] struct ValidateParams { - 
prisma_schema: String, + prisma_schema: SchemaFileInput, #[serde(default)] no_color: bool, } @@ -21,21 +24,22 @@ pub(crate) fn validate(params: &str) -> Result<(), String> { } }; - run(¶ms.prisma_schema, params.no_color) + run(params.prisma_schema, params.no_color)?; + Ok(()) } -pub fn run(input_schema: &str, no_color: bool) -> Result<(), String> { - let validate_schema = psl::validate(input_schema.into()); +pub fn run(input_schema: SchemaFileInput, no_color: bool) -> Result { + let validate_schema = psl::validate_multi_file(input_schema.into()); let diagnostics = &validate_schema.diagnostics; if !diagnostics.has_errors() { - return Ok(()); + return Ok(validate_schema); } // always colorise output regardless of the environment, which is important for Wasm colored::control::set_override(!no_color); - let mut formatted_error = diagnostics.to_pretty_string("schema.prisma", input_schema); + let mut formatted_error = validate_schema.render_own_diagnostics(); write!( formatted_error, "\nValidation Error Count: {}", @@ -109,6 +113,83 @@ mod tests { validate(&request.to_string()).unwrap(); } + #[test] + fn validate_multiple_files() { + let schema = vec![ + ( + "a.prisma", + r#" + datasource thedb { + provider = "postgresql" + url = env("DBURL") + } + + model A { + id String @id + b_id String @unique + b B @relation(fields: [b_id], references: [id]) + } + "#, + ), + ( + "b.prisma", + r#" + model B { + id String @id + a A? 
+ } + "#, + ), + ]; + + let request = json!({ + "prismaSchema": schema, + }); + + validate(&request.to_string()).unwrap(); + } + + #[test] + fn validate_multiple_files_error() { + let schema = vec![ + ( + "a.prisma", + r#" + datasource thedb { + provider = "postgresql" + url = env("DBURL") + } + + model A { + id String @id + b_id String @unique + b B @relation(fields: [b_id], references: [id]) + } + "#, + ), + ( + "b.prisma", + r#" + model B { + id String @id + a A + } + "#, + ), + ]; + + let request = json!({ + "prismaSchema": schema, + }); + + let expected = expect![[ + r#"{"error_code":"P1012","message":"\u001b[1;91merror\u001b[0m: \u001b[1mError parsing attribute \"@relation\": The relation field `a` on Model `B` is required. This is no longer valid because it's not possible to enforce this constraint on the database level. Please change the field type from `A` to `A?` to fix this.\u001b[0m\n \u001b[1;94m-->\u001b[0m \u001b[4mb.prisma:4\u001b[0m\n\u001b[1;94m | \u001b[0m\n\u001b[1;94m 3 | \u001b[0m id String @id\n\u001b[1;94m 4 | \u001b[0m \u001b[1;91ma A\u001b[0m\n\u001b[1;94m 5 | \u001b[0m }\n\u001b[1;94m | \u001b[0m\n\nValidation Error Count: 1"}"# + ]]; + + let response = validate(&request.to_string()).unwrap_err(); + expected.assert_eq(&response); + } + #[test] fn validate_using_both_relation_mode_and_referential_integrity() { let schema = r#" diff --git a/prisma-schema-wasm/src/lib.rs b/prisma-schema-wasm/src/lib.rs index cd373128d60..43288dd32f5 100644 --- a/prisma-schema-wasm/src/lib.rs +++ b/prisma-schema-wasm/src/lib.rs @@ -58,6 +58,12 @@ pub fn validate(params: String) -> Result<(), JsError> { prisma_fmt::validate(params).map_err(|e| JsError::new(&e)) } +#[wasm_bindgen] +pub fn merge_schemas(input: String) -> Result { + register_panic_hook(); + prisma_fmt::merge_schemas(input).map_err(|e| JsError::new(&e)) +} + #[wasm_bindgen] pub fn native_types(input: String) -> String { register_panic_hook(); diff --git a/psl/parser-database/src/files.rs 
b/psl/parser-database/src/files.rs index f201c839eea..9aef27d3d70 100644 --- a/psl/parser-database/src/files.rs +++ b/psl/parser-database/src/files.rs @@ -15,6 +15,13 @@ impl Files { .enumerate() .map(|(idx, (path, contents, ast))| (FileId(idx as u32), path, contents, ast)) } + + pub(crate) fn into_iter(self) -> impl Iterator { + self.0 + .into_iter() + .enumerate() + .map(|(idx, (path, contents, ast))| (FileId(idx as u32), path, contents, ast)) + } } impl Index for Files { diff --git a/psl/parser-database/src/lib.rs b/psl/parser-database/src/lib.rs index e1dd7b72b25..61dc685f93b 100644 --- a/psl/parser-database/src/lib.rs +++ b/psl/parser-database/src/lib.rs @@ -175,6 +175,16 @@ impl ParserDatabase { self.asts.iter().map(|(_, _, _, ast)| ast) } + /// Iterate all parsed ASTs, consuming parser database + pub fn into_iter_asts(self) -> impl Iterator { + self.asts.into_iter().map(|(_, _, _, ast)| ast) + } + + /// Iterate all file ids + pub fn iter_file_ids(&self) -> impl Iterator + '_ { + self.asts.iter().map(|(file_id, _, _, _)| file_id) + } + /// A parsed AST. pub fn ast(&self, file_id: FileId) -> &ast::SchemaAst { &self.asts[file_id].2 @@ -199,9 +209,19 @@ impl ParserDatabase { } /// The source file contents. - pub(crate) fn source(&self, file_id: FileId) -> &str { + pub fn source(&self, file_id: FileId) -> &str { self.asts[file_id].1.as_str() } + + /// Iterate all source file contents. + pub fn iter_sources(&self) -> impl Iterator { + self.asts.iter().map(|ast| ast.2.as_str()) + } + + /// The name of the file. 
+ pub fn file_name(&self, file_id: FileId) -> &str { + self.asts[file_id].0.as_str() + } } impl std::ops::Index for ParserDatabase { diff --git a/psl/psl-core/src/lib.rs b/psl/psl-core/src/lib.rs index 9d1877bd26d..03e1dca4356 100644 --- a/psl/psl-core/src/lib.rs +++ b/psl/psl-core/src/lib.rs @@ -20,7 +20,7 @@ pub use crate::{ configuration::{ Configuration, Datasource, DatasourceConnectorData, Generator, GeneratorConfigValue, StringFromEnvVar, }, - reformat::reformat, + reformat::{reformat, reformat_multiple, reformat_validated_schema_into_single}, }; pub use diagnostics; pub use parser_database::{self, is_reserved_type_name}; @@ -53,10 +53,14 @@ impl ValidatedSchema { self.relation_mode } - pub fn render_diagnostics(&self) -> String { + pub fn render_own_diagnostics(&self) -> String { + self.render_diagnostics(&self.diagnostics) + } + + pub fn render_diagnostics(&self, diagnostics: &Diagnostics) -> String { let mut out = Vec::new(); - for error in self.diagnostics.errors() { + for error in diagnostics.errors() { let (file_name, source, _) = &self.db[error.span().file_id]; error.pretty_print(&mut out, file_name, source.as_str()).unwrap(); } diff --git a/psl/psl-core/src/reformat.rs b/psl/psl-core/src/reformat.rs index 09d21c731b3..a18b32e301b 100644 --- a/psl/psl-core/src/reformat.rs +++ b/psl/psl-core/src/reformat.rs @@ -1,46 +1,95 @@ -use crate::ParserDatabase; +use crate::{ParserDatabase, ValidatedSchema}; +use diagnostics::FileId; use parser_database::{ast::WithSpan, walkers}; use schema_ast::{ast, SourceFile}; -use std::{borrow::Cow, sync::Arc}; +use std::{borrow::Cow, collections::HashMap}; /// Returns either the reformatted schema, or the original input if we can't reformat. This happens /// if and only if the source does not parse to a well formed AST. 
pub fn reformat(source: &str, indent_width: usize) -> Option { - let file = SourceFile::new_allocated(Arc::from(source.to_owned().into_boxed_str())); + let reformatted = reformat_multiple(vec![("schema.prisma".to_owned(), source.into())], indent_width); - let mut diagnostics = diagnostics::Diagnostics::new(); - let db = parser_database::ParserDatabase::new_single_file(file, &mut diagnostics); + reformatted.first().map(|(_, source)| source).cloned() +} + +pub fn reformat_validated_schema_into_single(schema: ValidatedSchema, indent_width: usize) -> Option { + let db = schema.db; + + let source = db + .iter_sources() + .map(|source| source.to_owned()) + .collect::>() + .join("\n"); - let source_to_reformat = if diagnostics.has_errors() { - Cow::Borrowed(source) + schema_ast::reformat(&source, indent_width) +} + +pub fn reformat_multiple(sources: Vec<(String, SourceFile)>, indent_width: usize) -> Vec<(String, String)> { + let mut diagnostics = diagnostics::Diagnostics::new(); + let db = parser_database::ParserDatabase::new(sources, &mut diagnostics); + + if diagnostics.has_errors() { + db.iter_file_ids() + .filter_map(|file_id| { + let formatted_source = schema_ast::reformat(db.source(file_id), indent_width)?; + Some((db.file_name(file_id).to_owned(), formatted_source)) + }) + .collect() } else { - let mut missing_bits = Vec::new(); + let mut missing_bits = HashMap::new(); + let mut ctx = MagicReformatCtx { - original_schema: source, - missing_bits: &mut missing_bits, + missing_bits_map: &mut missing_bits, db: &db, }; + push_missing_fields(&mut ctx); push_missing_attributes(&mut ctx); push_missing_relation_attribute_args(&mut ctx); - missing_bits.sort_by_key(|bit| bit.position); + ctx.sort_missing_bits(); - if missing_bits.is_empty() { - Cow::Borrowed(source) - } else { - Cow::Owned(enrich(source, &missing_bits)) - } - }; + db.iter_file_ids() + .filter_map(|file_id| { + let source = if let Some(missing_bits) = ctx.get_missing_bits(file_id) { + 
Cow::Owned(enrich(db.source(file_id), missing_bits)) + } else { + Cow::Borrowed(db.source(file_id)) + }; + + let formatted_source = schema_ast::reformat(&source, indent_width)?; - schema_ast::reformat(&source_to_reformat, indent_width) + Some((db.file_name(file_id).to_owned(), formatted_source)) + }) + .collect() + } } struct MagicReformatCtx<'a> { - original_schema: &'a str, - missing_bits: &'a mut Vec, + missing_bits_map: &'a mut HashMap>, db: &'a ParserDatabase, } +impl<'a> MagicReformatCtx<'a> { + fn add_missing_bit(&mut self, file_id: FileId, bit: MissingBit) { + self.missing_bits_map.entry(file_id).or_default().push(bit); + } + + fn get_missing_bits(&self, file_id: FileId) -> Option<&Vec> { + let bits_vec = self.missing_bits_map.get(&file_id)?; + if bits_vec.is_empty() { + None + } else { + Some(bits_vec) + } + } + + fn sort_missing_bits(&mut self) { + self.missing_bits_map + .iter_mut() + .for_each(|(_, bits)| bits.sort_by_key(|bit| bit.position)) + } +} + fn enrich(input: &str, missing_bits: &[MissingBit]) -> String { let bits = missing_bits.iter().scan(0usize, |last_insert_position, missing_bit| { let start: usize = *last_insert_position; @@ -109,10 +158,13 @@ fn push_inline_relation_missing_arguments( (", ", "", relation_attribute.span.end - 1) }; - ctx.missing_bits.push(MissingBit { - position, - content: format!("{prefix}{extra_args}{suffix}"), - }); + ctx.add_missing_bit( + relation_attribute.span.file_id, + MissingBit { + position, + content: format!("{prefix}{extra_args}{suffix}"), + }, + ); } } @@ -136,10 +188,14 @@ fn push_missing_relation_attribute(inline_relation: walkers::InlineRelationWalke content.push_str(&references_argument(inline_relation)); content.push(')'); - ctx.missing_bits.push(MissingBit { - position: after_type(forward.ast_field().field_type.span().end, ctx.original_schema), - content, - }) + let file_id = forward.ast_field().span().file_id; + ctx.add_missing_bit( + file_id, + MissingBit { + position: 
after_type(forward.ast_field().field_type.span().end, ctx.db.source(file_id)), + content, + }, + ); } } @@ -167,10 +223,14 @@ fn push_missing_relation_fields(inline: walkers::InlineRelationWalker<'_>, ctx: }; let arity = if inline.is_one_to_one() { "?" } else { "[]" }; - ctx.missing_bits.push(MissingBit { - position: inline.referenced_model().ast_model().span().end - 1, - content: format!("{referencing_model_name} {referencing_model_name}{arity} {ignore}\n"), - }); + let span = inline.referenced_model().ast_model().span(); + ctx.add_missing_bit( + span.file_id, + MissingBit { + position: span.end - 1, + content: format!("{referencing_model_name} {referencing_model_name}{arity} {ignore}\n"), + }, + ); } if inline.forward_relation_field().is_none() { @@ -179,10 +239,14 @@ fn push_missing_relation_fields(inline: walkers::InlineRelationWalker<'_>, ctx: let arity = render_arity(forward_relation_field_arity(inline)); let fields_arg = fields_argument(inline); let references_arg = references_argument(inline); - ctx.missing_bits.push(MissingBit { - position: inline.referencing_model().ast_model().span().end - 1, - content: format!("{field_name} {field_type}{arity} @relation({fields_arg}, {references_arg})\n"), - }) + let span = inline.referencing_model().ast_model().span(); + ctx.add_missing_bit( + span.file_id, + MissingBit { + position: span.end - 1, + content: format!("{field_name} {field_type}{arity} @relation({fields_arg}, {references_arg})\n"), + }, + ) } } @@ -211,13 +275,17 @@ fn push_missing_scalar_fields(inline: walkers::InlineRelationWalker<'_>, ctx: &m let mut attributes: String = String::new(); if let Some((_datasource_name, _type_name, _args, span)) = field.blueprint.raw_native_type() { - attributes.push_str(&ctx.original_schema[span.start..span.end]); + attributes.push_str(&ctx.db.source(span.file_id)[span.start..span.end]); } - ctx.missing_bits.push(MissingBit { - position: inline.referencing_model().ast_model().span().end - 1, - content: 
format!("{field_name} {field_type}{arity} {attributes}\n"), - }); + let span = inline.referencing_model().ast_model().span(); + ctx.add_missing_bit( + span.file_id, + MissingBit { + position: span.end - 1, + content: format!("{field_name} {field_type}{arity} {attributes}\n"), + }, + ); } } diff --git a/psl/psl/build.rs b/psl/psl/build.rs index 509b6087599..1b0d560da55 100644 --- a/psl/psl/build.rs +++ b/psl/psl/build.rs @@ -1,19 +1,46 @@ use std::{env, fs, io::Write as _, path}; const VALIDATIONS_ROOT_DIR: &str = "tests/validation"; -const REFORMAT_ROOT_DIR: &str = "tests/reformatter"; +const REFORMAT_SINGLE_FILE_ROOT_DIR: &str = "tests/reformatter"; +const REFORMAT_MULTI_FILE_ROOT_DIR: &str = "tests/reformatter_multi_file"; const CARGO_MANIFEST_DIR: &str = env!("CARGO_MANIFEST_DIR"); fn main() { build_validation_tests(); - build_reformat_tests(); + build_reformat_single_file_tests(); + build_reformat_multi_file_tests(); } -fn build_reformat_tests() { - println!("cargo:rerun-if-changed={REFORMAT_ROOT_DIR}"); +fn build_reformat_multi_file_tests() { + println!("cargo:rerun-if-changed={REFORMAT_MULTI_FILE_ROOT_DIR}"); + let schema_dirs_to_reformat = fs::read_dir(format!("{CARGO_MANIFEST_DIR}/{REFORMAT_MULTI_FILE_ROOT_DIR}")) + .unwrap() + .map(Result::unwrap) + .filter_map(|entry| { + let name = entry.file_name(); + let name = name.to_str().unwrap(); + if name == "." || name == ".." 
|| name.ends_with(".reformatted") { + None + } else { + Some(name.trim_start_matches('/').to_owned()) + } + }); + let mut out_file = out_file("reformat_multi_file_tests.rs"); + for schema_dir in schema_dirs_to_reformat { + let test_name = test_name(&schema_dir); + writeln!( + out_file, + "#[test] fn {test_name}() {{ run_reformat_multi_file_test(\"{schema_dir}\"); }}" + ) + .unwrap(); + } +} + +fn build_reformat_single_file_tests() { + println!("cargo:rerun-if-changed={REFORMAT_SINGLE_FILE_ROOT_DIR}"); let mut all_schemas = Vec::new(); - find_all_schemas("", &mut all_schemas, REFORMAT_ROOT_DIR); + find_all_schemas("", &mut all_schemas, REFORMAT_SINGLE_FILE_ROOT_DIR); let mut out_file = out_file("reformat_tests.rs"); let schemas_to_reformat = all_schemas.iter().filter(|name| !name.ends_with(".reformatted.prisma")); diff --git a/psl/psl/src/lib.rs b/psl/psl/src/lib.rs index d1c38eaf433..af78ef19b3b 100644 --- a/psl/psl/src/lib.rs +++ b/psl/psl/src/lib.rs @@ -12,6 +12,8 @@ pub use psl_core::{ parser_database::{self, SourceFile}, reachable_only_with_capability, reformat, + reformat_multiple, + reformat_validated_schema_into_single, schema_ast, set_config_dir, Configuration, diff --git a/psl/psl/tests/multi_file/basic.rs b/psl/psl/tests/multi_file/basic.rs index fd1c2d0e4f9..d5eaf5b8b48 100644 --- a/psl/psl/tests/multi_file/basic.rs +++ b/psl/psl/tests/multi_file/basic.rs @@ -8,7 +8,7 @@ fn expect_errors(schemas: &[[&'static str; 2]], expectation: expect_test::Expect .collect(), ); - let actual = out.render_diagnostics(); + let actual = out.render_own_diagnostics(); expectation.assert_eq(&actual) } diff --git a/psl/psl/tests/panic_with_diff/mod.rs b/psl/psl/tests/panic_with_diff/mod.rs index a66b81643fd..6360545e851 100644 --- a/psl/psl/tests/panic_with_diff/mod.rs +++ b/psl/psl/tests/panic_with_diff/mod.rs @@ -1,9 +1,12 @@ -pub(crate) fn panic_with_diff(expected: &str, found: &str) { +pub(crate) fn panic_with_diff(expected: &str, found: &str, name: Option<&str>) { + let 
title = name + .map(|name| format!("Snapshot '{name}'")) + .unwrap_or("Snapshot".to_owned()); let chunks = dissimilar::diff(expected, found); let diff = format_chunks(chunks); panic!( r#" -Snapshot comparison failed. Run the test again with UPDATE_EXPECT=1 in the environment to update the snapshot. +{title} comparison failed. Run the test again with UPDATE_EXPECT=1 in the environment to update the snapshot. ===== EXPECTED ==== {expected} diff --git a/psl/psl/tests/reformat_tests.rs b/psl/psl/tests/reformat_tests.rs index c945ad53c07..c3c1748d8fe 100644 --- a/psl/psl/tests/reformat_tests.rs +++ b/psl/psl/tests/reformat_tests.rs @@ -24,7 +24,7 @@ fn run_reformat_test(test_file_path: &str) { let mut file = fs::File::create(&snapshot_file_name).unwrap(); // truncate file.write_all(reformatted_text.as_bytes()).unwrap(); } else { - panic_with_diff::panic_with_diff(&expected_text, &reformatted_text); + panic_with_diff::panic_with_diff(&expected_text, &reformatted_text, None); } if reformat(&reformatted_text) != reformatted_text { @@ -39,3 +39,72 @@ include!(concat!(env!("OUT_DIR"), "/reformat_tests.rs")); fn reformat(s: &str) -> String { psl::reformat(s, 2).unwrap() } + +mod reformat_multi_file { + use std::{collections::HashMap, fs, io::Write, path}; + + use psl::{reformat_multiple, SourceFile}; + + use crate::panic_with_diff; + + const MULTIFILE_TESTS_ROOT: &str = concat!(env!("CARGO_MANIFEST_DIR"), "/tests/reformatter_multi_file"); + + #[inline(never)] + fn run_reformat_multi_file_test(test_dir_name: &str) { + let dir_path = path::Path::new(MULTIFILE_TESTS_ROOT).join(test_dir_name); + let snapshot_dir_path = path::Path::new(MULTIFILE_TESTS_ROOT).join(format!("{test_dir_name}.reformatted")); + + fs::create_dir_all(&snapshot_dir_path).unwrap(); + let schemas: Vec<_> = read_schemas_from_dir(dir_path).collect(); + + let result = reformat_multiple(schemas, 2); + + let should_update = std::env::var("UPDATE_EXPECT").is_ok(); + let mut snapshot_schemas: HashMap<_, _> =
read_schemas_from_dir(&snapshot_dir_path).collect(); + for (path, content) in result { + let content = content.as_str(); + let snapshot_content = snapshot_schemas.remove(&path).unwrap_or_default(); + let snapshot_content = snapshot_content.as_str(); + if content == snapshot_content { + continue; + } + + if should_update { + let snapshot_file_path = path::Path::new(&snapshot_dir_path).join(path); + let mut file = fs::File::create(&snapshot_file_path).unwrap(); + file.write_all(content.as_bytes()).unwrap() + } else { + panic_with_diff::panic_with_diff(snapshot_content, content, Some(&path)); + } + } + + // cleanup removed files + for missing_file in snapshot_schemas.keys() { + if should_update { + fs::remove_file(path::Path::new(&snapshot_dir_path).join(missing_file)).unwrap() + } else { + panic!("{missing_file} is present in the snapshot directory, but missing from formatting results") + } + } + } + + fn read_schemas_from_dir(root_dir_path: impl AsRef<path::Path>) -> impl Iterator<Item = (String, SourceFile)> { + let root_dir_path = root_dir_path.as_ref().to_owned(); + fs::read_dir(&root_dir_path) + .unwrap() + .map(Result::unwrap) + .filter_map(move |entry| { + let file_name = entry.file_name(); + let file_name = file_name.to_str().unwrap(); + if !file_name.ends_with(".prisma") { + None + } else { + let full_path = root_dir_path.clone().join(file_name); + let content = fs::read_to_string(full_path).unwrap(); + Some((file_name.to_owned(), content.into())) + } + }) + } + + include!(concat!(env!("OUT_DIR"), "/reformat_multi_file_tests.rs")); +} diff --git a/psl/psl/tests/reformatter_multi_file/align_blocks.reformatted/User.prisma b/psl/psl/tests/reformatter_multi_file/align_blocks.reformatted/User.prisma new file mode 100644 index 00000000000..5a45cefe011 --- /dev/null +++ b/psl/psl/tests/reformatter_multi_file/align_blocks.reformatted/User.prisma @@ -0,0 +1,5 @@ +model User { + id Int @id @default(autoincrement()) + name String + age Float +} diff --git
a/psl/psl/tests/reformatter_multi_file/align_blocks.reformatted/db.prisma b/psl/psl/tests/reformatter_multi_file/align_blocks.reformatted/db.prisma new file mode 100644 index 00000000000..e4acdefaaa6 --- /dev/null +++ b/psl/psl/tests/reformatter_multi_file/align_blocks.reformatted/db.prisma @@ -0,0 +1,9 @@ +datasource db { + provider = "sqlite" + url = env("DATABASE_URL") +} + +generator client { + provider = "prisma-client-js" + previewFeatures = ["prismaSchemaFolder"] +} diff --git a/psl/psl/tests/reformatter_multi_file/align_blocks/User.prisma b/psl/psl/tests/reformatter_multi_file/align_blocks/User.prisma new file mode 100644 index 00000000000..f24cc66e4d2 --- /dev/null +++ b/psl/psl/tests/reformatter_multi_file/align_blocks/User.prisma @@ -0,0 +1,5 @@ +model User { + id Int @id @default( autoincrement()) + name String + age Float +} \ No newline at end of file diff --git a/psl/psl/tests/reformatter_multi_file/align_blocks/db.prisma b/psl/psl/tests/reformatter_multi_file/align_blocks/db.prisma new file mode 100644 index 00000000000..34e89da163e --- /dev/null +++ b/psl/psl/tests/reformatter_multi_file/align_blocks/db.prisma @@ -0,0 +1,9 @@ +datasource db { + provider = "sqlite" + url = env("DATABASE_URL") +} + +generator client { + provider = "prisma-client-js" + previewFeatures = ["prismaSchemaFolder"] +} \ No newline at end of file diff --git a/psl/psl/tests/reformatter_multi_file/relation_1_to_1.reformatted/Post.prisma b/psl/psl/tests/reformatter_multi_file/relation_1_to_1.reformatted/Post.prisma new file mode 100644 index 00000000000..84b3d54ea7f --- /dev/null +++ b/psl/psl/tests/reformatter_multi_file/relation_1_to_1.reformatted/Post.prisma @@ -0,0 +1,5 @@ +model Post { + id Int @id @default(autoincrement()) + title String + User User? 
+} diff --git a/psl/psl/tests/reformatter_multi_file/relation_1_to_1.reformatted/User.prisma b/psl/psl/tests/reformatter_multi_file/relation_1_to_1.reformatted/User.prisma new file mode 100644 index 00000000000..fc08b7b116d --- /dev/null +++ b/psl/psl/tests/reformatter_multi_file/relation_1_to_1.reformatted/User.prisma @@ -0,0 +1,7 @@ +model User { + id Int @id @default(autoincrement()) + name String + age Float + postId Int @unique + post Post @relation(fields: [postId], references: [id]) +} diff --git a/psl/psl/tests/reformatter_multi_file/relation_1_to_1.reformatted/db.prisma b/psl/psl/tests/reformatter_multi_file/relation_1_to_1.reformatted/db.prisma new file mode 100644 index 00000000000..e4acdefaaa6 --- /dev/null +++ b/psl/psl/tests/reformatter_multi_file/relation_1_to_1.reformatted/db.prisma @@ -0,0 +1,9 @@ +datasource db { + provider = "sqlite" + url = env("DATABASE_URL") +} + +generator client { + provider = "prisma-client-js" + previewFeatures = ["prismaSchemaFolder"] +} diff --git a/psl/psl/tests/reformatter_multi_file/relation_1_to_1/Post.prisma b/psl/psl/tests/reformatter_multi_file/relation_1_to_1/Post.prisma new file mode 100644 index 00000000000..149498bbab3 --- /dev/null +++ b/psl/psl/tests/reformatter_multi_file/relation_1_to_1/Post.prisma @@ -0,0 +1,4 @@ +model Post { + id Int @id @default(autoincrement()) + title String +} diff --git a/psl/psl/tests/reformatter_multi_file/relation_1_to_1/User.prisma b/psl/psl/tests/reformatter_multi_file/relation_1_to_1/User.prisma new file mode 100644 index 00000000000..51b16016f50 --- /dev/null +++ b/psl/psl/tests/reformatter_multi_file/relation_1_to_1/User.prisma @@ -0,0 +1,7 @@ +model User { + id Int @id @default(autoincrement()) + name String + age Float + postId Int @unique + post Post @relation(fields: [postId], references: [id]) +} diff --git a/psl/psl/tests/reformatter_multi_file/relation_1_to_1/db.prisma b/psl/psl/tests/reformatter_multi_file/relation_1_to_1/db.prisma new file mode 100644 index 
00000000000..e4acdefaaa6 --- /dev/null +++ b/psl/psl/tests/reformatter_multi_file/relation_1_to_1/db.prisma @@ -0,0 +1,9 @@ +datasource db { + provider = "sqlite" + url = env("DATABASE_URL") +} + +generator client { + provider = "prisma-client-js" + previewFeatures = ["prismaSchemaFolder"] +} diff --git a/psl/psl/tests/reformatter_multi_file/relation_list.reformatted/Post.prisma b/psl/psl/tests/reformatter_multi_file/relation_list.reformatted/Post.prisma new file mode 100644 index 00000000000..93707cea33f --- /dev/null +++ b/psl/psl/tests/reformatter_multi_file/relation_list.reformatted/Post.prisma @@ -0,0 +1,6 @@ +model Post { + id Int @id @default(autoincrement()) + title String + User User? @relation(fields: [userId], references: [id]) + userId Int? +} diff --git a/psl/psl/tests/reformatter_multi_file/relation_list.reformatted/User.prisma b/psl/psl/tests/reformatter_multi_file/relation_list.reformatted/User.prisma new file mode 100644 index 00000000000..0057debd6bd --- /dev/null +++ b/psl/psl/tests/reformatter_multi_file/relation_list.reformatted/User.prisma @@ -0,0 +1,6 @@ +model User { + id Int @id @default(autoincrement()) + name String + age Float + posts Post[] +} diff --git a/psl/psl/tests/reformatter_multi_file/relation_list.reformatted/db.prisma b/psl/psl/tests/reformatter_multi_file/relation_list.reformatted/db.prisma new file mode 100644 index 00000000000..e4acdefaaa6 --- /dev/null +++ b/psl/psl/tests/reformatter_multi_file/relation_list.reformatted/db.prisma @@ -0,0 +1,9 @@ +datasource db { + provider = "sqlite" + url = env("DATABASE_URL") +} + +generator client { + provider = "prisma-client-js" + previewFeatures = ["prismaSchemaFolder"] +} diff --git a/psl/psl/tests/reformatter_multi_file/relation_list/Post.prisma b/psl/psl/tests/reformatter_multi_file/relation_list/Post.prisma new file mode 100644 index 00000000000..149498bbab3 --- /dev/null +++ b/psl/psl/tests/reformatter_multi_file/relation_list/Post.prisma @@ -0,0 +1,4 @@ +model Post { + id Int 
@id @default(autoincrement()) + title String +} diff --git a/psl/psl/tests/reformatter_multi_file/relation_list/User.prisma b/psl/psl/tests/reformatter_multi_file/relation_list/User.prisma new file mode 100644 index 00000000000..91bb36ac360 --- /dev/null +++ b/psl/psl/tests/reformatter_multi_file/relation_list/User.prisma @@ -0,0 +1,6 @@ +model User { + id Int @id @default(autoincrement()) + name String + age Float + posts Post[] +} diff --git a/psl/psl/tests/reformatter_multi_file/relation_list/db.prisma b/psl/psl/tests/reformatter_multi_file/relation_list/db.prisma new file mode 100644 index 00000000000..e4acdefaaa6 --- /dev/null +++ b/psl/psl/tests/reformatter_multi_file/relation_list/db.prisma @@ -0,0 +1,9 @@ +datasource db { + provider = "sqlite" + url = env("DATABASE_URL") +} + +generator client { + provider = "prisma-client-js" + previewFeatures = ["prismaSchemaFolder"] +} diff --git a/psl/psl/tests/reformatter_multi_file/relation_single.reformatted/Post.prisma b/psl/psl/tests/reformatter_multi_file/relation_single.reformatted/Post.prisma new file mode 100644 index 00000000000..d5524c154ee --- /dev/null +++ b/psl/psl/tests/reformatter_multi_file/relation_single.reformatted/Post.prisma @@ -0,0 +1,5 @@ +model Post { + id Int @id @default(autoincrement()) + title String + User User[] +} diff --git a/psl/psl/tests/reformatter_multi_file/relation_single.reformatted/User.prisma b/psl/psl/tests/reformatter_multi_file/relation_single.reformatted/User.prisma new file mode 100644 index 00000000000..a2690c937c7 --- /dev/null +++ b/psl/psl/tests/reformatter_multi_file/relation_single.reformatted/User.prisma @@ -0,0 +1,7 @@ +model User { + id Int @id @default(autoincrement()) + name String + age Float + post Post @relation(fields: [postId], references: [id]) + postId Int +} diff --git a/psl/psl/tests/reformatter_multi_file/relation_single.reformatted/db.prisma b/psl/psl/tests/reformatter_multi_file/relation_single.reformatted/db.prisma new file mode 100644 index 
00000000000..e4acdefaaa6 --- /dev/null +++ b/psl/psl/tests/reformatter_multi_file/relation_single.reformatted/db.prisma @@ -0,0 +1,9 @@ +datasource db { + provider = "sqlite" + url = env("DATABASE_URL") +} + +generator client { + provider = "prisma-client-js" + previewFeatures = ["prismaSchemaFolder"] +} diff --git a/psl/psl/tests/reformatter_multi_file/relation_single/Post.prisma b/psl/psl/tests/reformatter_multi_file/relation_single/Post.prisma new file mode 100644 index 00000000000..149498bbab3 --- /dev/null +++ b/psl/psl/tests/reformatter_multi_file/relation_single/Post.prisma @@ -0,0 +1,4 @@ +model Post { + id Int @id @default(autoincrement()) + title String +} diff --git a/psl/psl/tests/reformatter_multi_file/relation_single/User.prisma b/psl/psl/tests/reformatter_multi_file/relation_single/User.prisma new file mode 100644 index 00000000000..9d892ac43e6 --- /dev/null +++ b/psl/psl/tests/reformatter_multi_file/relation_single/User.prisma @@ -0,0 +1,6 @@ +model User { + id Int @id @default(autoincrement()) + name String + age Float + post Post +} diff --git a/psl/psl/tests/reformatter_multi_file/relation_single/db.prisma b/psl/psl/tests/reformatter_multi_file/relation_single/db.prisma new file mode 100644 index 00000000000..e4acdefaaa6 --- /dev/null +++ b/psl/psl/tests/reformatter_multi_file/relation_single/db.prisma @@ -0,0 +1,9 @@ +datasource db { + provider = "sqlite" + url = env("DATABASE_URL") +} + +generator client { + provider = "prisma-client-js" + previewFeatures = ["prismaSchemaFolder"] +} diff --git a/psl/psl/tests/validation_tests.rs b/psl/psl/tests/validation_tests.rs index 6d0120cf933..9859dbbc6bd 100644 --- a/psl/psl/tests/validation_tests.rs +++ b/psl/psl/tests/validation_tests.rs @@ -91,7 +91,7 @@ fn run_validation_test(test_file_path: &str) { return; } - panic_with_diff::panic_with_diff(&last_comment_contents, &diagnostics) + panic_with_diff::panic_with_diff(&last_comment_contents, &diagnostics, None) } include!(concat!(env!("OUT_DIR"), 
"/validation_tests.rs")); diff --git a/psl/schema-ast/src/reformat.rs b/psl/schema-ast/src/reformat.rs index 853258226e2..3492bb0524f 100644 --- a/psl/schema-ast/src/reformat.rs +++ b/psl/schema-ast/src/reformat.rs @@ -19,6 +19,8 @@ pub fn reformat(input: &str, indent_width: usize) -> Option { renderer.stream.push('\n'); } + // TODO: why do we need to use a `Some` here? + // Also: if we really want to return an `Option`, why do unwrap in `ast.next()`? Some(renderer.stream) } diff --git a/psl/schema-ast/src/source_file.rs b/psl/schema-ast/src/source_file.rs index 3d7deafd3a2..63329ad93c3 100644 --- a/psl/schema-ast/src/source_file.rs +++ b/psl/schema-ast/src/source_file.rs @@ -6,6 +6,14 @@ pub struct SourceFile { contents: Contents, } +impl Default for SourceFile { + fn default() -> Self { + Self { + contents: Contents::Static(""), + } + } +} + impl SourceFile { pub fn new_static(content: &'static str) -> Self { Self { diff --git a/query-engine/dmmf/src/lib.rs b/query-engine/dmmf/src/lib.rs index 340566c83de..42cfb2757ca 100644 --- a/query-engine/dmmf/src/lib.rs +++ b/query-engine/dmmf/src/lib.rs @@ -4,6 +4,7 @@ mod serialization_ast; #[cfg(test)] mod tests; +use psl::ValidatedSchema; pub use serialization_ast::DataModelMetaFormat; use ast_builders::schema_to_dmmf; @@ -15,6 +16,11 @@ pub fn dmmf_json_from_schema(schema: &str) -> String { serde_json::to_string(&dmmf).unwrap() } +pub fn dmmf_json_from_validated_schema(schema: ValidatedSchema) -> String { + let dmmf = from_precomputed_parts(&schema::build(Arc::new(schema), true)); + serde_json::to_string(&dmmf).unwrap() +} + pub fn dmmf_from_schema(schema: &str) -> DataModelMetaFormat { let schema = Arc::new(psl::parse_schema(schema).unwrap()); from_precomputed_parts(&schema::build(schema, true)) From f45ef6a011804f88f9c74e70248cc4dfbcbccb2a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 10 Apr 2024 09:35:36 +0200 Subject: [PATCH 19/25] chore(deps): bump 
h2 from 0.3.24 to 0.3.26 (#4814) Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index d838995c6f9..3340e7e90fa 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1583,9 +1583,9 @@ dependencies = [ [[package]] name = "h2" -version = "0.3.24" +version = "0.3.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb2c4422095b67ee78da96fbb51a4cc413b3b25883c7717ff7ca1ab31022c9c9" +checksum = "81fe527a889e1532da5c525686d96d4c2e74cdd345badf8dfef9f6b39dd5f5e8" dependencies = [ "bytes", "fnv", From fce8abaff49572845d2800356cd4cec8fd3dac83 Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Wed, 10 Apr 2024 11:44:47 +0200 Subject: [PATCH 20/25] qe: add json serialization span in binary engine (#4339) The equivalent of https://github.com/prisma/prisma-engines/pull/4154 for the binary engine. Closes: https://github.com/prisma/prisma/issues/21405 --- query-engine/query-engine/src/server/mod.rs | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/query-engine/query-engine/src/server/mod.rs b/query-engine/query-engine/src/server/mod.rs index 01b61a07b6b..a63504cc766 100644 --- a/query-engine/query-engine/src/server/mod.rs +++ b/query-engine/query-engine/src/server/mod.rs @@ -173,7 +173,7 @@ async fn request_handler(cx: Arc, req: Request) -> Result { let handler = RequestHandler::new(cx.executor(), cx.query_schema(), cx.engine_protocol()); - let mut result = handler.handle(body, tx_id, traceparent).instrument(span).await; + let mut result = handler.handle(body, tx_id, traceparent).await; if let telemetry::capturing::Capturer::Enabled(capturer) = &capture_config { let telemetry = capturer.fetch_captures().await; @@ -183,7 +183,8 @@ async fn request_handler(cx: Arc, req: Request) -> Result, req: Request) -> Result Date: Wed, 10 Apr 2024 12:11:58 +0200 Subject: [PATCH 21/25] feat: complete format 
support (#4818) * feat: complete format support * feat: complete lint support * chore: clippy * feat: ensure format never panics and always returns original datamodel --- Cargo.lock | 2 + prisma-fmt/src/lib.rs | 37 ++++++++++++++---- prisma-fmt/src/lint.rs | 59 +++++++++++++++++++++++++---- prisma-fmt/src/main.rs | 6 +-- prisma-fmt/src/schema_file_input.rs | 7 +--- prisma-schema-wasm/src/lib.rs | 2 +- psl/schema-ast/Cargo.toml | 2 + psl/schema-ast/src/source_file.rs | 12 ++++++ 8 files changed, 104 insertions(+), 23 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 3340e7e90fa..da25af4bcf4 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4502,6 +4502,8 @@ dependencies = [ "diagnostics", "pest", "pest_derive", + "serde", + "serde_json", ] [[package]] diff --git a/prisma-fmt/src/lib.rs b/prisma-fmt/src/lib.rs index c1449b3b205..8cbca952234 100644 --- a/prisma-fmt/src/lib.rs +++ b/prisma-fmt/src/lib.rs @@ -13,6 +13,7 @@ mod validate; use log::*; use lsp_types::{Position, Range}; use psl::parser_database::ast; +use schema_file_input::SchemaFileInput; /// The API is modelled on an LSP [completion /// request](https://github.com/microsoft/language-server-protocol/blob/gh-pages/_specifications/specification-3-16.md#textDocument_completion). @@ -45,27 +46,49 @@ pub fn code_actions(schema: String, params: &str) -> String { } /// The two parameters are: -/// - The Prisma schema to reformat, as a string. +/// - The [`SchemaFileInput`] to reformat, as a string. /// - An LSP /// [DocumentFormattingParams](https://github.com/microsoft/language-server-protocol/blob/gh-pages/_specifications/specification-3-16.md#textDocument_formatting) object, as JSON. /// /// The function returns the formatted schema, as a string. +/// If the schema or any of the provided parameters is invalid, the function returns the original schema. +/// This function never panics. /// /// Of the DocumentFormattingParams, we only take into account tabSize, at the moment. 
-pub fn format(schema: &str, params: &str) -> String { +pub fn format(datamodel: String, params: &str) -> String { + let schema: SchemaFileInput = match serde_json::from_str(&datamodel) { + Ok(params) => params, + Err(_) => { + return datamodel; + } + }; + let params: lsp_types::DocumentFormattingParams = match serde_json::from_str(params) { Ok(params) => params, - Err(err) => { - warn!("Error parsing DocumentFormattingParams params: {}", err); - return schema.to_owned(); + Err(_) => { + return datamodel; } }; - psl::reformat(schema, params.options.tab_size as usize).unwrap_or_else(|| schema.to_owned()) + let indent_width = params.options.tab_size as usize; + + match schema { + SchemaFileInput::Single(single) => psl::reformat(&single, indent_width).unwrap_or(datamodel), + SchemaFileInput::Multiple(multiple) => { + let result = psl::reformat_multiple(multiple, indent_width); + serde_json::to_string(&result).unwrap_or(datamodel) + } + } } pub fn lint(schema: String) -> String { - lint::run(&schema) + let schema: SchemaFileInput = match serde_json::from_str(&schema) { + Ok(params) => params, + Err(serde_err) => { + panic!("Failed to deserialize SchemaFileInput: {serde_err}"); + } + }; + lint::run(schema) } /// Function that throws a human-friendly error message when the schema is invalid, following the JSON formatting diff --git a/prisma-fmt/src/lint.rs b/prisma-fmt/src/lint.rs index a52a8105aff..6ccef59750f 100644 --- a/prisma-fmt/src/lint.rs +++ b/prisma-fmt/src/lint.rs @@ -1,5 +1,7 @@ use psl::diagnostics::{DatamodelError, DatamodelWarning}; +use crate::schema_file_input::SchemaFileInput; + #[derive(serde::Serialize)] pub struct MiniError { start: usize, @@ -8,8 +10,11 @@ pub struct MiniError { is_warning: bool, } -pub(crate) fn run(schema: &str) -> String { - let schema = psl::validate(schema.into()); +pub(crate) fn run(schema: SchemaFileInput) -> String { + let schema = match schema { + SchemaFileInput::Single(file) => psl::validate(file.into()), + 
SchemaFileInput::Multiple(files) => psl::validate_multi_file(files), + }; let diagnostics = &schema.diagnostics; let mut mini_errors: Vec = diagnostics @@ -45,19 +50,20 @@ fn print_diagnostics(diagnostics: Vec) -> String { #[cfg(test)] mod tests { + use super::SchemaFileInput; use expect_test::expect; use indoc::indoc; - fn lint(s: &str) -> String { - let result = super::run(s); + fn lint(schema: SchemaFileInput) -> String { + let result = super::run(schema); let value: serde_json::Value = serde_json::from_str(&result).unwrap(); serde_json::to_string_pretty(&value).unwrap() } #[test] - fn deprecated_preview_features_should_give_a_warning() { - let dml = indoc! {r#" + fn single_deprecated_preview_features_should_give_a_warning() { + let schema = indoc! {r#" datasource db { provider = "postgresql" url = env("DATABASE_URL") @@ -72,6 +78,45 @@ mod tests { id String @id } "#}; + let datamodel = SchemaFileInput::Single(schema.to_string()); + + let expected = expect![[r#" + [ + { + "start": 149, + "end": 163, + "text": "Preview feature \"createMany\" is deprecated. The functionality can be used without specifying it as a preview feature.", + "is_warning": true + } + ]"#]]; + + expected.assert_eq(&lint(datamodel)); + } + + #[test] + fn multi_deprecated_preview_features_should_give_a_warning() { + let schema1 = indoc! {r#" + datasource db { + provider = "postgresql" + url = env("DATABASE_URL") + } + + generator client { + provider = "prisma-client-js" + previewFeatures = ["createMany"] + } + "#}; + + let schema2 = indoc! 
{r#" + model A { + id String @id + } + "#}; + + let datamodel = SchemaFileInput::Multiple(vec![ + ("schema1.prisma".to_string(), schema1.into()), + ("schema2.prisma".to_string(), schema2.into()), + ]); let expected = expect![[r#" [ @@ -83,6 +128,6 @@ mod tests { } ]"#]]; - expected.assert_eq(&lint(dml)); + expected.assert_eq(&lint(datamodel)); } } diff --git a/prisma-fmt/src/main.rs b/prisma-fmt/src/main.rs index 5c7a02d917b..fa9c39ad064 100644 --- a/prisma-fmt/src/main.rs +++ b/prisma-fmt/src/main.rs @@ -1,6 +1,6 @@ mod actions; mod format; -mod lint; +// mod lint; mod native; mod preview; @@ -30,7 +30,7 @@ pub struct FormatOpts { /// Prisma Datamodel v2 formatter pub enum FmtOpts { /// Specifies linter mode - Lint, + // Lint, /// Specifies format mode Format(FormatOpts), /// Specifies Native Types mode @@ -46,7 +46,7 @@ pub enum FmtOpts { fn main() { match FmtOpts::from_args() { FmtOpts::DebugPanic => panic!("This is the debugPanic artificial panic"), - FmtOpts::Lint => plug(lint::run), + // FmtOpts::Lint => plug(lint::run), FmtOpts::Format(opts) => format::run(opts), FmtOpts::NativeTypes => plug(native::run), FmtOpts::ReferentialActions => plug(actions::run), diff --git a/prisma-fmt/src/schema_file_input.rs b/prisma-fmt/src/schema_file_input.rs index a7204510ed8..26d3a177c0f 100644 --- a/prisma-fmt/src/schema_file_input.rs +++ b/prisma-fmt/src/schema_file_input.rs @@ -10,17 +10,14 @@ use serde::Deserialize; #[serde(untagged)] pub(crate) enum SchemaFileInput { Single(String), - Multiple(Vec<(String, String)>), + Multiple(Vec<(String, SourceFile)>), } impl From for Vec<(String, SourceFile)> { fn from(value: SchemaFileInput) -> Self { match value { SchemaFileInput::Single(content) => vec![("schema.prisma".to_owned(), content.into())], - SchemaFileInput::Multiple(file_list) => file_list - .into_iter() - .map(|(filename, content)| (filename, content.into())) - .collect(), + SchemaFileInput::Multiple(file_list) => file_list, } } } diff --git 
a/prisma-schema-wasm/src/lib.rs b/prisma-schema-wasm/src/lib.rs index 43288dd32f5..073f1337724 100644 --- a/prisma-schema-wasm/src/lib.rs +++ b/prisma-schema-wasm/src/lib.rs @@ -29,7 +29,7 @@ fn register_panic_hook() { #[wasm_bindgen] pub fn format(schema: String, params: String) -> String { register_panic_hook(); - prisma_fmt::format(&schema, ¶ms) + prisma_fmt::format(schema, ¶ms) } /// Docs: https://prisma.github.io/prisma-engines/doc/prisma_fmt/fn.get_config.html diff --git a/psl/schema-ast/Cargo.toml b/psl/schema-ast/Cargo.toml index be98278dfbb..0eab4dd05a5 100644 --- a/psl/schema-ast/Cargo.toml +++ b/psl/schema-ast/Cargo.toml @@ -8,3 +8,5 @@ diagnostics = { path = "../diagnostics" } pest = "2.1.3" pest_derive = "2.1.0" +serde.workspace = true +serde_json.workspace = true diff --git a/psl/schema-ast/src/source_file.rs b/psl/schema-ast/src/source_file.rs index 63329ad93c3..b53e2eaa5c1 100644 --- a/psl/schema-ast/src/source_file.rs +++ b/psl/schema-ast/src/source_file.rs @@ -1,11 +1,23 @@ use std::sync::Arc; +use serde::{Deserialize, Deserializer}; + /// A Prisma schema document. 
#[derive(Debug, Clone)] pub struct SourceFile { contents: Contents, } +impl<'de> Deserialize<'de> for SourceFile { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + let s: String = serde::de::Deserialize::deserialize(deserializer)?; + Ok(s.into()) + } +} + impl Default for SourceFile { fn default() -> Self { Self { From 6144ce0a93120402191d39552e28375c70a894a4 Mon Sep 17 00:00:00 2001 From: Sophie <29753584+Druue@users.noreply.github.com> Date: Wed, 10 Apr 2024 14:10:59 +0200 Subject: [PATCH 22/25] fix(qe): Add native type `Citext` as invalid batch filter (#4817) * Added DatasourceBuilder * Added support for db extensions in connector_test --- * Added test for prisma/prisma#13534 * Added native type (citext) as an invalid batch filter for postgres * Added `native_type_supports_compacting()` to connector * Added `can_be_compacted` to scalarfield --------- Co-authored-by: Alexey Orlenko Co-authored-by: Flavian Desverne --- .../postgres_datamodel_connector.rs | 9 ++ psl/psl-core/src/datamodel_connector.rs | 4 + .../tests/new/regressions/prisma_15607.rs | 1 + .../queries/batching/select_one_singular.rs | 44 ++++++ .../src/args/connector_test.rs | 21 +++ .../query-test-macros/src/connector_test.rs | 2 + .../src/datamodel_rendering/datasource.rs | 142 ++++++++++++++++++ .../src/datamodel_rendering/mod.rs | 38 ++--- .../query-tests-setup/src/lib.rs | 6 +- query-engine/core/src/query_document/mod.rs | 5 +- .../query-structure/src/field/scalar.rs | 8 + 11 files changed, 260 insertions(+), 20 deletions(-) create mode 100644 query-engine/connector-test-kit-rs/query-tests-setup/src/datamodel_rendering/datasource.rs diff --git a/psl/psl-core/src/builtin_connectors/postgres_datamodel_connector.rs b/psl/psl-core/src/builtin_connectors/postgres_datamodel_connector.rs index 3bb04eed451..c30041b4052 100644 --- a/psl/psl-core/src/builtin_connectors/postgres_datamodel_connector.rs +++ b/psl/psl-core/src/builtin_connectors/postgres_datamodel_connector.rs 
@@ -386,6 +386,15 @@ impl Connector for PostgresDatamodelConnector { } } + fn native_type_supports_compacting(&self, nt: Option) -> bool { + let native_type: Option<&PostgresType> = nt.as_ref().map(|nt| nt.downcast_ref()); + + match native_type { + Some(pt) => !matches!(pt, Citext), + None => true, + } + } + fn validate_model(&self, model: walkers::ModelWalker<'_>, _: RelationMode, errors: &mut Diagnostics) { for index in model.indexes() { validations::compatible_native_types(index, self, errors); diff --git a/psl/psl-core/src/datamodel_connector.rs b/psl/psl-core/src/datamodel_connector.rs index cb4d0bc7acd..3607c0e4af1 100644 --- a/psl/psl-core/src/datamodel_connector.rs +++ b/psl/psl-core/src/datamodel_connector.rs @@ -201,6 +201,10 @@ pub trait Connector: Send + Sync { diagnostics: &mut Diagnostics, ) -> Option; + fn native_type_supports_compacting(&self, _: Option) -> bool { + true + } + fn static_join_strategy_support(&self) -> bool { self.capabilities().contains(ConnectorCapability::LateralJoin) || self.capabilities().contains(ConnectorCapability::CorrelatedSubqueries) diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_15607.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_15607.rs index 3ab34b12010..a4b072256ec 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_15607.rs +++ b/query-engine/connector-test-kit-rs/query-engine-tests/tests/new/regressions/prisma_15607.rs @@ -69,6 +69,7 @@ impl Actor { &[], None, &[], + &[], Some("READ COMMITTED"), ); diff --git a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/batching/select_one_singular.rs b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/batching/select_one_singular.rs index 257620e1bdb..94127531fa6 100644 --- a/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/batching/select_one_singular.rs +++ 
b/query-engine/connector-test-kit-rs/query-engine-tests/tests/queries/batching/select_one_singular.rs @@ -479,6 +479,50 @@ mod singular_batch { Ok(()) } + fn citext_unique() -> String { + let schema = indoc! { r#" + model User { + #id(id, String, @id) + caseInsensitiveField String @unique @test.Citext + }"# + }; + + schema.to_owned() + } + + // Regression test for https://github.com/prisma/prisma/issues/13534 + #[connector_test(only(Postgres), schema(citext_unique), db_extensions("citext"))] + async fn repro_13534(runner: Runner) -> TestResult<()> { + run_query!( + &runner, + r#"mutation { + createOneUser(data: { id: "9df0f936-51d6-4c55-8e01-5144e588a8a1", caseInsensitiveField: "hello world" }) { id } + }"# + ); + + let queries = vec![ + r#"{ findUniqueUser( + where: { caseInsensitiveField: "HELLO WORLD" } + ) { id, caseInsensitiveField } }"# + .to_string(), + r#"{ findUniqueUser( + where: { caseInsensitiveField: "HELLO WORLD" } + ) { id, caseInsensitiveField } }"# + .to_string(), + ]; + + let (res, compact_doc) = compact_batch(&runner, queries.clone()).await?; + + assert!(!compact_doc.is_compact()); + + insta::assert_snapshot!( + res.to_string(), + @r###"{"batchResult":[{"data":{"findUniqueUser":{"id":"9df0f936-51d6-4c55-8e01-5144e588a8a1","caseInsensitiveField":"hello world"}}},{"data":{"findUniqueUser":{"id":"9df0f936-51d6-4c55-8e01-5144e588a8a1","caseInsensitiveField":"hello world"}}}]}"### + ); + + Ok(()) + } + async fn compact_batch(runner: &Runner, queries: Vec) -> TestResult<(QueryResult, BatchDocument)> { let res = runner.batch(queries.clone(), false, None).await?; diff --git a/query-engine/connector-test-kit-rs/query-test-macros/src/args/connector_test.rs b/query-engine/connector-test-kit-rs/query-test-macros/src/args/connector_test.rs index b59058e71cf..521a2f77eca 100644 --- a/query-engine/connector-test-kit-rs/query-test-macros/src/args/connector_test.rs +++ b/query-engine/connector-test-kit-rs/query-test-macros/src/args/connector_test.rs @@ -35,6 
+35,9 @@ pub struct ConnectorTestArgs { #[darling(default)] pub db_schemas: DbSchemas, + + #[darling(default)] + pub db_extensions: DBExtensions, } impl ConnectorTestArgs { @@ -148,6 +151,24 @@ impl darling::FromMeta for DbSchemas { } } +#[derive(Debug, Default)] +pub struct DBExtensions { + db_extensions: Vec, +} + +impl DBExtensions { + pub fn extensions(&self) -> &[String] { + self.db_extensions.as_ref() + } +} + +impl darling::FromMeta for DBExtensions { + fn from_list(items: &[syn::NestedMeta]) -> Result { + let db_extensions = strings_to_list("DbExtensions", items)?; + Ok(Self { db_extensions }) + } +} + impl darling::FromMeta for ExcludeFeatures { fn from_list(items: &[syn::NestedMeta]) -> Result { let features = strings_to_list("Preview Features", items)?; diff --git a/query-engine/connector-test-kit-rs/query-test-macros/src/connector_test.rs b/query-engine/connector-test-kit-rs/query-test-macros/src/connector_test.rs index e963fbccea4..d0058dcb728 100644 --- a/query-engine/connector-test-kit-rs/query-test-macros/src/connector_test.rs +++ b/query-engine/connector-test-kit-rs/query-test-macros/src/connector_test.rs @@ -19,6 +19,7 @@ pub fn connector_test_impl(attr: TokenStream, input: TokenStream) -> TokenStream let excluded_features = args.exclude_features.features(); let db_schemas = args.db_schemas.schemas(); + let db_extensions = args.db_extensions.extensions(); let only = &args.only; let exclude = &args.exclude; let handler = args.schema.unwrap().handler_path; @@ -75,6 +76,7 @@ pub fn connector_test_impl(attr: TokenStream, input: TokenStream) -> TokenStream &[#(#excluded_features),*], #handler, &[#(#db_schemas),*], + &[#(#db_extensions),*], #referential_override, #runner_fn_ident, ); diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/datamodel_rendering/datasource.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/datamodel_rendering/datasource.rs new file mode 100644 index 00000000000..462b069990c --- /dev/null +++ 
b/query-engine/connector-test-kit-rs/query-tests-setup/src/datamodel_rendering/datasource.rs @@ -0,0 +1,142 @@ +use std::iter; + +use indexmap::IndexMap; +use itertools::Itertools; + +pub struct DatasourceBuilder<'a> { + name: &'a str, + properties: IndexMap<&'static str, String>, +} + +impl<'a> DatasourceBuilder<'a> { + pub fn new(name: &'a str) -> Self { + Self { + name, + properties: Default::default(), + } + } + + pub fn provider(mut self, provider: impl AsRef) -> Self { + self.add_debug("provider", provider.as_ref()); + self + } + + pub fn url(mut self, url: impl AsRef) -> Self { + self.add_debug("url", url.as_ref()); + self + } + + pub fn relation_mode(mut self, relation_mode: impl AsRef) -> Self { + self.add_debug("relationMode", relation_mode.as_ref()); + self + } + + pub fn schemas(mut self, schemas: &[&str]) -> Self { + self.add_debug("schemas", schemas); + self + } + + pub fn schemas_if_not_empty(self, schemas: &[&str]) -> Self { + if schemas.is_empty() { + self + } else { + self.schemas(schemas) + } + } + + pub fn extensions(mut self, extensions: &[&str]) -> Self { + self.properties + .insert("extensions", format!("[{}]", extensions.iter().join(", "))); + self + } + + pub fn extensions_if_not_empty(self, extensions: &[&str]) -> Self { + if extensions.is_empty() { + self + } else { + self.extensions(extensions) + } + } + + pub fn render(self) -> String { + iter::once(format!("datasource {} {{", self.name)) + .chain(self.properties.into_iter().map(|(k, v)| format!(" {k} = {v}"))) + .chain(iter::once("}\n".into())) + .join("\n") + } + + fn add_debug(&mut self, key: &'static str, value: impl std::fmt::Debug) { + self.properties.insert(key, format!("{:?}", value)); + } +} + +#[cfg(test)] +mod test { + use indoc::indoc; + + use super::DatasourceBuilder; + + #[test] + fn all() { + let datasource = DatasourceBuilder::new("test") + .provider("postgresql") + .url("postgres://test") + .relation_mode("foreignKeys") + .schemas(&["public"]) + .extensions(&["citext", 
r#"postgis(version: "2.1")"#]) + .render(); + + assert_eq!( + datasource, + indoc! { + r#" + datasource test { + provider = "postgresql" + url = "postgres://test" + relationMode = "foreignKeys" + schemas = ["public"] + extensions = [citext, postgis(version: "2.1")] + } + "# + } + ) + } + + #[test] + fn partial_mixed() { + let datasource = DatasourceBuilder::new("db") + .url("mysql://test") + .provider("mysql") + .render(); + + assert_eq!( + datasource, + indoc! { + r#" + datasource db { + url = "mysql://test" + provider = "mysql" + } + "# + } + ) + } + + #[test] + fn skip_empty_arrays() { + let datasource = DatasourceBuilder::new("invalid") + .schemas_if_not_empty(&[]) + .extensions_if_not_empty(&[]) + .render(); + + assert_eq!( + datasource, + indoc! { + r#" + datasource invalid { + } + "# + } + ) + } +} diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/datamodel_rendering/mod.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/datamodel_rendering/mod.rs index 7295972f981..6e2ba4b2c7b 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/datamodel_rendering/mod.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/datamodel_rendering/mod.rs @@ -1,10 +1,14 @@ +mod datasource; mod mongodb_renderer; mod sql_renderer; pub use mongodb_renderer::*; pub use sql_renderer::*; -use crate::{connection_string, templating, DatamodelFragment, IdFragment, M2mFragment, CONFIG}; +use crate::{ + connection_string, datamodel_rendering::datasource::DatasourceBuilder, templating, DatamodelFragment, IdFragment, + M2mFragment, CONFIG, +}; use indoc::indoc; use itertools::Itertools; use once_cell::sync::Lazy; @@ -34,6 +38,7 @@ pub fn render_test_datamodel( excluded_features: &[&str], relation_mode_override: Option, db_schemas: &[&str], + db_extensions: &[&str], isolation_level: Option<&'static str>, ) -> String { let (tag, version) = CONFIG.test_connector().unwrap(); @@ -41,31 +46,30 @@ pub fn render_test_datamodel( let 
is_multi_schema = !db_schemas.is_empty(); - let schema_def = if is_multi_schema { - format!("schemas = {db_schemas:?}") - } else { - String::default() - }; + let datasource = DatasourceBuilder::new("test") + .provider(tag.datamodel_provider()) + .url(connection_string( + &CONFIG, + &version, + test_database, + is_multi_schema, + isolation_level, + )) + .relation_mode(relation_mode_override.unwrap_or_else(|| tag.relation_mode().to_string())) + .schemas_if_not_empty(db_schemas) + .extensions_if_not_empty(db_extensions) + .render(); let datasource_with_generator = format!( indoc! {r#" - datasource test {{ - provider = "{}" - url = "{}" - relationMode = "{}" - {} - }} + {} generator client {{ provider = "prisma-client-js" previewFeatures = [{}] }} "#}, - tag.datamodel_provider(), - connection_string(&CONFIG, &version, test_database, is_multi_schema, isolation_level), - relation_mode_override.unwrap_or_else(|| tag.relation_mode().to_string()), - schema_def, - preview_features + datasource, preview_features ); let renderer = tag.datamodel_renderer(); diff --git a/query-engine/connector-test-kit-rs/query-tests-setup/src/lib.rs b/query-engine/connector-test-kit-rs/query-tests-setup/src/lib.rs index d7dbd0f5389..de7020ce3ec 100644 --- a/query-engine/connector-test-kit-rs/query-tests-setup/src/lib.rs +++ b/query-engine/connector-test-kit-rs/query-tests-setup/src/lib.rs @@ -159,7 +159,7 @@ fn run_relation_link_test_impl( continue; } - let datamodel = render_test_datamodel(&test_db_name, template, &[], None, Default::default(), None); + let datamodel = render_test_datamodel(&test_db_name, template, &[], None, Default::default(), Default::default(), None); let (connector_tag, version) = CONFIG.test_connector().unwrap(); let metrics = setup_metrics(); let metrics_for_subscriber = metrics.clone(); @@ -215,6 +215,7 @@ pub fn run_connector_test( excluded_features: &[&str], handler: fn() -> String, db_schemas: &[&str], + db_extensions: &[&str], referential_override: Option, test_fn: 
T, ) where @@ -235,6 +236,7 @@ pub fn run_connector_test( excluded_features, handler, db_schemas, + db_extensions, referential_override, &boxify(test_fn), ) @@ -250,6 +252,7 @@ fn run_connector_test_impl( excluded_features: &[&str], handler: fn() -> String, db_schemas: &[&str], + db_extensions: &[&str], referential_override: Option, test_fn: &dyn Fn(Runner) -> BoxFuture<'static, TestResult<()>>, ) { @@ -266,6 +269,7 @@ fn run_connector_test_impl( excluded_features, referential_override, db_schemas, + db_extensions, None, ); let (connector_tag, version) = CONFIG.test_connector().unwrap(); diff --git a/query-engine/core/src/query_document/mod.rs b/query-engine/core/src/query_document/mod.rs index 575e3074df2..5ebe3cc9eed 100644 --- a/query-engine/core/src/query_document/mod.rs +++ b/query-engine/core/src/query_document/mod.rs @@ -73,6 +73,7 @@ impl BatchDocument { /// Those filters are: /// - non scalar filters (ie: relation filters, boolean operators...) /// - any scalar filters that is not `EQUALS` + /// - nativetypes (citext) fn invalid_compact_filter(op: &Operation, schema: &QuerySchema) -> bool { if !op.is_find_unique(schema) { return true; @@ -87,10 +88,10 @@ impl BatchDocument { ArgumentValue::Object(_) if resolve_compound_field(key, &model).is_some() => false, // Otherwise, we just look for a scalar field inside the model. If it's not one, then we break. val => match model.fields().find_from_scalar(key) { - Ok(_) => match val { + Ok(sf) => match val { // Consider scalar _only_ if the filter object contains "equals". 
eg: `{ scalar_field: { equals: 1 } }` ArgumentValue::Object(obj) => !obj.contains_key(filters::EQUALS), - _ => false, + _ => !sf.can_be_compacted(), }, Err(_) => true, }, diff --git a/query-engine/query-structure/src/field/scalar.rs b/query-engine/query-structure/src/field/scalar.rs index c03ada0a9b7..8aeb9c7f47a 100644 --- a/query-engine/query-structure/src/field/scalar.rs +++ b/query-engine/query-structure/src/field/scalar.rs @@ -178,6 +178,14 @@ impl ScalarField { }) } + pub fn can_be_compacted(&self) -> bool { + let connector = self.dm.schema.connector; + + let nt = self.native_type().map(|nt| nt.native_type); + + connector.native_type_supports_compacting(nt) + } + pub fn parse_json_datetime(&self, value: &str) -> chrono::ParseResult> { let nt = self.native_type().map(|nt| nt.native_type); let connector = self.dm.schema.connector; From 8b36be322cc78c7e156e98b8966fb34e5ea04db9 Mon Sep 17 00:00:00 2001 From: Alexey Orlenko Date: Thu, 11 Apr 2024 10:41:05 +0200 Subject: [PATCH 23/25] Revert "qe: add json serialization span in binary engine (#4339)" (#4824) This reverts commit fce8abaff49572845d2800356cd4cec8fd3dac83. 
--- query-engine/query-engine/src/server/mod.rs | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/query-engine/query-engine/src/server/mod.rs b/query-engine/query-engine/src/server/mod.rs index a63504cc766..01b61a07b6b 100644 --- a/query-engine/query-engine/src/server/mod.rs +++ b/query-engine/query-engine/src/server/mod.rs @@ -173,7 +173,7 @@ async fn request_handler(cx: Arc, req: Request) -> Result { let handler = RequestHandler::new(cx.executor(), cx.query_schema(), cx.engine_protocol()); - let mut result = handler.handle(body, tx_id, traceparent).await; + let mut result = handler.handle(body, tx_id, traceparent).instrument(span).await; if let telemetry::capturing::Capturer::Enabled(capturer) = &capture_config { let telemetry = capturer.fetch_captures().await; @@ -183,8 +183,7 @@ async fn request_handler(cx: Arc, req: Request) -> Result, req: Request) -> Result Date: Thu, 11 Apr 2024 14:29:09 +0100 Subject: [PATCH 24/25] fix: make `getFirstNonEmpty` actually filter out empty results (#4820) --- query-engine/core/src/interpreter/interpreter_impl.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/query-engine/core/src/interpreter/interpreter_impl.rs b/query-engine/core/src/interpreter/interpreter_impl.rs index 235536fe2ad..be80d4b4b28 100644 --- a/query-engine/core/src/interpreter/interpreter_impl.rs +++ b/query-engine/core/src/interpreter/interpreter_impl.rs @@ -267,8 +267,9 @@ impl<'conn> QueryInterpreter<'conn> { .find_map(|binding_name| { env.get(&binding_name) .map(|_| env.clone().remove(&binding_name).unwrap()) + .filter(|result| !matches!(result, ExpressionResult::Empty)) }) - .unwrap()) + .unwrap_or(ExpressionResult::Empty)) }), Expression::If { From 6d35870e9578a3e7406168f61f89f84e78c1b38d Mon Sep 17 00:00:00 2001 From: Nikita Lapkov <5737185+laplab@users.noreply.github.com> Date: Thu, 11 Apr 2024 14:30:04 +0100 Subject: [PATCH 25/25] fix: make expression log syntax easier to understand (#4819) --- 
.../core/src/interpreter/interpreter_impl.rs | 49 +++++++++++++------ 1 file changed, 34 insertions(+), 15 deletions(-) diff --git a/query-engine/core/src/interpreter/interpreter_impl.rs b/query-engine/core/src/interpreter/interpreter_impl.rs index be80d4b4b28..012bbc953b1 100644 --- a/query-engine/core/src/interpreter/interpreter_impl.rs +++ b/query-engine/core/src/interpreter/interpreter_impl.rs @@ -184,21 +184,32 @@ impl<'conn> QueryInterpreter<'conn> { Expression::Func { func } => { let expr = func(env.clone()); - Box::pin(async move { self.interpret(expr?, env, level, trace_id).await }) + Box::pin(async move { + self.log_line(level, || "execute {"); + let result = self.interpret(expr?, env, level + 1, trace_id).await; + self.log_line(level, || "}"); + result + }) } - Expression::Sequence { seq } if seq.is_empty() => Box::pin(async { Ok(ExpressionResult::Empty) }), + Expression::Sequence { seq } if seq.is_empty() => Box::pin(async move { + self.log_line(level, || "[]"); + Ok(ExpressionResult::Empty) + }), Expression::Sequence { seq } => { Box::pin(async move { - self.log_line(level, || "SEQ"); + self.log_line(level, || "["); let mut results = Vec::with_capacity(seq.len()); for expr in seq { results.push(self.interpret(expr, env.clone(), level + 1, trace_id.clone()).await?); + self.log_line(level + 1, || ","); } + self.log_line(level, || "]"); + // Last result gets returned Ok(results.pop().unwrap()) }) @@ -210,15 +221,17 @@ impl<'conn> QueryInterpreter<'conn> { } => { Box::pin(async move { let mut inner_env = env.clone(); - self.log_line(level, || "LET"); + self.log_line(level, || "let"); for binding in bindings { - self.log_line(level + 1, || format!("bind {} ", &binding.name)); + self.log_line(level + 1, || format!("{} = {{", &binding.name)); let result = self .interpret(binding.expr, env.clone(), level + 2, trace_id.clone()) .await?; inner_env.insert(binding.name, result); + + self.log_line(level + 1, || "},"); } // the unwrapping improves the readability of 
the log significantly @@ -228,14 +241,17 @@ impl<'conn> QueryInterpreter<'conn> { Expression::Sequence { seq: expressions } }; - self.interpret(next_expression, inner_env, level + 1, trace_id).await + self.log_line(level, || "in {"); + let result = self.interpret(next_expression, inner_env, level + 1, trace_id).await; + self.log_line(level, || "}"); + result }) } Expression::Query { query } => Box::pin(async move { match *query { Query::Read(read) => { - self.log_line(level, || format!("READ {read}")); + self.log_line(level, || format!("readExecute {read}")); let span = info_span!("prisma:engine:read-execute"); Ok(read::execute(self.conn, read, None, trace_id) .instrument(span) @@ -244,7 +260,7 @@ impl<'conn> QueryInterpreter<'conn> { } Query::Write(write) => { - self.log_line(level, || format!("WRITE {write}")); + self.log_line(level, || format!("writeExecute {write}")); let span = info_span!("prisma:engine:write-execute"); Ok(write::execute(self.conn, write, trace_id) .instrument(span) @@ -255,12 +271,12 @@ impl<'conn> QueryInterpreter<'conn> { }), Expression::Get { binding_name } => Box::pin(async move { - self.log_line(level, || format!("GET {binding_name}")); + self.log_line(level, || format!("getVariable {binding_name}")); env.clone().remove(&binding_name) }), Expression::GetFirstNonEmpty { binding_names } => Box::pin(async move { - self.log_line(level, || format!("GET FIRST NON EMPTY {binding_names:?}")); + self.log_line(level, || format!("getFirstNonEmpty {binding_names:?}")); Ok(binding_names .into_iter() @@ -277,19 +293,22 @@ impl<'conn> QueryInterpreter<'conn> { then, else_: elze, } => Box::pin(async move { - self.log_line(level, || "IF"); + let predicate = func(); + self.log_line(level, || format!("if = {predicate} {{")); - if func() { + let result = if predicate { self.interpret(Expression::Sequence { seq: then }, env, level + 1, trace_id) .await } else { self.interpret(Expression::Sequence { seq: elze }, env, level + 1, trace_id) .await - } + }; + 
self.log_line(level, || "}"); + result }), Expression::Return { result } => Box::pin(async move { - self.log_line(level, || "RETURN"); + self.log_line(level, || "returnValue"); Ok(*result) }), } @@ -298,7 +317,7 @@ impl<'conn> QueryInterpreter<'conn> { pub(crate) fn log_output(&self) -> String { let mut output = String::with_capacity(self.log.len() * 30); - for s in self.log.iter().rev() { + for s in self.log.iter() { output.push_str(s) }