Skip to content

Commit

Permalink
feat: sqlite enums (#5101)
Browse files Browse the repository at this point in the history
* feat: sqlite enums

* test: update capability test and rename a test

* [integration]

* test: make sqlite enum tests actually run

* test: skip remapping introspection tests

* test: fix tests

* test: fix typo

* test: cover invalid enum runtime error

* test: cover mongo runtime error

* doc: explain default comparison between enum and string

* doc: correct mistake
  • Loading branch information
jacek-prisma authored Jan 6, 2025
1 parent c49e56c commit 51db5cf
Show file tree
Hide file tree
Showing 17 changed files with 289 additions and 90 deletions.
2 changes: 1 addition & 1 deletion libs/test-setup/src/test_api_args.rs
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,7 @@ static DB_UNDER_TEST: Lazy<Result<DbUnderTest, String>> = Lazy::new(|| {
"file" | "sqlite" => Ok(DbUnderTest {
database_url,
tags: Tags::Sqlite.into(),
capabilities: Capabilities::CreateDatabase | Capabilities::Json,
capabilities: Capabilities::CreateDatabase | Capabilities::Enums | Capabilities::Json,
provider: "sqlite",
shadow_database_url,
max_ddl_refresh_delay: None,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,8 @@ pub const CAPABILITIES: ConnectorCapabilities = enumflags2::make_bitflags!(Conne
Json |
JsonFiltering |
JsonFilteringJsonPath |
AdvancedJsonNullability
AdvancedJsonNullability |
Enums
});

pub struct SqliteDatamodelConnector;
Expand Down
16 changes: 1 addition & 15 deletions psl/psl/tests/capabilities/sqlite.rs
Original file line number Diff line number Diff line change
Expand Up @@ -19,21 +19,7 @@ fn enum_support() {
}
"#};

let error = parse_unwrap_err(dml);

let expectation = expect![[r#"
error: Error validating: You defined the enum `Status`. But the current connector does not support enums.
--> schema.prisma:11
 | 
10 | 
11 | enum Status {
12 |  DONE
13 |  NOT_DONE
14 | }
 | 
"#]];

expectation.assert_eq(&error);
assert_valid(dml);
}

#[test]
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -101,6 +101,84 @@ mod enum_type {
Ok(())
}

#[connector_test(only(Sqlite))]
async fn read_one_invalid_sqlite(runner: Runner) -> TestResult<()> {
runner
.query(r#"mutation { executeRaw(query: "INSERT INTO \"TestModel\" (id, my_enum) VALUES(1, 'D')", parameters: "[]") }"#)
.await?
.assert_success();

match runner.protocol() {
EngineProtocol::Graphql => {
let res = runner
.query(r#"{ findUniqueTestModel(where: { id: 1 }) { my_enum } }"#)
.await?;
res.assert_failure(None, Some("Value 'D' not found in enum 'MyEnum'".to_owned()));
}
EngineProtocol::Json => {
let res = runner
.query_json(
r#"{
"modelName": "TestModel",
"action": "findUnique",
"query": {
"arguments": {
"where": { "id": 1 }
},
"selection": {
"my_enum": true
}
}
}"#,
)
.await?;

res.assert_failure(None, Some("Value 'D' not found in enum 'MyEnum'".to_owned()));
}
}

Ok(())
}

#[connector_test(only(MongoDB))]
async fn read_one_invalid_mongo(runner: Runner) -> TestResult<()> {
runner
.query(r#"mutation { runCommandRaw(command: "{\"insert\": \"TestModel\", \"documents\": [{ \"_id\": 1, \"my_enum\": \"D\"}]}") }"#)
.await?
.assert_success();

match runner.protocol() {
EngineProtocol::Graphql => {
let res = runner
.query(r#"{ findUniqueTestModel(where: { id: 1 }) { my_enum } }"#)
.await?;
res.assert_failure(None, Some("Value 'D' not found in enum 'MyEnum'".to_owned()));
}
EngineProtocol::Json => {
let res = runner
.query_json(
r#"{
"modelName": "TestModel",
"action": "findUnique",
"query": {
"arguments": {
"where": { "id": 1 }
},
"selection": {
"my_enum": true
}
}
}"#,
)
.await?;

res.assert_failure(None, Some("Value 'D' not found in enum 'MyEnum'".to_owned()));
}
}

Ok(())
}

async fn create_test_data(runner: &Runner) -> TestResult<()> {
create_row(runner, r#"{ id: 1, my_enum: A }"#).await?;
create_row(runner, r#"{ id: 2, my_enum: B }"#).await?;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -411,7 +411,9 @@ fn push_column_for_model_enum_scalar_field(
let column = sql::Column {
name: field.database_name().to_owned(),
tpe: sql::ColumnType::pure(
sql::ColumnTypeFamily::Enum(ctx.enum_ids[&r#enum.id]),
ctx.flavour
.column_type_for_enum(r#enum, ctx)
.expect("should have a column type for enum"),
column_arity(field.ast_field().arity),
),
auto_increment: false,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,10 @@ use sql_schema_describer::{self as sql, ColumnArity, ColumnType, ColumnTypeFamil
pub(crate) trait SqlSchemaCalculatorFlavour {
fn calculate_enums(&self, _ctx: &mut super::Context<'_>) {}

    /// Maps a Prisma enum to the SQL column type family its columns should use.
    ///
    /// Returns `None` by default; flavours that support enums (natively or via
    /// an emulated representation) override this to supply a concrete family.
    fn column_type_for_enum(&self, _enm: EnumWalker<'_>, _ctx: &super::Context<'_>) -> Option<sql::ColumnTypeFamily> {
        None
    }

fn column_default_value_for_autoincrement(&self) -> Option<sql::DefaultValue> {
None
}
Expand Down
Original file line number Diff line number Diff line change
@@ -1,5 +1,8 @@
use psl::parser_database::walkers::EnumWalker;

use super::{super::Context, SqlSchemaCalculatorFlavour};
use crate::flavour::MysqlFlavour;
use sql_schema_describer as sql;

impl SqlSchemaCalculatorFlavour for MysqlFlavour {
fn calculate_enums(&self, ctx: &mut Context<'_>) {
Expand All @@ -23,4 +26,8 @@ impl SqlSchemaCalculatorFlavour for MysqlFlavour {
}
}
}

fn column_type_for_enum(&self, enm: EnumWalker<'_>, ctx: &Context<'_>) -> Option<sql::ColumnTypeFamily> {
ctx.enum_ids.get(&enm.id).map(|id| sql::ColumnTypeFamily::Enum(*id))
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ use either::Either;
use psl::{
builtin_connectors::{cockroach_datamodel_connector::SequenceFunction, PostgresDatasourceProperties},
datamodel_connector::walker_ext_traits::IndexWalkerExt,
parser_database::{IndexAlgorithm, OperatorClass},
parser_database::{walkers::EnumWalker, IndexAlgorithm, OperatorClass},
};
use sql::postgres::DatabaseExtension;
use sql_schema_describer::{self as sql, postgres::PostgresSchemaExt};
Expand All @@ -29,6 +29,10 @@ impl SqlSchemaCalculatorFlavour for PostgresFlavour {
}
}

fn column_type_for_enum(&self, enm: EnumWalker<'_>, ctx: &Context<'_>) -> Option<sql::ColumnTypeFamily> {
ctx.enum_ids.get(&enm.id).map(|id| sql::ColumnTypeFamily::Enum(*id))
}

fn column_default_value_for_autoincrement(&self) -> Option<sql::DefaultValue> {
if self.is_cockroachdb() {
Some(sql::DefaultValue::unique_rowid())
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
use super::SqlSchemaCalculatorFlavour;
use crate::flavour::SqliteFlavour;
use crate::{flavour::SqliteFlavour, sql_schema_calculator::Context};
use psl::parser_database::{walkers::*, ScalarType};
use sql_schema_describer::ColumnTypeFamily;

impl SqlSchemaCalculatorFlavour for SqliteFlavour {
// Integer primary keys on SQLite are automatically assigned the rowid, which means they are automatically autoincrementing.
Expand All @@ -12,4 +13,8 @@ impl SqlSchemaCalculatorFlavour for SqliteFlavour {
.unwrap_or(false)
&& field.scalar_type() == Some(ScalarType::Int)
}

    fn column_type_for_enum(&self, _enm: EnumWalker<'_>, _ctx: &Context<'_>) -> Option<ColumnTypeFamily> {
        // SQLite has no native enum type, so every enum field is stored as a
        // plain string column, regardless of which enum it references.
        Some(ColumnTypeFamily::String)
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -81,6 +81,16 @@ fn defaults_match(cols: MigrationPair<TableColumnWalker<'_>>, flavour: &dyn SqlF

(Some(DefaultKind::Value(PrismaValue::Int(i))), Some(DefaultKind::Value(PrismaValue::BigInt(j))))
| (Some(DefaultKind::Value(PrismaValue::BigInt(i))), Some(DefaultKind::Value(PrismaValue::Int(j)))) => i == j,

// SQLite introspection recognizes enum defaults as PrismaValue::String since SQLite does
// not support enums natively, while the Prisma schema recognizes them as
// PrismaValue::Enum. In order to avoid generating a diff we need to consider them equal
// if the underlying string values are equal.
(
Some(DefaultKind::Value(PrismaValue::Enum(prev) | PrismaValue::String(prev))),
Some(DefaultKind::Value(PrismaValue::String(next) | PrismaValue::Enum(next))),
) => prev == next && names_match,

(Some(DefaultKind::Value(prev)), Some(DefaultKind::Value(next))) => (prev == next) && names_match,
(Some(DefaultKind::Value(_)), Some(DefaultKind::Now)) => false,
(Some(DefaultKind::Value(_)), None) => false,
Expand Down
6 changes: 3 additions & 3 deletions schema-engine/sql-introspection-tests/tests/enums/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ use quaint::prelude::Queryable;
use sql_introspection_tests::{test_api::*, TestResult};
use test_macros::test_connector;

#[test_connector(exclude(CockroachDb), capabilities(Enums))]
#[test_connector(exclude(CockroachDb, Sqlite), capabilities(Enums))]
async fn a_table_with_enums(api: &mut TestApi) -> TestResult {
let sql_family = api.sql_family();

Expand Down Expand Up @@ -67,7 +67,7 @@ async fn a_table_with_enums(api: &mut TestApi) -> TestResult {
Ok(())
}

#[test_connector(exclude(CockroachDb), capabilities(Enums))]
#[test_connector(exclude(CockroachDb, Sqlite), capabilities(Enums))]
async fn a_table_enums_should_return_alphabetically_even_when_in_different_order(api: &mut TestApi) -> TestResult {
let sql_family = api.sql_family();

Expand Down Expand Up @@ -129,7 +129,7 @@ async fn a_table_enums_should_return_alphabetically_even_when_in_different_order
Ok(())
}

#[test_connector(exclude(CockroachDb), capabilities(Enums))]
#[test_connector(exclude(CockroachDb, Sqlite), capabilities(Enums))]
async fn a_table_with_enum_default_values(api: &mut TestApi) -> TestResult {
let sql_family = api.sql_family();

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -331,7 +331,7 @@ async fn mapped_field_name(api: &mut TestApi) -> TestResult {
Ok(())
}

#[test_connector(capabilities(Enums), exclude(CockroachDb))]
#[test_connector(capabilities(Enums), exclude(CockroachDb, Sqlite))]
async fn mapped_enum_name(api: &mut TestApi) -> TestResult {
let sql_family = api.sql_family();

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -262,7 +262,7 @@ async fn remapping_fields_in_compound_relations(api: &mut TestApi) -> TestResult
Ok(())
}

#[test_connector(capabilities(Enums), exclude(CockroachDb))]
#[test_connector(capabilities(Enums), exclude(CockroachDb, Sqlite))]
async fn remapping_enum_values(api: &mut TestApi) -> TestResult {
let sql_family = api.sql_family();

Expand Down Expand Up @@ -310,7 +310,7 @@ async fn remapping_enum_values(api: &mut TestApi) -> TestResult {
Ok(())
}

#[test_connector(capabilities(Enums), exclude(CockroachDb))]
#[test_connector(capabilities(Enums), exclude(CockroachDb, Sqlite))]
async fn remapping_enum_default_values(api: &mut TestApi) -> TestResult {
let sql_family = api.sql_family();

Expand Down Expand Up @@ -404,7 +404,7 @@ async fn not_automatically_remapping_invalid_compound_unique_key_names(api: &mut

let dm = indoc! {r#"
model User {
id Int @id @default(autoincrement())
id Int @id @default(autoincrement())
first Int
last Int
Expand Down
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
use std::borrow::Cow;

use sql_migration_tests::test_api::*;

#[test_connector]
Expand Down Expand Up @@ -137,15 +139,16 @@ fn evaluate_data_loss_with_past_unapplied_migrations_with_destructive_changes_do
"#,
);

let is_mysql = api.is_mysql();
let var_name: &[Cow<'_, str>] = if api.is_mysql() {
&["The values [PLAYFUL] on the enum `Cat_mood` will be removed. If these variants are still used in the database, this will fail.".into()]
} else if api.is_sqlite() {
&[]
} else {
&["The values [PLAYFUL] on the enum `CatMood` will be removed. If these variants are still used in the database, this will fail.".into()]
};
api.evaluate_data_loss(&directory, dm2.clone())
.send()
.assert_warnings(&[if is_mysql {
"The values [PLAYFUL] on the enum `Cat_mood` will be removed. If these variants are still used in the database, this will fail."
} else {
"The values [PLAYFUL] on the enum `CatMood` will be removed. If these variants are still used in the database, this will fail."
}
.into()]);
.assert_warnings(var_name);

api.create_migration("2-remove-value", &dm2, &directory).send_sync();

Expand Down Expand Up @@ -274,14 +277,17 @@ fn evaluate_data_loss_maps_warnings_to_the_right_steps(api: TestApi) {
api.normalize_identifier("Cat")
);

let is_postgres = api.is_postgres();
let expected_warnings = if api.is_postgres() || api.is_sqlite() { 1 } else { 0 };
let expected_unexecutables = if api.is_postgres() { 2 } else { 1 };

#[allow(clippy::bool_to_int_with_if)]
api.evaluate_data_loss(&directory, dm2)
.send()
.assert_warnings_with_indices(&[(warn.into(), if is_postgres { 1 } else { 0 })])
.assert_warnings_with_indices(&[(warn.into(), expected_warnings)])
.assert_unexecutables_with_indices(&[
("Added the required column `isGoodDog` to the `Dog` table without a default value. There are 1 rows in this table, it is not possible to execute this step.".into(), if is_postgres { 2 } else { 1 }),
("Added the required column `isGoodDog` to the `Dog` table without a default value. There are 1 rows in this table, it is not possible to execute this step.".into(),
expected_unexecutables
),
]);
}

Expand Down Expand Up @@ -338,13 +344,15 @@ fn evaluate_data_loss_multi_file_maps_warnings_to_the_right_steps(api: TestApi)
api.normalize_identifier("Cat")
);

let is_postgres = api.is_postgres();

let expected_warnings = if api.is_postgres() || api.is_sqlite() { 1 } else { 0 };
let expected_unexecutables = if api.is_postgres() { 2 } else { 1 };
#[allow(clippy::bool_to_int_with_if)]
api.evaluate_data_loss_multi_file(&directory, &[("schema_a", &schema_a), ("schema_b", schema_b)])
.send()
.assert_warnings_with_indices(&[(warn.into(), if is_postgres { 1 } else { 0 })])
.assert_warnings_with_indices(&[(warn.into(), expected_warnings)])
.assert_unexecutables_with_indices(&[
("Added the required column `isGoodDog` to the `Dog` table without a default value. There are 1 rows in this table, it is not possible to execute this step.".into(), if is_postgres { 2 } else { 1 }),
("Added the required column `isGoodDog` to the `Dog` table without a default value. There are 1 rows in this table, it is not possible to execute this step.".into(),
expected_unexecutables
),
]);
}
Loading

0 comments on commit 51db5cf

Please sign in to comment.