
feat(schema-engine): add support for multiple split schema files #4809

Closed · wants to merge 17 commits
8 changes: 8 additions & 0 deletions Cargo.lock


1 change: 1 addition & 0 deletions Cargo.toml
@@ -63,6 +63,7 @@ napi = { version = "2.15.1", default-features = false, features = [
"serde-json",
] }
napi-derive = "2.15.0"
nonempty = "0.10.0"
js-sys = { version = "0.3" }
rand = { version = "0.8" }
serde_repr = { version = "0.1.17" }
42 changes: 29 additions & 13 deletions libs/test-cli/src/main.rs
@@ -196,21 +196,24 @@ async fn main() -> anyhow::Result<()> {
);
}

let schema = if let Some(file_path) = file_path {
read_datamodel_from_file(&file_path)?
} else if let Some(url) = url {
minimal_schema_from_url(&url)?
let schema = if let Some(file_path) = &file_path {
read_datamodel_from_file(file_path)?
} else if let Some(url) = &url {
minimal_schema_from_url(url)?
} else {
unreachable!()
};

let api = schema_core::schema_api(Some(schema.clone()), None)?;

let params = IntrospectParams {
schema,
schemas: vec![SchemaContainer {
file_path: file_path.unwrap_or_else(|| "schema.prisma".to_string()),
schema,
}],
force: false,
composite_type_depth: composite_type_depth.unwrap_or(0),
schemas: None,
namespaces: None,
};

let introspected = api.introspect(params).await.map_err(|err| anyhow::anyhow!("{err:?}"))?;
@@ -240,7 +243,10 @@ async fn main() -> anyhow::Result<()> {
let api = schema_core::schema_api(Some(schema.clone()), None)?;

api.create_database(CreateDatabaseParams {
datasource: DatasourceParam::SchemaString(SchemaContainer { schema }),
datasource: DatasourceParam::SchemaString(vec![SchemaContainer {
file_path: "schema.prisma".to_string(),
schema,
}]),
})
.await?;
}
@@ -252,7 +258,10 @@ async fn main() -> anyhow::Result<()> {

let input = CreateMigrationInput {
migrations_directory_path: cmd.migrations_path,
prisma_schema,
schemas: vec![SchemaContainer {
file_path: cmd.schema_path,
schema: prisma_schema,
}],
migration_name: cmd.name,
draft: true,
};
@@ -315,10 +324,13 @@ async fn generate_dmmf(cmd: &DmmfCommand) -> anyhow::Result<()> {
let api = schema_core::schema_api(Some(skeleton.clone()), None)?;

let params = IntrospectParams {
schema: skeleton,
schemas: vec![SchemaContainer {
file_path: "schema.prisma".to_string(),
schema: skeleton,
}],
force: false,
composite_type_depth: -1,
schemas: None,
namespaces: None,
};

let introspected = api.introspect(params).await.map_err(|err| anyhow::anyhow!("{err:?}"))?;
@@ -355,7 +367,10 @@ async fn schema_push(cmd: &SchemaPush) -> anyhow::Result<()> {

let response = api
.schema_push(SchemaPushInput {
schema,
schemas: vec![SchemaContainer {
file_path: cmd.schema_path.clone(),
schema,
}],
force: cmd.force,
})
.await?;
@@ -414,9 +429,10 @@ async fn migrate_diff(cmd: &MigrateDiff) -> anyhow::Result<()> {

let api = schema_core::schema_api(None, Some(Arc::new(DiffHost)))?;
let to = if let Some(to_schema_datamodel) = &cmd.to_schema_datamodel {
DiffTarget::SchemaDatamodel(SchemaContainer {
DiffTarget::SchemaDatamodel(vec![SchemaContainer {
file_path: "schema.prisma".to_string(),
schema: to_schema_datamodel.clone(),
})
}])
} else {
todo!("can't handle {:?} yet", cmd)
};
16 changes: 8 additions & 8 deletions prisma-fmt/src/lib.rs
@@ -45,6 +45,14 @@ pub fn code_actions(schema: String, params: &str) -> String {
serde_json::to_string(&actions).unwrap()
}

/// Given a list of Prisma schema files (and their locations), returns the merged schema.
/// This is useful for `@prisma/client` generation, where the client needs a single - potentially large - schema,
/// while still allowing the user to split their schema copies into multiple files.
/// Internally, it uses `[validate]`.
pub fn merge_schemas(params: String) -> String {
merge_schemas::merge_schemas(&params)
}

/// The two parameters are:
/// - The [`SchemaFileInput`] to reformat, as a string.
/// - An LSP
@@ -114,14 +122,6 @@ pub fn validate(validate_params: String) -> Result<(), String> {
validate::validate(&validate_params)
}

/// Given a list of Prisma schema files (and their locations), returns the merged schema.
/// This is useful for `@prisma/client` generation, where the client needs a single - potentially large - schema,
/// while still allowing the user to split their schema copies into multiple files.
/// Internally, it uses `[validate]`.
pub fn merge_schemas(params: String) -> Result<String, String> {
merge_schemas::merge_schemas(&params)
}

pub fn native_types(schema: String) -> String {
native::run(&schema)
}
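For illustration, a minimal sketch of calling the new `merge_schemas` entry point. The JSON encoding of `SchemaFileInput` (assumed here to be an array of [file_path, contents] pairs under a "schema" key) is an assumption based on `MergeSchemasParams` below, not something this diff pins down:

// Illustrative sketch only: merge two schema files into one rendered schema.
// Assumes serde_json is in scope and that SchemaFileInput deserializes from
// an array of [file_path, contents] pairs.
let params = serde_json::json!({
    "schema": [
        ["db.prisma", "datasource db {\n  provider = \"postgresql\"\n  url = env(\"DATABASE_URL\")\n}"],
        ["user.prisma", "model User {\n  id Int @id\n}"]
    ]
});

// Panics if the combined schemas fail validation, so callers should run `validate` first.
let merged: String = prisma_fmt::merge_schemas(params.to_string());
println!("{merged}");

The prisma-schema-wasm wrapper further down exposes the same function to JavaScript callers.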
19 changes: 12 additions & 7 deletions prisma-fmt/src/merge_schemas.rs
@@ -8,20 +8,24 @@ pub struct MergeSchemasParams {
schema: SchemaFileInput,
}

pub(crate) fn merge_schemas(params: &str) -> Result<String, String> {
pub(crate) fn merge_schemas(params: &str) -> String {
let params: MergeSchemasParams = match serde_json::from_str(params) {
Ok(params) => params,
Err(serde_err) => {
panic!("Failed to deserialize MergeSchemasParams: {serde_err}");
}
};

let validated_schema = crate::validate::run(params.schema, false)?;
let params_as_vec: Vec<_> = params.schema.into();
let validated_schema = psl::validate_multi_file(params_as_vec.clone());

let indent_width = 2usize;
let merged_schema = reformat_validated_schema_into_single(validated_schema, indent_width).unwrap();
// The diagnostics aren't supposed to contain errors, as the schemas should have been validated beforehand.
if validated_schema.diagnostics.has_errors() {
panic!("Invalid schemas.");
}

Ok(merged_schema)
let indent_width = 2usize;
reformat_validated_schema_into_single(validated_schema, indent_width).unwrap()
}

#[cfg(test)]
@@ -81,11 +85,12 @@ mod tests {
}
"#]];

let response = merge_schemas(&request.to_string()).unwrap();
let response = merge_schemas(&request.to_string());
expected.assert_eq(&response);
}

#[test]
#[should_panic]
fn merge_two_invalid_schemas_panics() {
let schema = vec![
(
@@ -121,7 +126,7 @@
r#"{"error_code":"P1012","message":"\u001b[1;91merror\u001b[0m: \u001b[1mError validating field `a` in model `B`: The relation field `a` on model `B` is missing an opposite relation field on the model `A`. Either run `prisma format` or add it manually.\u001b[0m\n \u001b[1;94m-->\u001b[0m \u001b[4mb.prisma:4\u001b[0m\n\u001b[1;94m | \u001b[0m\n\u001b[1;94m 3 | \u001b[0m id String @id\n\u001b[1;94m 4 | \u001b[0m \u001b[1;91ma A?\u001b[0m\n\u001b[1;94m 5 | \u001b[0m }\n\u001b[1;94m | \u001b[0m\n\nValidation Error Count: 1"}"#
]];

let response = merge_schemas(&request.to_string()).unwrap_err();
let response = merge_schemas(&request.to_string());
expected.assert_eq(&response);
}
}
8 changes: 4 additions & 4 deletions prisma-schema-wasm/src/lib.rs
@@ -53,15 +53,15 @@ pub fn lint(input: String) -> String {
}

#[wasm_bindgen]
pub fn validate(params: String) -> Result<(), JsError> {
pub fn merge_schemas(input: String) -> String {
register_panic_hook();
prisma_fmt::validate(params).map_err(|e| JsError::new(&e))
prisma_fmt::merge_schemas(input)
}

#[wasm_bindgen]
pub fn merge_schemas(input: String) -> Result<String, JsError> {
pub fn validate(params: String) -> Result<(), JsError> {
register_panic_hook();
prisma_fmt::merge_schemas(input).map_err(|e| JsError::new(&e))
prisma_fmt::validate(params).map_err(|e| JsError::new(&e))
}

#[wasm_bindgen]
5 changes: 5 additions & 0 deletions psl/parser-database/src/lib.rs
@@ -218,6 +218,11 @@ impl ParserDatabase {
self.asts.iter().map(|ast| ast.2.as_str())
}

/// Iterate all source file contents and their file paths.
pub fn iter_file_sources(&self) -> impl Iterator<Item = (&str, &str)> {
self.asts.iter().map(|ast| (ast.1.as_str(), ast.2.as_str()))
}

/// The name of the file.
pub fn file_name(&self, file_id: FileId) -> &str {
self.asts[file_id].0.as_str()
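A small sketch of how the new iterator might be consumed. It assumes `psl::validate_multi_file` accepts the same `(file_path, SourceFile)` pairs as `parse_schema_multi` further down, that `SourceFile` is re-exported from `psl`, and that the parser database is reachable through the validated schema's `db` field:

// Sketch: list every file that contributed to a validated multi-file schema.
let schema = psl::validate_multi_file(vec![
    (
        "db.prisma".to_string(),
        psl::SourceFile::from("datasource db {\n  provider = \"postgresql\"\n  url = env(\"DATABASE_URL\")\n}".to_string()),
    ),
    (
        "user.prisma".to_string(),
        psl::SourceFile::from("model User {\n  id Int @id\n}".to_string()),
    ),
]);
for (file_path, contents) in schema.db.iter_file_sources() {
    println!("{file_path}: {} bytes", contents.len());
}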
27 changes: 27 additions & 0 deletions psl/psl-core/src/lib.rs
@@ -149,6 +149,33 @@ pub fn parse_configuration(
diagnostics.to_result().map(|_| out)
}

/// Loads all configuration blocks from a list of datamodels using the built-in source definitions.
pub fn parse_configuration_multi_file(
files: Vec<(String, SourceFile)>,
connectors: ConnectorRegistry<'_>,
) -> Result<Configuration, diagnostics::Diagnostics> {
assert!(
!files.is_empty(),
"psl::parse_configuration_multi_file() must be called with at least one file"
);

let mut diagnostics = Diagnostics::new();
let db = ParserDatabase::new(files, &mut diagnostics);

// TODO: the bulk of configuration block analysis should be part of ParserDatabase::new().
let mut configuration = Configuration::default();

for ast in db.iter_asts() {
let new_config = validate_configuration(ast, &mut diagnostics, connectors);

configuration.datasources.extend(new_config.datasources.into_iter());
configuration.generators.extend(new_config.generators.into_iter());
configuration.warnings.extend(new_config.warnings.into_iter());
}

diagnostics.to_result().map(|_| configuration)
}

fn validate_configuration(
schema_ast: &ast::SchemaAst,
diagnostics: &mut Diagnostics,
16 changes: 16 additions & 0 deletions psl/psl/src/lib.rs
@@ -40,6 +40,12 @@ pub fn parse_configuration(schema: &str) -> Result<Configuration, Diagnostics> {
psl_core::parse_configuration(schema, builtin_connectors::BUILTIN_CONNECTORS)
}

/// Parses and validates a schema, but skips analyzing everything except datasource and generator
/// blocks.
pub fn parse_configuration_multi_file(schemas: Vec<(String, SourceFile)>) -> Result<Configuration, Diagnostics> {
psl_core::parse_configuration_multi_file(schemas, builtin_connectors::BUILTIN_CONNECTORS)
}

/// Parse and analyze a Prisma schema.
pub fn parse_schema(file: impl Into<SourceFile>) -> Result<ValidatedSchema, String> {
let mut schema = validate(file.into());
@@ -50,6 +56,16 @@ pub fn parse_schema(file: impl Into<SourceFile>) -> Result<ValidatedSchema, Stri
Ok(schema)
}

/// Parse and analyze a set of Prisma schema files.
pub fn parse_schema_multi(files: Vec<(String, SourceFile)>) -> Result<ValidatedSchema, String> {
let mut schema = validate_multi_file(files);
schema
.diagnostics
.to_result()
.map_err(|err| err.to_pretty_string("schema.prisma", schema.db.source_assert_single()))?;
Ok(schema)
}

/// The most general API for dealing with Prisma schemas. It accumulates what analysis and
/// validation information it can, and returns it along with any error and warning diagnostics.
pub fn validate(file: SourceFile) -> ValidatedSchema {
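A hedged sketch of how downstream code might call the two new multi-file entry points. The error handling and field accesses are illustrative only; they assume `SourceFile` is re-exported from `psl` and that `Configuration`'s `datasources` and `generators` fields are publicly accessible, as they are inside psl-core:

use psl::SourceFile;

// Sketch: load a schema split across multiple files.
fn load_multi(files: Vec<(String, SourceFile)>) -> Result<(), String> {
    // Only datasource and generator blocks are analyzed here.
    let config = psl::parse_configuration_multi_file(files.clone())
        .map_err(|_diagnostics| "invalid datasource or generator block".to_string())?;
    println!(
        "{} datasource(s), {} generator(s)",
        config.datasources.len(),
        config.generators.len()
    );

    // Full parse and validation across all files; errors come back as a pretty-printed String.
    let _schema = psl::parse_schema_multi(files)?;
    Ok(())
}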
4 changes: 2 additions & 2 deletions psl/schema-ast/src/source_file.rs
@@ -3,7 +3,7 @@ use std::sync::Arc;
use serde::{Deserialize, Deserializer};

/// A Prisma schema document.
#[derive(Debug, Clone)]
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct SourceFile {
contents: Contents,
}
@@ -77,7 +77,7 @@ impl From<String> for SourceFile {
}
}

#[derive(Debug, Clone)]
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
enum Contents {
Static(&'static str),
Allocated(Arc<str>),
6 changes: 5 additions & 1 deletion query-engine/connector-test-kit-rs/qe-setup/src/lib.rs
@@ -151,7 +151,11 @@ pub(crate) async fn diff(schema: &str, url: String, connector: &mut dyn SchemaCo
.database_schema_from_diff_target(DiffTarget::Empty, None, None)
.await?;
let to = connector
.database_schema_from_diff_target(DiffTarget::Datamodel(schema.into()), None, None)
.database_schema_from_diff_target(
DiffTarget::Datamodel(vec![("schema.prisma".to_string(), schema.into())]),
None,
None,
)
.await?;
let migration = connector.diff(from, to);
connector.render_script(&migration, &Default::default())
1 change: 1 addition & 0 deletions schema-engine/cli/Cargo.toml
@@ -14,6 +14,7 @@ backtrace = "0.3.59"
base64 = "0.13"
jsonrpc-core = "17"
structopt = "0.3.8"
nonempty.workspace = true
serde_json.workspace = true
serde.workspace = true
tokio.workspace = true
2 changes: 1 addition & 1 deletion schema-engine/cli/src/commands.rs
@@ -24,7 +24,7 @@ impl Cli {
}

pub(crate) async fn run_inner(self) -> Result<String, ConnectorError> {
let api = schema_core::schema_api(None, None)?;
let api = schema_core::empty_schema_api();
match self.command {
CliCommand::CreateDatabase => {
let schema_core::json_rpc::types::CreateDatabaseResult { database_name } = api