Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We'll occasionally send you account related emails.

Already on GitHub? Sign in to your account

qe: replace InternalDatamodelBuilder with a simple fn #3182

Merged
merged 2 commits into from Sep 13, 2022
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
@@ -1,5 +1,4 @@
use crate::{ConnectorTag, RunnerInterface, TestResult, TxResult};
use prisma_models::InternalDataModelBuilder;
use query_core::{executor, schema::QuerySchemaRef, schema_builder, QueryExecutor, TxId};
use query_engine_metrics::MetricRegistry;
use request_handlers::{GraphQlBody, GraphQlHandler, MultiQuery};
Expand All @@ -24,7 +23,7 @@ impl RunnerInterface for DirectRunner {
let preview_features: Vec<_> = schema.configuration.preview_features().iter().collect();
let url = data_source.load_url(|key| env::var(key).ok()).unwrap();
let (db_name, executor) = executor::load(data_source, &preview_features, &url).await?;
let internal_data_model = InternalDataModelBuilder::new(&schema).build(db_name);
let internal_data_model = prisma_models::convert(&schema, db_name);

let query_schema: QuerySchemaRef = Arc::new(schema_builder::build(
internal_data_model,
Expand Down
@@ -1,8 +1,4 @@
use super::{
expression::{self},
into_expression::IntoUpdateExpression,
};

use super::{expression, into_expression::IntoUpdateExpression};
use connector_interface::{FieldPath, Filter};
use mongodb::bson::{doc, Document};

Expand Down
3 changes: 1 addition & 2 deletions query-engine/dmmf/src/lib.rs
Expand Up @@ -4,7 +4,6 @@ mod serialization_ast;
pub use serialization_ast::DataModelMetaFormat;

use ast_builders::{schema_to_dmmf, DmmfQuerySchemaRenderer};
use prisma_models::InternalDataModelBuilder;
use schema::{QuerySchemaRef, QuerySchemaRenderer};
use std::sync::Arc;

Expand All @@ -22,7 +21,7 @@ pub fn dmmf_from_schema(schema: &str) -> DataModelMetaFormat {
// We only support one data source at the moment, so take the first one (default not exposed yet).
let data_source = config.datasources.first().unwrap();
let preview_features: Vec<_> = config.preview_features().iter().collect();
let internal_data_model = InternalDataModelBuilder::from(&dml).build("dummy".to_owned());
let internal_data_model = prisma_models::convert(&schema, "dummy".to_owned());

// Construct query schema
let query_schema = Arc::new(schema_builder::build(
Expand Down
89 changes: 12 additions & 77 deletions query-engine/prisma-models/src/builders/internal_dm_builder.rs
@@ -1,84 +1,19 @@
use super::{
build_composites, field_builders::RelationFieldBuilder, relation_builder::RelationBuilder, CompositeTypeBuilder,
FieldBuilder, IndexBuilder, ModelBuilder, PrimaryKeyBuilder,
field_builders::RelationFieldBuilder, relation_builder::RelationBuilder, CompositeTypeBuilder, FieldBuilder,
IndexBuilder, ModelBuilder, PrimaryKeyBuilder,
};
use crate::{
builders::{CompositeFieldBuilder, ScalarFieldBuilder},
extensions::*,
IndexType, InlineRelation, InternalDataModel, InternalDataModelRef, InternalEnum, InternalEnumValue,
RelationLinkManifestation, RelationSide, RelationTable, TypeIdentifier,
IndexType, InlineRelation, InternalEnum, InternalEnumValue, RelationLinkManifestation, RelationSide, RelationTable,
TypeIdentifier,
};
use once_cell::sync::OnceCell;
use psl::dml::{self, CompositeTypeFieldType, Datamodel, Ignorable, WithDatabaseName};
use std::sync::Arc;

/// Intermediate, mutable collection of builders gathered from a parsed
/// datamodel. Consumed by [`InternalDataModelBuilder::build`] to produce the
/// final, reference-counted `InternalDataModel`.
#[derive(Debug, Default)]
pub struct InternalDataModelBuilder {
    // One builder per (non-ignored) model in the datamodel.
    pub models: Vec<ModelBuilder>,
    // One builder per supported relation between models.
    pub relations: Vec<RelationBuilder>,
    // Enums are converted eagerly; no builder indirection needed.
    pub enums: Vec<InternalEnum>,
    // One builder per composite (embedded) type.
    pub composite_types: Vec<CompositeTypeBuilder>,
}

impl InternalDataModelBuilder {
    /// Creates a builder from a validated PSL schema by first lifting it into
    /// the `dml` representation, then delegating to the `From` impl below.
    pub fn new(datamodel: &psl::ValidatedSchema) -> Self {
        let datamodel = psl::lift(datamodel);

        Self::from(&datamodel)
    }

    /// Consumes the builder and assembles the final `InternalDataModel`.
    ///
    /// The model is allocated first with empty `OnceCell` slots so that the
    /// child builders can hold `Weak` back-references to it; the slots are
    /// then filled in a fixed order:
    /// 1. composite types (models reference them during their own build),
    /// 2. models,
    /// 3. relations,
    /// finishing with `finalize()`.
    ///
    /// Each `set(..).unwrap()` cannot panic here: every cell is freshly
    /// created above and set exactly once.
    pub fn build(self, db_name: String) -> InternalDataModelRef {
        let internal_data_model = Arc::new(InternalDataModel {
            models: OnceCell::new(),
            composite_types: OnceCell::new(),
            relations: OnceCell::new(),
            relation_fields: OnceCell::new(),
            db_name,
            enums: self.enums.into_iter().map(Arc::new).collect(),
        });

        // Composite types must be materialized before models: the model
        // builders look them up via `composite_types.get()` below.
        let composite_types = build_composites(self.composite_types, Arc::downgrade(&internal_data_model));
        internal_data_model.composite_types.set(composite_types).unwrap();

        let models = self
            .models
            .into_iter()
            .map(|mt| {
                mt.build(
                    // Downgrade to avoid an Arc reference cycle between the
                    // data model and its children.
                    Arc::downgrade(&internal_data_model),
                    internal_data_model.composite_types.get().unwrap(),
                )
            })
            .collect();

        internal_data_model.models.set(models).unwrap();

        let relations = self
            .relations
            .into_iter()
            .map(|rt| rt.build(Arc::downgrade(&internal_data_model)))
            .collect();

        internal_data_model.relations.set(relations).unwrap();
        internal_data_model.finalize();
        internal_data_model
    }
}

/// Gathers all builders from a lifted `dml` datamodel. Relation placeholders
/// are computed first because both the model and relation builders depend on
/// them.
impl From<&dml::Datamodel> for InternalDataModelBuilder {
    fn from(datamodel: &dml::Datamodel) -> Self {
        let relation_placeholders = relation_placeholders(datamodel);

        Self {
            models: model_builders(datamodel, &relation_placeholders),
            relations: relation_builders(&relation_placeholders),
            enums: convert_enums(datamodel),
            composite_types: composite_type_builders(datamodel),
        }
    }
}

fn model_builders(datamodel: &Datamodel, relation_placeholders: &[RelationPlaceholder]) -> Vec<ModelBuilder> {
pub(crate) fn model_builders(
datamodel: &Datamodel,
relation_placeholders: &[RelationPlaceholder],
) -> Vec<ModelBuilder> {
datamodel
.models()
.filter(|model| !model.is_ignored())
Expand Down Expand Up @@ -201,7 +136,7 @@ fn composite_field_builders(datamodel: &Datamodel, composite: &dml::CompositeTyp
.collect()
}

fn relation_builders(placeholders: &[RelationPlaceholder]) -> Vec<RelationBuilder> {
pub(crate) fn relation_builders(placeholders: &[RelationPlaceholder]) -> Vec<RelationBuilder> {
placeholders
.iter()
.filter(|r| r.model_a.is_relation_supported(&r.field_a) && r.model_b.is_relation_supported(&r.field_b))
Expand Down Expand Up @@ -245,7 +180,7 @@ fn pk_builder(model: &dml::Model) -> Option<PrimaryKeyBuilder> {
})
}

fn composite_type_builders(datamodel: &Datamodel) -> Vec<CompositeTypeBuilder> {
pub(crate) fn composite_type_builders(datamodel: &Datamodel) -> Vec<CompositeTypeBuilder> {
datamodel
.composite_types
.iter()
Expand All @@ -256,7 +191,7 @@ fn composite_type_builders(datamodel: &Datamodel) -> Vec<CompositeTypeBuilder> {
.collect()
}

fn convert_enums(datamodel: &Datamodel) -> Vec<InternalEnum> {
pub(crate) fn convert_enums(datamodel: &Datamodel) -> Vec<InternalEnum> {
datamodel
.enums()
.map(|e| InternalEnum {
Expand All @@ -276,7 +211,7 @@ fn convert_enum_values(enm: &dml::Enum) -> Vec<InternalEnumValue> {
}

/// Calculates placeholders that are used to compute builders dependent on some relation information being present already.
fn relation_placeholders(datamodel: &dml::Datamodel) -> Vec<RelationPlaceholder> {
pub(crate) fn relation_placeholders(datamodel: &dml::Datamodel) -> Vec<RelationPlaceholder> {
let mut result = Vec::new();

for model in datamodel.models().filter(|model| !model.is_ignored) {
Expand Down
45 changes: 45 additions & 0 deletions query-engine/prisma-models/src/convert.rs
@@ -0,0 +1,45 @@
use crate::{builders, InternalDataModel, InternalDataModelRef};
use once_cell::sync::OnceCell;
use std::sync::Arc;

/// Converts a validated PSL schema into the query engine's internal data
/// model, identified by `db_name`.
///
/// The `InternalDataModel` is allocated up front with empty `OnceCell` slots
/// so child items can keep `Weak` back-references to it; the slots are then
/// filled in dependency order: composite types first (model building reads
/// them), then models, then relations, ending with `finalize()`.
pub fn convert(schema: &psl::ValidatedSchema, db_name: String) -> InternalDataModelRef {
    let datamodel = psl::lift(schema);

    // Intermediate builders derived from the lifted datamodel. Placeholders
    // come first: both model and relation builders depend on them.
    let placeholders = builders::relation_placeholders(&datamodel);
    let model_builders = builders::model_builders(&datamodel, &placeholders);
    let relation_builders = builders::relation_builders(&placeholders);
    let enums = builders::convert_enums(&datamodel);
    let composite_builders = builders::composite_type_builders(&datamodel);

    let internal_data_model = Arc::new(InternalDataModel {
        models: OnceCell::new(),
        composite_types: OnceCell::new(),
        relations: OnceCell::new(),
        relation_fields: OnceCell::new(),
        db_name,
        enums: enums.into_iter().map(Arc::new).collect(),
    });

    // Composite types must be materialized before models: the model builders
    // look them up via `composite_types.get()` below. Each `set(..).unwrap()`
    // cannot panic — every cell was freshly created and is set exactly once.
    let composites = builders::build_composites(composite_builders, Arc::downgrade(&internal_data_model));
    internal_data_model.composite_types.set(composites).unwrap();

    let built_models = model_builders
        .into_iter()
        .map(|builder| {
            builder.build(
                // Downgrade to avoid an Arc cycle between the data model and
                // its children.
                Arc::downgrade(&internal_data_model),
                internal_data_model.composite_types.get().unwrap(),
            )
        })
        .collect();
    internal_data_model.models.set(built_models).unwrap();

    let built_relations = relation_builders
        .into_iter()
        .map(|builder| builder.build(Arc::downgrade(&internal_data_model)))
        .collect();
    internal_data_model.relations.set(built_relations).unwrap();

    internal_data_model.finalize();
    internal_data_model
}
3 changes: 2 additions & 1 deletion query-engine/prisma-models/src/lib.rs
Expand Up @@ -5,6 +5,7 @@

mod builders;
mod composite_type;
mod convert;
mod error;
mod extensions;
mod field;
Expand All @@ -25,8 +26,8 @@ mod selection_result;
pub mod pk;
pub mod prelude;

pub use builders::InternalDataModelBuilder;
pub use composite_type::*;
pub use convert::convert;
pub use error::*;
pub use field::*;
pub use field_selection::*;
Expand Down
Expand Up @@ -469,7 +469,7 @@ fn duplicate_relation_name() {
"#;

let dml = psl::parse_schema_parserdb(schema).unwrap();
InternalDataModelBuilder::from(&psl::lift(&dml)).build(String::new());
prisma_models::convert(&dml, String::new());
}

#[test]
Expand All @@ -492,8 +492,7 @@ fn implicit_many_to_many_relation() {

fn convert(datamodel: &str) -> Arc<InternalDataModel> {
let schema = psl::parse_schema_parserdb(datamodel).unwrap();
let builder = InternalDataModelBuilder::new(&schema);
builder.build("not_important".to_string())
prisma_models::convert(&schema, "not_important".to_string())
}

trait DatamodelAssertions {
Expand Down
42 changes: 11 additions & 31 deletions query-engine/query-engine-node-api/src/engine.rs
@@ -1,7 +1,6 @@
use crate::{error::ApiError, log_callback::LogCallback, logger::Logger};
use futures::FutureExt;
use prisma_models::InternalDataModelBuilder;
use psl::{common::preview_features::PreviewFeature, dml::Datamodel};
use psl::common::preview_features::PreviewFeature;
use query_core::{
executor,
schema::{QuerySchema, QuerySchemaRenderer},
Expand Down Expand Up @@ -42,24 +41,16 @@ enum Inner {
Connected(ConnectedEngine),
}

/// Holding the information to reconnect the engine if needed.
#[derive(Debug, Clone)]
struct EngineDatamodel {
ast: Datamodel,
raw: String,
}

/// Everything needed to connect to the database and have the core running.
struct EngineBuilder {
datamodel: EngineDatamodel,
config: psl::Configuration,
schema: Arc<psl::ValidatedSchema>,
config_dir: PathBuf,
env: HashMap<String, String>,
}

/// Internal structure for querying and reconnecting with the engine.
struct ConnectedEngine {
datamodel: EngineDatamodel,
schema: Arc<psl::ValidatedSchema>,
query_schema: Arc<QuerySchema>,
executor: crate::Executor,
config_dir: PathBuf,
Expand Down Expand Up @@ -178,16 +169,8 @@ impl QueryEngine {
let enable_metrics = config.preview_features().contains(PreviewFeature::Metrics);
let enable_tracing = config.preview_features().contains(PreviewFeature::Tracing);

let ast = psl::lift(&schema);

let datamodel = EngineDatamodel {
ast,
raw: schema.db.source().to_owned(),
};

let builder = EngineBuilder {
datamodel,
config: schema.configuration,
schema: Arc::new(schema),
config_dir,
env,
};
Expand Down Expand Up @@ -231,22 +214,23 @@ impl QueryEngine {
let engine = async move {
// We only support one data source & generator at the moment, so take the first one (default not exposed yet).
let data_source = builder
.config
.schema
.configuration
.datasources
.first()
.ok_or_else(|| ApiError::configuration("No valid data source found"))?;

let preview_features: Vec<_> = builder.config.preview_features().iter().collect();
let preview_features: Vec<_> = builder.schema.configuration.preview_features().iter().collect();
let url = data_source
.load_url_with_config_dir(&builder.config_dir, |key| builder.env.get(key).map(ToString::to_string))
.map_err(|err| crate::error::ApiError::Conversion(err, builder.datamodel.raw.clone()))?;
.map_err(|err| crate::error::ApiError::Conversion(err, builder.schema.db.source().to_owned()))?;

let (db_name, executor) = executor::load(data_source, &preview_features, &url).await?;
let connector = executor.primary_connector();
connector.get_connection().await?;

// Build internal data model
let internal_data_model = InternalDataModelBuilder::from(&builder.datamodel.ast).build(db_name);
let internal_data_model = prisma_models::convert(&builder.schema, db_name);

let query_schema = schema_builder::build(
internal_data_model,
Expand All @@ -257,7 +241,7 @@ impl QueryEngine {
);

Ok(ConnectedEngine {
datamodel: builder.datamodel.clone(),
schema: builder.schema.clone(),
query_schema: Arc::new(query_schema),
executor,
config_dir: builder.config_dir.clone(),
Expand Down Expand Up @@ -291,12 +275,8 @@ impl QueryEngine {
let mut inner = self.inner.write().await;
let engine = inner.as_engine()?;

let config = psl::parse_configuration(&engine.datamodel.raw)
.map_err(|errors| ApiError::conversion(errors, &engine.datamodel.raw))?;

let builder = EngineBuilder {
datamodel: engine.datamodel.clone(),
config,
schema: engine.schema.clone(),
config_dir: engine.config_dir.clone(),
env: engine.env.clone(),
};
Expand Down
3 changes: 1 addition & 2 deletions query-engine/query-engine-node-api/src/functions.rs
@@ -1,7 +1,6 @@
use crate::error::ApiError;
use napi::{bindgen_prelude::*, JsUnknown};
use napi_derive::napi;
use prisma_models::InternalDataModelBuilder;
use query_core::{schema::QuerySchemaRef, schema_builder};
use request_handlers::dmmf;
use std::{
Expand Down Expand Up @@ -42,7 +41,7 @@ pub fn dmmf(datamodel_string: String) -> napi::Result<String> {

let referential_integrity = datasource.map(|ds| ds.referential_integrity()).unwrap_or_default();

let internal_data_model = InternalDataModelBuilder::from(&datamodel).build("".into());
let internal_data_model = prisma_models::convert(&schema, "".into());

let query_schema: QuerySchemaRef = Arc::new(schema_builder::build(
internal_data_model,
Expand Down