From 05d853c9fc6f1601a328e659cbea85da3be5fe22 Mon Sep 17 00:00:00 2001 From: ayankovsky Date: Tue, 10 Mar 2020 00:02:05 +0100 Subject: [PATCH 1/2] Data API Postgres WIP --- src/connection/ConnectionOptions.ts | 2 + src/driver/DriverFactory.ts | 3 + .../AuroraDataApiPostgresConnectionOptions.ts | 34 + .../AuroraDataApiPostgresDriver.ts | 888 +++++++ .../AuroraDataApiPostgresQueryRunner.ts | 2061 +++++++++++++++++ src/driver/types/DatabaseType.ts | 1 + src/error/MissingDriverError.ts | 4 +- src/metadata-builder/EntityMetadataBuilder.ts | 3 +- src/metadata/EntityMetadata.ts | 7 +- src/query-builder/DeleteQueryBuilder.ts | 5 +- src/query-builder/InsertQueryBuilder.ts | 9 +- src/query-builder/SelectQueryBuilder.ts | 9 +- src/query-builder/UpdateQueryBuilder.ts | 7 +- src/schema-builder/RdbmsSchemaBuilder.ts | 5 +- .../exclusions/exclusions-basic.ts | 3 +- ...schema-and-database-basic-functionality.ts | 9 +- .../locking/query-builder-locking.ts | 3 +- .../order-by/query-builder-order-by.ts | 5 +- test/functional/query-runner/rename-column.ts | 3 +- test/functional/query-runner/rename-table.ts | 3 +- .../find-options-locking.ts | 5 +- 21 files changed, 3036 insertions(+), 33 deletions(-) create mode 100644 src/driver/aurora-data-api-pg/AuroraDataApiPostgresConnectionOptions.ts create mode 100644 src/driver/aurora-data-api-pg/AuroraDataApiPostgresDriver.ts create mode 100644 src/driver/aurora-data-api-pg/AuroraDataApiPostgresQueryRunner.ts diff --git a/src/connection/ConnectionOptions.ts b/src/connection/ConnectionOptions.ts index 2d223f74d6..1c37c7ce84 100644 --- a/src/connection/ConnectionOptions.ts +++ b/src/connection/ConnectionOptions.ts @@ -12,6 +12,7 @@ import {NativescriptConnectionOptions} from "../driver/nativescript/Nativescript import {ExpoConnectionOptions} from "../driver/expo/ExpoConnectionOptions"; import {AuroraDataApiConnectionOptions} from "../driver/aurora-data-api/AuroraDataApiConnectionOptions"; import {SapConnectionOptions} from "../driver/sap/SapConnectionOptions"; +import {AuroraDataApiPostgresConnectionOptions} from "../driver/aurora-data-api-pg/AuroraDataApiPostgresConnectionOptions"; /** @@ -33,4 +34,5 @@ export type ConnectionOptions = SqljsConnectionOptions| MongoConnectionOptions| AuroraDataApiConnectionOptions| + AuroraDataApiPostgresConnectionOptions| ExpoConnectionOptions; diff --git a/src/driver/DriverFactory.ts b/src/driver/DriverFactory.ts index f6aa97b017..ae0f7a5eb6 100644 --- a/src/driver/DriverFactory.ts +++ b/src/driver/DriverFactory.ts @@ -15,6 +15,7 @@ import {AuroraDataApiDriver} from "./aurora-data-api/AuroraDataApiDriver"; import {Driver} from "./Driver"; import {Connection} from "../connection/Connection"; import {SapDriver} from "./sap/SapDriver"; +import {AuroraDataApiPostgresDriver} from "./aurora-data-api-pg/AuroraDataApiPostgresDriver"; /** * Helps to create drivers. 
@@ -57,6 +58,8 @@ export class DriverFactory { return new ExpoDriver(connection); case "aurora-data-api": return new AuroraDataApiDriver(connection); + case "aurora-data-api-pg": + return new AuroraDataApiPostgresDriver(connection); default: throw new MissingDriverError(type); } diff --git a/src/driver/aurora-data-api-pg/AuroraDataApiPostgresConnectionOptions.ts b/src/driver/aurora-data-api-pg/AuroraDataApiPostgresConnectionOptions.ts new file mode 100644 index 0000000000..c0f110f2ca --- /dev/null +++ b/src/driver/aurora-data-api-pg/AuroraDataApiPostgresConnectionOptions.ts @@ -0,0 +1,34 @@ +import {BaseConnectionOptions} from "../../connection/BaseConnectionOptions"; + +/** + * Postgres-specific connection options. + */ +export interface AuroraDataApiPostgresConnectionOptions extends BaseConnectionOptions { + + /** + * Database type. + */ + readonly type: "aurora-data-api-pg"; + + readonly region: string; + + readonly secretArn: string; + + readonly resourceArn: string; + + readonly database: string; + + /** + * The Postgres extension to use to generate UUID columns. Defaults to uuid-ossp. + * If pgcrypto is selected, TypeORM will use the gen_random_uuid() function from this extension. + * If uuid-ossp is selected, TypeORM will use the uuid_generate_v4() function from this extension. + */ + readonly uuidExtension?: "pgcrypto" | "uuid-ossp"; + + + /* + * Function handling errors thrown by drivers pool. + * Defaults to logging error with `warn` level. + */ + readonly poolErrorHandler?: (err: any) => any; +} diff --git a/src/driver/aurora-data-api-pg/AuroraDataApiPostgresDriver.ts b/src/driver/aurora-data-api-pg/AuroraDataApiPostgresDriver.ts new file mode 100644 index 0000000000..b8174ceb2d --- /dev/null +++ b/src/driver/aurora-data-api-pg/AuroraDataApiPostgresDriver.ts @@ -0,0 +1,888 @@ +import {Driver} from "../Driver"; +import {ObjectLiteral} from "../.."; +import {ColumnMetadata} from "../../metadata/ColumnMetadata"; +import {AuroraDataApiPostgresQueryRunner} from "./AuroraDataApiPostgresQueryRunner"; +import {DateUtils} from "../../util/DateUtils"; +import {PlatformTools} from "../../platform/PlatformTools"; +import {Connection} from "../.."; +import {RdbmsSchemaBuilder} from "../../schema-builder/RdbmsSchemaBuilder"; +import {AuroraDataApiPostgresConnectionOptions} from "./AuroraDataApiPostgresConnectionOptions"; +import {MappedColumnTypes} from "../types/MappedColumnTypes"; +import {ColumnType} from "../types/ColumnTypes"; +import {QueryRunner} from "../../query-runner/QueryRunner"; +import {DataTypeDefaults} from "../types/DataTypeDefaults"; +import {TableColumn} from "../../schema-builder/table/TableColumn"; +import {EntityMetadata} from "../../metadata/EntityMetadata"; +import {OrmUtils} from "../../util/OrmUtils"; +import {ApplyValueTransformers} from "../../util/ApplyValueTransformers"; + +/** + * Organizes communication with PostgreSQL DBMS. + */ +export class AuroraDataApiPostgresDriver implements Driver { + + // ------------------------------------------------------------------------- + // Public Properties + // ------------------------------------------------------------------------- + + /** + * Connection used by driver. + */ + connection: Connection; + + /** + * Aurora Data API underlying library. + */ + DataApiDriver: any; + + client: any; + + /** + * Pool for master database. + */ + master: any; + + /** + * Pool for slave databases. + * Used in replication. + */ + slaves: any[] = []; + + /** + * We store all created query runners because we need to release them. 
+ */ + connectedQueryRunners: QueryRunner[] = []; + + // ------------------------------------------------------------------------- + // Public Implemented Properties + // ------------------------------------------------------------------------- + + /** + * Connection options. + */ + options: AuroraDataApiPostgresConnectionOptions; + + /** + * Master database used to perform all write queries. + */ + database?: string; + + /** + * Indicates if replication is enabled. + */ + isReplicated: boolean = false; + + /** + * Indicates if tree tables are supported by this driver. + */ + treeSupport = true; + + /** + * Gets list of supported column data types by a driver. + * + * @see https://www.tutorialspoint.com/postgresql/postgresql_data_types.htm + * @see https://www.postgresql.org/docs/9.2/static/datatype.html + */ + supportedDataTypes: ColumnType[] = [ + "int", + "int2", + "int4", + "int8", + "smallint", + "integer", + "bigint", + "decimal", + "numeric", + "real", + "float", + "float4", + "float8", + "double precision", + "money", + "character varying", + "varchar", + "character", + "char", + "text", + "citext", + "hstore", + "bytea", + "bit", + "varbit", + "bit varying", + "timetz", + "timestamptz", + "timestamp", + "timestamp without time zone", + "timestamp with time zone", + "date", + "time", + "time without time zone", + "time with time zone", + "interval", + "bool", + "boolean", + "enum", + "point", + "line", + "lseg", + "box", + "path", + "polygon", + "circle", + "cidr", + "inet", + "macaddr", + "tsvector", + "tsquery", + "uuid", + "xml", + "json", + "jsonb", + "int4range", + "int8range", + "numrange", + "tsrange", + "tstzrange", + "daterange", + "geometry", + "geography", + "cube" + ]; + + /** + * Gets list of spatial column data types. + */ + spatialTypes: ColumnType[] = [ + "geometry", + "geography" + ]; + + /** + * Gets list of column data types that support length by a driver. + */ + withLengthColumnTypes: ColumnType[] = [ + "character varying", + "varchar", + "character", + "char", + "bit", + "varbit", + "bit varying" + ]; + + /** + * Gets list of column data types that support precision by a driver. + */ + withPrecisionColumnTypes: ColumnType[] = [ + "numeric", + "decimal", + "interval", + "time without time zone", + "time with time zone", + "timestamp without time zone", + "timestamp with time zone" + ]; + + /** + * Gets list of column data types that support scale by a driver. + */ + withScaleColumnTypes: ColumnType[] = [ + "numeric", + "decimal" + ]; + + /** + * Orm has special columns and we need to know what database column types should be for those types. + * Column types are driver dependant. + */ + mappedDataTypes: MappedColumnTypes = { + createDate: "timestamp", + createDateDefault: "now()", + updateDate: "timestamp", + updateDateDefault: "now()", + deleteDate: "timestamp", + deleteDateNullable: true, + version: "int4", + treeLevel: "int4", + migrationId: "int4", + migrationName: "varchar", + migrationTimestamp: "int8", + cacheId: "int4", + cacheIdentifier: "varchar", + cacheTime: "int8", + cacheDuration: "int4", + cacheQuery: "text", + cacheResult: "text", + metadataType: "varchar", + metadataDatabase: "varchar", + metadataSchema: "varchar", + metadataTable: "varchar", + metadataName: "varchar", + metadataValue: "text", + }; + + /** + * Default values of length, precision and scale depends on column data type. + * Used in the cases when length/precision/scale is not specified by user. 
+ */ + dataTypeDefaults: DataTypeDefaults = { + "character": { length: 1 }, + "bit": { length: 1 }, + "interval": { precision: 6 }, + "time without time zone": { precision: 6 }, + "time with time zone": { precision: 6 }, + "timestamp without time zone": { precision: 6 }, + "timestamp with time zone": { precision: 6 }, + }; + + /** + * Max length allowed by Postgres for aliases. + * @see https://www.postgresql.org/docs/current/sql-syntax-lexical.html#SQL-SYNTAX-IDENTIFIERS + */ + maxAliasLength = 63; + + // ------------------------------------------------------------------------- + // Constructor + // ------------------------------------------------------------------------- + + constructor(connection: Connection) { + this.connection = connection; + this.options = connection.options as AuroraDataApiPostgresConnectionOptions; + this.isReplicated = false; + + // load data-api package + this.loadDependencies(); + + this.client = new this.DataApiDriver( + this.options.region, + this.options.secretArn, + this.options.resourceArn, + this.options.database, + (query: string, parameters?: any[]) => this.connection.logger.logQuery(query, parameters), + ); + } + + // ------------------------------------------------------------------------- + // Public Implemented Methods + // ------------------------------------------------------------------------- + + /** + * Performs connection to the database. + * Based on pooling options, it can either create connection immediately, + * either create a pool and create connection when needed. + */ + async connect(): Promise { + } + + /** + * Makes any action after connection (e.g. create extensions in Postgres driver). + */ + async afterConnect(): Promise { + const hasUuidColumns = this.connection.entityMetadatas.some(metadata => { + return metadata.generatedColumns.filter(column => column.generationStrategy === "uuid").length > 0; + }); + const hasCitextColumns = this.connection.entityMetadatas.some(metadata => { + return metadata.columns.filter(column => column.type === "citext").length > 0; + }); + const hasHstoreColumns = this.connection.entityMetadatas.some(metadata => { + return metadata.columns.filter(column => column.type === "hstore").length > 0; + }); + const hasCubeColumns = this.connection.entityMetadatas.some(metadata => { + return metadata.columns.filter(column => column.type === "cube").length > 0; + }); + const hasGeometryColumns = this.connection.entityMetadatas.some(metadata => { + return metadata.columns.filter(column => this.spatialTypes.indexOf(column.type) >= 0).length > 0; + }); + const hasExclusionConstraints = this.connection.entityMetadatas.some(metadata => { + return metadata.exclusions.length > 0; + }); + if (hasUuidColumns || hasCitextColumns || hasHstoreColumns || hasGeometryColumns || hasCubeColumns || hasExclusionConstraints) { + await Promise.all([this.master, ...this.slaves].map(pool => { + return new Promise(async (ok, fail) => { + const { logger } = this.connection; + if (hasUuidColumns) + try { + await this.executeQuery(this.connection, `CREATE EXTENSION IF NOT EXISTS "${this.options.uuidExtension || "uuid-ossp"}"`); + } catch (_) { + logger.log("warn", `At least one of the entities has uuid column, but the '${this.options.uuidExtension || "uuid-ossp"}' extension cannot be installed automatically. 
Please install it manually using superuser rights, or select another uuid extension.`); + } + if (hasCitextColumns) + try { + await this.executeQuery(this.connection, `CREATE EXTENSION IF NOT EXISTS "citext"`); + } catch (_) { + logger.log("warn", "At least one of the entities has citext column, but the 'citext' extension cannot be installed automatically. Please install it manually using superuser rights"); + } + if (hasHstoreColumns) + try { + await this.executeQuery(this.connection, `CREATE EXTENSION IF NOT EXISTS "hstore"`); + } catch (_) { + logger.log("warn", "At least one of the entities has hstore column, but the 'hstore' extension cannot be installed automatically. Please install it manually using superuser rights"); + } + if (hasGeometryColumns) + try { + await this.executeQuery(this.connection, `CREATE EXTENSION IF NOT EXISTS "postgis"`); + } catch (_) { + logger.log("warn", "At least one of the entities has a geometry column, but the 'postgis' extension cannot be installed automatically. Please install it manually using superuser rights"); + } + if (hasCubeColumns) + try { + await this.executeQuery(this.connection, `CREATE EXTENSION IF NOT EXISTS "cube"`); + } catch (_) { + logger.log("warn", "At least one of the entities has a cube column, but the 'cube' extension cannot be installed automatically. Please install it manually using superuser rights"); + } + if (hasExclusionConstraints) + try { + // The btree_gist extension provides operator support in PostgreSQL exclusion constraints + await this.executeQuery(this.connection, `CREATE EXTENSION IF NOT EXISTS "btree_gist"`); + } catch (_) { + logger.log("warn", "At least one of the entities has an exclusion constraint, but the 'btree_gist' extension cannot be installed automatically. Please install it manually using superuser rights"); + } + ok(); + }); + })); + } + + return Promise.resolve(); + } + + /** + * Closes connection with database. + */ + async disconnect(): Promise { + } + + /** + * Creates a schema builder used to build and sync a schema. + */ + createSchemaBuilder() { + return new RdbmsSchemaBuilder(this.connection); + } + + /** + * Creates a query runner used to execute database queries. + */ + createQueryRunner(mode: "master"|"slave" = "master") { + return new AuroraDataApiPostgresQueryRunner(this, mode); + } + + /** + * Prepares given value to a value to be persisted, based on its column type and metadata. + */ + preparePersistentValue(value: any, columnMetadata: ColumnMetadata): any { + if (columnMetadata.transformer) + value = ApplyValueTransformers.transformTo(columnMetadata.transformer, value); + + if (value === null || value === undefined) + return value; + + if (columnMetadata.type === Boolean) { + return value === true ? 
1 : 0; + + } else if (columnMetadata.type === "date") { + return DateUtils.mixedDateToDateString(value); + + } else if (columnMetadata.type === "time") { + return DateUtils.mixedDateToTimeString(value); + + } else if (columnMetadata.type === "datetime" + || columnMetadata.type === Date + || columnMetadata.type === "timestamp" + || columnMetadata.type === "timestamp with time zone" + || columnMetadata.type === "timestamp without time zone") { + return DateUtils.mixedDateToDate(value); + + } else if (["json", "jsonb", ...this.spatialTypes].indexOf(columnMetadata.type) >= 0) { + return JSON.stringify(value); + + } else if (columnMetadata.type === "hstore") { + if (typeof value === "string") { + return value; + } else { + // https://www.postgresql.org/docs/9.0/hstore.html + const quoteString = (value: unknown) => { + // If a string to be quoted is `null` or `undefined`, we return a literal unquoted NULL. + // This way, NULL values can be stored in the hstore object. + if (value === null || typeof value === "undefined") { + return "NULL"; + } + // Convert non-null values to string since HStore only stores strings anyway. + // To include a double quote or a backslash in a key or value, escape it with a backslash. + return `"${`${value}`.replace(/(?=["\\])/g, "\\")}"`; + }; + return Object.keys(value).map(key => quoteString(key) + "=>" + quoteString(value[key])).join(","); + } + + } else if (columnMetadata.type === "simple-array") { + return DateUtils.simpleArrayToString(value); + + } else if (columnMetadata.type === "simple-json") { + return DateUtils.simpleJsonToString(value); + + } else if (columnMetadata.type === "cube") { + if (columnMetadata.isArray) { + return `{${value.map((cube: number[]) => `"(${cube.join(",")})"`).join(",")}}`; + } + return `(${value.join(",")})`; + + } else if ( + ( + columnMetadata.type === "enum" + || columnMetadata.type === "simple-enum" + ) + && !columnMetadata.isArray + ) { + return "" + value; + } + + return value; + } + + /** + * Prepares given value to a value to be persisted, based on its column type or metadata. + */ + prepareHydratedValue(value: any, columnMetadata: ColumnMetadata): any { + if (value === null || value === undefined) + return columnMetadata.transformer ? ApplyValueTransformers.transformFrom(columnMetadata.transformer, value) : value; + + if (columnMetadata.type === Boolean) { + value = value ? true : false; + + } else if (columnMetadata.type === "datetime" + || columnMetadata.type === Date + || columnMetadata.type === "timestamp" + || columnMetadata.type === "timestamp with time zone" + || columnMetadata.type === "timestamp without time zone") { + value = DateUtils.normalizeHydratedDate(value); + + } else if (columnMetadata.type === "date") { + value = DateUtils.mixedDateToDateString(value); + + } else if (columnMetadata.type === "time") { + value = DateUtils.mixedTimeToString(value); + + } else if (columnMetadata.type === "hstore") { + if (columnMetadata.hstoreType === "object") { + const unescapeString = (str: string) => str.replace(/\\./g, (m) => m[1]); + const regexp = /"([^"\\]*(?:\\.[^"\\]*)*)"=>(?:(NULL)|"([^"\\]*(?:\\.[^"\\]*)*)")(?:,|$)/g; + const object: ObjectLiteral = {}; + `${value}`.replace(regexp, (_, key, nullValue, stringValue) => { + object[unescapeString(key)] = nullValue ? 
null : unescapeString(stringValue); + return ""; + }); + return object; + + } else { + return value; + } + + } else if (columnMetadata.type === "simple-array") { + value = DateUtils.stringToSimpleArray(value); + + } else if (columnMetadata.type === "simple-json") { + value = DateUtils.stringToSimpleJson(value); + + } else if (columnMetadata.type === "cube") { + value = value.replace(/[\(\)\s]+/g, ""); // remove whitespace + if (columnMetadata.isArray) { + /** + * Strips these groups from `{"1,2,3","",NULL}`: + * 1. ["1,2,3", undefined] <- cube of arity 3 + * 2. ["", undefined] <- cube of arity 0 + * 3. [undefined, "NULL"] <- NULL + */ + const regexp = /(?:\"((?:[\d\s\.,])*)\")|(?:(NULL))/g; + const unparsedArrayString = value; + + value = []; + let cube: RegExpExecArray | null = null; + // Iterate through all regexp matches for cubes/null in array + while ((cube = regexp.exec(unparsedArrayString)) !== null) { + if (cube[1] !== undefined) { + value.push(cube[1].split(",").filter(Boolean).map(Number)); + } else { + value.push(undefined); + } + } + } else { + value = value.split(",").filter(Boolean).map(Number); + } + + } else if (columnMetadata.type === "enum" || columnMetadata.type === "simple-enum" ) { + if (columnMetadata.isArray) { + // manually convert enum array to array of values (pg does not support, see https://github.com/brianc/node-pg-types/issues/56) + value = value !== "{}" ? (value as string).substr(1, (value as string).length - 2).split(",") : []; + // convert to number if that exists in poosible enum options + value = value.map((val: string) => { + return !isNaN(+val) && columnMetadata.enum!.indexOf(parseInt(val)) >= 0 ? parseInt(val) : val; + }); + } else { + // convert to number if that exists in poosible enum options + value = !isNaN(+value) && columnMetadata.enum!.indexOf(parseInt(value)) >= 0 ? parseInt(value) : value; + } + } + + if (columnMetadata.transformer) + value = ApplyValueTransformers.transformFrom(columnMetadata.transformer, value); + + return value; + } + + /** + * Replaces parameters in the given sql with special escaping character + * and an array of parameter names to be passed to a query. + */ + escapeQueryWithParameters(sql: string, parameters: ObjectLiteral, nativeParameters: ObjectLiteral): [string, any[]] { + const builtParameters: any[] = Object.keys(nativeParameters).map(key => nativeParameters[key]); + if (!parameters || !Object.keys(parameters).length) + return [sql, builtParameters]; + + const keys = Object.keys(parameters).map(parameter => "(:(\\.\\.\\.)?" + parameter + "\\b)").join("|"); + sql = sql.replace(new RegExp(keys, "g"), (key: string): string => { + let value: any; + let isArray = false; + if (key.substr(0, 4) === ":...") { + isArray = true; + value = parameters[key.substr(4)]; + } else { + value = parameters[key.substr(1)]; + } + + if (isArray) { + return value.map((v: any) => { + builtParameters.push(v); + return "$" + builtParameters.length; + }).join(", "); + + } else if (value instanceof Function) { + return value(); + + } else { + builtParameters.push(value); + return "$" + builtParameters.length; + } + }); // todo: make replace only in value statements, otherwise problems + return [sql, builtParameters]; + } + + /** + * Escapes a column name. + */ + escape(columnName: string): string { + return "\"" + columnName + "\""; + } + + /** + * Build full table name with schema name and table name. + * E.g. "mySchema"."myTable" + */ + buildTableName(tableName: string, schema?: string): string { + return schema ? 
`${schema}.${tableName}` : tableName; + } + + /** + * Creates a database type from a given column metadata. + */ + normalizeType(column: { type?: ColumnType, length?: number | string, precision?: number|null, scale?: number, isArray?: boolean }): string { + if (column.type === Number || column.type === "int" || column.type === "int4") { + return "integer"; + + } else if (column.type === String || column.type === "varchar") { + return "character varying"; + + } else if (column.type === Date || column.type === "timestamp") { + return "timestamp without time zone"; + + } else if (column.type === "timestamptz") { + return "timestamp with time zone"; + + } else if (column.type === "time") { + return "time without time zone"; + + } else if (column.type === "timetz") { + return "time with time zone"; + + } else if (column.type === Boolean || column.type === "bool") { + return "boolean"; + + } else if (column.type === "simple-array") { + return "text"; + + } else if (column.type === "simple-json") { + return "text"; + + } else if (column.type === "simple-enum") { + return "enum"; + + } else if (column.type === "int2") { + return "smallint"; + + } else if (column.type === "int8") { + return "bigint"; + + } else if (column.type === "decimal") { + return "numeric"; + + } else if (column.type === "float8" || column.type === "float") { + return "double precision"; + + } else if (column.type === "float4") { + return "real"; + + } else if (column.type === "char") { + return "character"; + + } else if (column.type === "varbit") { + return "bit varying"; + + } else { + return column.type as string || ""; + } + } + + /** + * Normalizes "default" value of the column. + */ + normalizeDefault(columnMetadata: ColumnMetadata): string { + const defaultValue = columnMetadata.default; + const arrayCast = columnMetadata.isArray ? `::${columnMetadata.type}[]` : ""; + + if ( + ( + columnMetadata.type === "enum" + || columnMetadata.type === "simple-enum" + ) && defaultValue !== undefined + ) { + if (columnMetadata.isArray && Array.isArray(defaultValue)) { + return `'{${defaultValue.map((val: string) => `${val}`).join(",")}}'`; + } + return `'${defaultValue}'`; + } + + if (typeof defaultValue === "number") { + return "" + defaultValue; + + } else if (typeof defaultValue === "boolean") { + return defaultValue === true ? "true" : "false"; + + } else if (typeof defaultValue === "function") { + return defaultValue(); + + } else if (typeof defaultValue === "string") { + return `'${defaultValue}'${arrayCast}`; + + } else if (defaultValue === null) { + return `null`; + + } else if (typeof defaultValue === "object") { + return `'${JSON.stringify(defaultValue)}'`; + + } else { + return defaultValue; + } + } + + /** + * Normalizes "isUnique" value of the column. + */ + normalizeIsUnique(column: ColumnMetadata): boolean { + return column.entityMetadata.uniques.some(uq => uq.columns.length === 1 && uq.columns[0] === column); + } + + /** + * Returns default column lengths, which is required on column creation. + */ + getColumnLength(column: ColumnMetadata): string { + return column.length ? 
column.length.toString() : ""; + } + + /** + * Creates column type definition including length, precision and scale + */ + createFullType(column: TableColumn): string { + let type = column.type; + + if (column.length) { + type += "(" + column.length + ")"; + } else if (column.precision !== null && column.precision !== undefined && column.scale !== null && column.scale !== undefined) { + type += "(" + column.precision + "," + column.scale + ")"; + } else if (column.precision !== null && column.precision !== undefined) { + type += "(" + column.precision + ")"; + } + + if (column.type === "time without time zone") { + type = "TIME" + (column.precision !== null && column.precision !== undefined ? "(" + column.precision + ")" : ""); + + } else if (column.type === "time with time zone") { + type = "TIME" + (column.precision !== null && column.precision !== undefined ? "(" + column.precision + ")" : "") + " WITH TIME ZONE"; + + } else if (column.type === "timestamp without time zone") { + type = "TIMESTAMP" + (column.precision !== null && column.precision !== undefined ? "(" + column.precision + ")" : ""); + + } else if (column.type === "timestamp with time zone") { + type = "TIMESTAMP" + (column.precision !== null && column.precision !== undefined ? "(" + column.precision + ")" : "") + " WITH TIME ZONE"; + } else if (this.spatialTypes.indexOf(column.type as ColumnType) >= 0) { + if (column.spatialFeatureType != null && column.srid != null) { + type = `${column.type}(${column.spatialFeatureType},${column.srid})`; + } else if (column.spatialFeatureType != null) { + type = `${column.type}(${column.spatialFeatureType})`; + } else { + type = column.type; + } + } + + if (column.isArray) + type += " array"; + + return type; + } + + /** + * Obtains a new database connection to a master server. + * Used for replication. + * If replication is not setup then returns default connection's database connection. + */ + obtainMasterConnection(): Promise { + return new Promise((ok, fail) => { + this.master.connect((err: any, connection: any, release: any) => { + err ? fail(err) : ok([connection, release]); + }); + }); + } + + /** + * Obtains a new database connection to a slave server. + * Used for replication. + * If replication is not setup then returns master (default) connection's database connection. + */ + obtainSlaveConnection(): Promise { + if (!this.slaves.length) + return this.obtainMasterConnection(); + + return new Promise((ok, fail) => { + const random = Math.floor(Math.random() * this.slaves.length); + this.slaves[random].connect((err: any, connection: any, release: any) => { + err ? fail(err) : ok([connection, release]); + }); + }); + } + + /** + * Creates generated map of values generated or returned by database after INSERT query. + * + * todo: slow. optimize Object.keys(), OrmUtils.mergeDeep and column.createValueMap parts + */ + createGeneratedMap(metadata: EntityMetadata, insertResult: ObjectLiteral) { + if (!insertResult) + return undefined; + + return Object.keys(insertResult).reduce((map, key) => { + const column = metadata.findColumnWithDatabaseName(key); + if (column) { + OrmUtils.mergeDeep(map, column.createValueMap(insertResult[key])); + // OrmUtils.mergeDeep(map, column.createValueMap(this.prepareHydratedValue(insertResult[key], column))); // TODO: probably should be like there, but fails on enums, fix later + } + return map; + }, {} as ObjectLiteral); + } + + /** + * Differentiate columns of this table and columns from the given column metadatas columns + * and returns only changed. 
+ */ + findChangedColumns(tableColumns: TableColumn[], columnMetadatas: ColumnMetadata[]): ColumnMetadata[] { + return columnMetadatas.filter(columnMetadata => { + const tableColumn = tableColumns.find(c => c.name === columnMetadata.databaseName); + if (!tableColumn) + return false; // we don't need new columns, we only need exist and changed + + return tableColumn.name !== columnMetadata.databaseName + || tableColumn.type !== this.normalizeType(columnMetadata) + || tableColumn.length !== columnMetadata.length + || tableColumn.precision !== columnMetadata.precision + || tableColumn.scale !== columnMetadata.scale + // || tableColumn.comment !== columnMetadata.comment // todo + || (!tableColumn.isGenerated && this.lowerDefaultValueIfNecessary(this.normalizeDefault(columnMetadata)) !== tableColumn.default) // we included check for generated here, because generated columns already can have default values + || tableColumn.isPrimary !== columnMetadata.isPrimary + || tableColumn.isNullable !== columnMetadata.isNullable + || tableColumn.isUnique !== this.normalizeIsUnique(columnMetadata) + || (tableColumn.enum && columnMetadata.enum && !OrmUtils.isArraysEqual(tableColumn.enum, columnMetadata.enum.map(val => val + ""))) // enums in postgres are always strings + || tableColumn.isGenerated !== columnMetadata.isGenerated + || (tableColumn.spatialFeatureType || "").toLowerCase() !== (columnMetadata.spatialFeatureType || "").toLowerCase() + || tableColumn.srid !== columnMetadata.srid; + }); + } + + private lowerDefaultValueIfNecessary(value: string | undefined) { + // Postgres saves function calls in default value as lowercase #2733 + if (!value) { + return value; + } + return value.split(`'`).map((v, i) => { + return i % 2 === 1 ? v : v.toLowerCase(); + }).join(`'`); + } + /** + * Returns true if driver supports RETURNING / OUTPUT statement. + */ + isReturningSqlSupported(): boolean { + return true; + } + + /** + * Returns true if driver supports uuid values generation on its own. + */ + isUUIDGenerationSupported(): boolean { + return true; + } + + get uuidGenerator(): string { + return this.options.uuidExtension === "pgcrypto" ? "gen_random_uuid()" : "uuid_generate_v4()"; + } + + /** + * Creates an escaped parameter. + */ + createParameter(parameterName: string, index: number): string { + return "$" + (index + 1); + } + + // ------------------------------------------------------------------------- + // Public Methods + // ------------------------------------------------------------------------- + + /** + * Loads postgres query stream package. + */ + loadStreamDependency() { + try { + return PlatformTools.load("pg-query-stream"); + + } catch (e) { // todo: better error for browser env + throw new Error(`To use streams you should install pg-query-stream package. Please run npm i pg-query-stream --save command.`); + } + } + + // ------------------------------------------------------------------------- + // Protected Methods + // ------------------------------------------------------------------------- + + /** + * If driver dependency is not given explicitly, then try to load it via "require". + */ + protected loadDependencies(): void { + const { pg } = PlatformTools.load("typeorm-aurora-data-api-driver"); + + // Driver uses rollup for publishing, which has issues when using typeorm in combination with webpack + // See https://github.com/webpack/webpack/issues/4742#issuecomment-295556787 + this.DataApiDriver = pg; + } + + /** + * Executes given query. 
+ */ + protected executeQuery(connection: any, query: string) { + return this.client.query(query); + } + +} diff --git a/src/driver/aurora-data-api-pg/AuroraDataApiPostgresQueryRunner.ts b/src/driver/aurora-data-api-pg/AuroraDataApiPostgresQueryRunner.ts new file mode 100644 index 0000000000..388ba26f11 --- /dev/null +++ b/src/driver/aurora-data-api-pg/AuroraDataApiPostgresQueryRunner.ts @@ -0,0 +1,2061 @@ +import {PromiseUtils} from "../../"; +import {ObjectLiteral} from "../../common/ObjectLiteral"; +import {QueryRunnerAlreadyReleasedError} from "../../error/QueryRunnerAlreadyReleasedError"; +import {TransactionAlreadyStartedError} from "../../error/TransactionAlreadyStartedError"; +import {TransactionNotStartedError} from "../../error/TransactionNotStartedError"; +import {ColumnType} from "../../index"; +import {ReadStream} from "../../platform/PlatformTools"; +import {BaseQueryRunner} from "../../query-runner/BaseQueryRunner"; +import {QueryRunner} from "../../query-runner/QueryRunner"; +import {TableIndexOptions} from "../../schema-builder/options/TableIndexOptions"; +import {Table} from "../../schema-builder/table/Table"; +import {TableCheck} from "../../schema-builder/table/TableCheck"; +import {TableColumn} from "../../schema-builder/table/TableColumn"; +import {TableExclusion} from "../../schema-builder/table/TableExclusion"; +import {TableForeignKey} from "../../schema-builder/table/TableForeignKey"; +import {TableIndex} from "../../schema-builder/table/TableIndex"; +import {TableUnique} from "../../schema-builder/table/TableUnique"; +import {View} from "../../schema-builder/view/View"; +import {Broadcaster} from "../../subscriber/Broadcaster"; +import {OrmUtils} from "../../util/OrmUtils"; +import {Query} from "../Query"; +import {IsolationLevel} from "../types/IsolationLevel"; +import {AuroraDataApiPostgresDriver} from "./AuroraDataApiPostgresDriver"; + +/** + * Runs queries on a single postgres database connection. + */ +export class AuroraDataApiPostgresQueryRunner extends BaseQueryRunner implements QueryRunner { + + // ------------------------------------------------------------------------- + // Public Implemented Properties + // ------------------------------------------------------------------------- + + /** + * Database driver used by connection. + */ + driver: AuroraDataApiPostgresDriver; + + // ------------------------------------------------------------------------- + // Protected Properties + // ------------------------------------------------------------------------- + + /** + * Promise used to obtain a database connection for a first time. + */ + protected databaseConnectionPromise: Promise; + + /** + * Special callback provided by a driver used to release a created connection. + */ + protected releaseCallback: Function; + + // ------------------------------------------------------------------------- + // Constructor + // ------------------------------------------------------------------------- + + constructor(driver: AuroraDataApiPostgresDriver, mode: "master"|"slave" = "master") { + super(); + this.driver = driver; + this.connection = driver.connection; + this.mode = mode; + this.broadcaster = new Broadcaster(this); + } + + // ------------------------------------------------------------------------- + // Public Methods + // ------------------------------------------------------------------------- + + /** + * Creates/uses database connection from the connection pool to perform further operations. + * Returns obtained database connection. 
+ */ + connect(): Promise { + if (this.databaseConnection) + return Promise.resolve(this.databaseConnection); + + if (this.databaseConnectionPromise) + return this.databaseConnectionPromise; + + if (this.mode === "slave" && this.driver.isReplicated) { + this.databaseConnectionPromise = this.driver.obtainSlaveConnection().then(([ connection, release]: any[]) => { + this.driver.connectedQueryRunners.push(this); + this.databaseConnection = connection; + this.releaseCallback = release; + return this.databaseConnection; + }); + + } else { // master + this.databaseConnectionPromise = this.driver.obtainMasterConnection().then(([connection, release]: any[]) => { + this.driver.connectedQueryRunners.push(this); + this.databaseConnection = connection; + this.releaseCallback = release; + return this.databaseConnection; + }); + } + + return this.databaseConnectionPromise; + } + + /** + * Releases used database connection. + * You cannot use query runner methods once its released. + */ + release(): Promise { + this.isReleased = true; + if (this.releaseCallback) + this.releaseCallback(); + + const index = this.driver.connectedQueryRunners.indexOf(this); + if (index !== -1) this.driver.connectedQueryRunners.splice(index); + + return Promise.resolve(); + } + + /** + * Starts transaction on the current connection. + */ + async startTransaction(isolationLevel?: IsolationLevel): Promise { + if (this.isTransactionActive) + throw new TransactionAlreadyStartedError(); + + this.isTransactionActive = true; + await this.driver.client.startTransaction(); + } + + /** + * Commits transaction. + * Error will be thrown if transaction was not started. + */ + async commitTransaction(): Promise { + if (!this.isTransactionActive) + throw new TransactionNotStartedError(); + + await this.driver.client.commitTransaction(); + this.isTransactionActive = false; + } + + /** + * Rollbacks transaction. + * Error will be thrown if transaction was not started. + */ + async rollbackTransaction(): Promise { + if (!this.isTransactionActive) + throw new TransactionNotStartedError(); + + await this.driver.client.rollbackTransaction(); + this.isTransactionActive = false; + } + + /** + * Executes a given SQL query. + */ + async query(query: string, parameters?: any[]): Promise { + if (this.isReleased) + throw new QueryRunnerAlreadyReleasedError(); + + const result = await this.driver.client.query(query, parameters); + + if (result.records) { + return result.records; + } + + return result; + } + + /** + * Returns raw data stream. + */ + stream(query: string, parameters?: any[], onEnd?: Function, onError?: Function): Promise { + const QueryStream = this.driver.loadStreamDependency(); + if (this.isReleased) + throw new QueryRunnerAlreadyReleasedError(); + + return new Promise(async (ok, fail) => { + try { + const databaseConnection = await this.connect(); + this.driver.connection.logger.logQuery(query, parameters, this); + const stream = databaseConnection.query(new QueryStream(query, parameters)); + if (onEnd) stream.on("end", onEnd); + if (onError) stream.on("error", onError); + ok(stream); + + } catch (err) { + fail(err); + } + }); + } + + /** + * Returns all available database names including system databases. + */ + async getDatabases(): Promise { + return Promise.resolve([]); + } + + /** + * Returns all available schema names including system schemas. + * If database parameter specified, returns schemas of that database. 
+ */ + async getSchemas(database?: string): Promise { + return Promise.resolve([]); + } + + /** + * Checks if database with the given name exist. + */ + async hasDatabase(database: string): Promise { + return Promise.resolve(false); + } + + /** + * Checks if schema with the given name exist. + */ + async hasSchema(schema: string): Promise { + const result = await this.query(`SELECT * FROM "information_schema"."schemata" WHERE "schema_name" = '${schema}'`); + return result.length ? true : false; + } + + /** + * Checks if table with the given name exist in the database. + */ + async hasTable(tableOrName: Table|string): Promise { + const parsedTableName = this.parseTableName(tableOrName); + const sql = `SELECT * FROM "information_schema"."tables" WHERE "table_schema" = ${parsedTableName.schema} AND "table_name" = ${parsedTableName.tableName}`; + const result = await this.query(sql); + return result.length ? true : false; + } + + /** + * Checks if column with the given name exist in the given table. + */ + async hasColumn(tableOrName: Table|string, columnName: string): Promise { + const parsedTableName = this.parseTableName(tableOrName); + const sql = `SELECT * FROM "information_schema"."columns" WHERE "table_schema" = ${parsedTableName.schema} AND "table_name" = ${parsedTableName.tableName} AND "column_name" = '${columnName}'`; + const result = await this.query(sql); + return result.length ? true : false; + } + + /** + * Creates a new database. + * Postgres does not supports database creation inside a transaction block. + */ + async createDatabase(database: string, ifNotExist?: boolean): Promise { + await Promise.resolve(); + } + + /** + * Drops database. + * Postgres does not supports database drop inside a transaction block. + */ + async dropDatabase(database: string, ifExist?: boolean): Promise { + return Promise.resolve(); + } + + /** + * Creates a new table schema. + */ + async createSchema(schema: string, ifNotExist?: boolean): Promise { + const up = ifNotExist ? `CREATE SCHEMA IF NOT EXISTS "${schema}"` : `CREATE SCHEMA "${schema}"`; + const down = `DROP SCHEMA "${schema}" CASCADE`; + await this.executeQueries(new Query(up), new Query(down)); + } + + /** + * Drops table schema. + */ + async dropSchema(schemaPath: string, ifExist?: boolean, isCascade?: boolean): Promise { + const schema = schemaPath.indexOf(".") === -1 ? schemaPath : schemaPath.split(".")[0]; + const up = ifExist ? `DROP SCHEMA IF EXISTS "${schema}" ${isCascade ? "CASCADE" : ""}` : `DROP SCHEMA "${schema}" ${isCascade ? "CASCADE" : ""}`; + const down = `CREATE SCHEMA "${schema}"`; + await this.executeQueries(new Query(up), new Query(down)); + } + + /** + * Creates a new table. + */ + async createTable(table: Table, ifNotExist: boolean = false, createForeignKeys: boolean = true, createIndices: boolean = true): Promise { + if (ifNotExist) { + const isTableExist = await this.hasTable(table); + if (isTableExist) return Promise.resolve(); + } + const upQueries: Query[] = []; + const downQueries: Query[] = []; + + // if table have column with ENUM type, we must create this type in postgres. 
+ await Promise.all(table.columns + .filter(column => column.type === "enum" || column.type === "simple-enum") + .map(async column => { + const hasEnum = await this.hasEnumType(table, column); + // TODO: Should also check if values of existing type matches expected ones + if (!hasEnum) { + upQueries.push(this.createEnumTypeSql(table, column)); + downQueries.push(this.dropEnumTypeSql(table, column)); + } + return Promise.resolve(); + })); + + upQueries.push(this.createTableSql(table, createForeignKeys)); + downQueries.push(this.dropTableSql(table)); + + // if createForeignKeys is true, we must drop created foreign keys in down query. + // createTable does not need separate method to create foreign keys, because it create fk's in the same query with table creation. + if (createForeignKeys) + table.foreignKeys.forEach(foreignKey => downQueries.push(this.dropForeignKeySql(table, foreignKey))); + + if (createIndices) { + table.indices.forEach(index => { + + // new index may be passed without name. In this case we generate index name manually. + if (!index.name) + index.name = this.connection.namingStrategy.indexName(table.name, index.columnNames, index.where); + upQueries.push(this.createIndexSql(table, index)); + downQueries.push(this.dropIndexSql(table, index)); + }); + } + + await this.executeQueries(upQueries, downQueries); + } + + /** + * Drops the table. + */ + async dropTable(target: Table|string, ifExist?: boolean, dropForeignKeys: boolean = true, dropIndices: boolean = true): Promise {// It needs because if table does not exist and dropForeignKeys or dropIndices is true, we don't need + // to perform drop queries for foreign keys and indices. + if (ifExist) { + const isTableExist = await this.hasTable(target); + if (!isTableExist) return Promise.resolve(); + } + + // if dropTable called with dropForeignKeys = true, we must create foreign keys in down query. + const createForeignKeys: boolean = dropForeignKeys; + const tableName = target instanceof Table ? target.name : target; + const table = await this.getCachedTable(tableName); + const upQueries: Query[] = []; + const downQueries: Query[] = []; + + + if (dropIndices) { + table.indices.forEach(index => { + upQueries.push(this.dropIndexSql(table, index)); + downQueries.push(this.createIndexSql(table, index)); + }); + } + + if (dropForeignKeys) + table.foreignKeys.forEach(foreignKey => upQueries.push(this.dropForeignKeySql(table, foreignKey))); + + upQueries.push(this.dropTableSql(table)); + downQueries.push(this.createTableSql(table, createForeignKeys)); + + await this.executeQueries(upQueries, downQueries); + } + + /** + * Creates a new view. + */ + async createView(view: View): Promise { + const upQueries: Query[] = []; + const downQueries: Query[] = []; + upQueries.push(this.createViewSql(view)); + upQueries.push(await this.insertViewDefinitionSql(view)); + downQueries.push(this.dropViewSql(view)); + downQueries.push(await this.deleteViewDefinitionSql(view)); + await this.executeQueries(upQueries, downQueries); + } + + /** + * Drops the view. + */ + async dropView(target: View|string): Promise { + const viewName = target instanceof View ? 
target.name : target; + const view = await this.getCachedView(viewName); + + const upQueries: Query[] = []; + const downQueries: Query[] = []; + upQueries.push(await this.deleteViewDefinitionSql(view)); + upQueries.push(this.dropViewSql(view)); + downQueries.push(await this.insertViewDefinitionSql(view)); + downQueries.push(this.createViewSql(view)); + await this.executeQueries(upQueries, downQueries); + } + + /** + * Renames the given table. + */ + async renameTable(oldTableOrName: Table|string, newTableName: string): Promise { + const upQueries: Query[] = []; + const downQueries: Query[] = []; + const oldTable = oldTableOrName instanceof Table ? oldTableOrName : await this.getCachedTable(oldTableOrName); + const newTable = oldTable.clone(); + const oldTableName = oldTable.name.indexOf(".") === -1 ? oldTable.name : oldTable.name.split(".")[1]; + const schemaName = oldTable.name.indexOf(".") === -1 ? undefined : oldTable.name.split(".")[0]; + newTable.name = schemaName ? `${schemaName}.${newTableName}` : newTableName; + + upQueries.push(new Query(`ALTER TABLE ${this.escapePath(oldTable)} RENAME TO "${newTableName}"`)); + downQueries.push(new Query(`ALTER TABLE ${this.escapePath(newTable)} RENAME TO "${oldTableName}"`)); + + // rename column primary key constraint + if (newTable.primaryColumns.length > 0) { + const columnNames = newTable.primaryColumns.map(column => column.name); + + const oldPkName = this.connection.namingStrategy.primaryKeyName(oldTable, columnNames); + const newPkName = this.connection.namingStrategy.primaryKeyName(newTable, columnNames); + + upQueries.push(new Query(`ALTER TABLE ${this.escapePath(newTable)} RENAME CONSTRAINT "${oldPkName}" TO "${newPkName}"`)); + downQueries.push(new Query(`ALTER TABLE ${this.escapePath(newTable)} RENAME CONSTRAINT "${newPkName}" TO "${oldPkName}"`)); + } + + // rename unique constraints + newTable.uniques.forEach(unique => { + // build new constraint name + const newUniqueName = this.connection.namingStrategy.uniqueConstraintName(newTable, unique.columnNames); + + // build queries + upQueries.push(new Query(`ALTER TABLE ${this.escapePath(newTable)} RENAME CONSTRAINT "${unique.name}" TO "${newUniqueName}"`)); + downQueries.push(new Query(`ALTER TABLE ${this.escapePath(newTable)} RENAME CONSTRAINT "${newUniqueName}" TO "${unique.name}"`)); + + // replace constraint name + unique.name = newUniqueName; + }); + + // rename index constraints + newTable.indices.forEach(index => { + // build new constraint name + const schema = this.extractSchema(newTable); + const newIndexName = this.connection.namingStrategy.indexName(newTable, index.columnNames, index.where); + + // build queries + const up = schema ? `ALTER INDEX "${schema}"."${index.name}" RENAME TO "${newIndexName}"` : `ALTER INDEX "${index.name}" RENAME TO "${newIndexName}"`; + const down = schema ? 
`ALTER INDEX "${schema}"."${newIndexName}" RENAME TO "${index.name}"` : `ALTER INDEX "${newIndexName}" RENAME TO "${index.name}"`; + upQueries.push(new Query(up)); + downQueries.push(new Query(down)); + + // replace constraint name + index.name = newIndexName; + }); + + // rename foreign key constraints + newTable.foreignKeys.forEach(foreignKey => { + // build new constraint name + const newForeignKeyName = this.connection.namingStrategy.foreignKeyName(newTable, foreignKey.columnNames, foreignKey.referencedTableName, foreignKey.referencedColumnNames); + + // build queries + upQueries.push(new Query(`ALTER TABLE ${this.escapePath(newTable)} RENAME CONSTRAINT "${foreignKey.name}" TO "${newForeignKeyName}"`)); + downQueries.push(new Query(`ALTER TABLE ${this.escapePath(newTable)} RENAME CONSTRAINT "${newForeignKeyName}" TO "${foreignKey.name}"`)); + + // replace constraint name + foreignKey.name = newForeignKeyName; + }); + + // rename ENUM types + const enumColumns = newTable.columns.filter(column => column.type === "enum" || column.type === "simple-enum"); + for (let column of enumColumns) { + const oldEnumType = await this.getEnumTypeName(oldTable, column); + upQueries.push(new Query(`ALTER TYPE "${oldEnumType.enumTypeSchema}"."${oldEnumType.enumTypeName}" RENAME TO ${this.buildEnumName(newTable, column, false)}`)); + downQueries.push(new Query(`ALTER TYPE ${this.buildEnumName(newTable, column)} RENAME TO "${oldEnumType.enumTypeName}"`)); + } + await this.executeQueries(upQueries, downQueries); + } + + /** + * Creates a new column from the column in the table. + */ + async addColumn(tableOrName: Table|string, column: TableColumn): Promise { + const table = tableOrName instanceof Table ? tableOrName : await this.getCachedTable(tableOrName); + const clonedTable = table.clone(); + const upQueries: Query[] = []; + const downQueries: Query[] = []; + + if (column.type === "enum" || column.type === "simple-enum") { + const hasEnum = await this.hasEnumType(table, column); + if (!hasEnum) { + upQueries.push(this.createEnumTypeSql(table, column)); + downQueries.push(this.dropEnumTypeSql(table, column)); + } + } + + upQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} ADD ${this.buildCreateColumnSql(table, column)}`)); + downQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} DROP COLUMN "${column.name}"`)); + + // create or update primary key constraint + if (column.isPrimary) { + const primaryColumns = clonedTable.primaryColumns; + // if table already have primary key, me must drop it and recreate again + if (primaryColumns.length > 0) { + const pkName = this.connection.namingStrategy.primaryKeyName(clonedTable.name, primaryColumns.map(column => column.name)); + const columnNames = primaryColumns.map(column => `"${column.name}"`).join(", "); + upQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} DROP CONSTRAINT "${pkName}"`)); + downQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} ADD CONSTRAINT "${pkName}" PRIMARY KEY (${columnNames})`)); + } + + primaryColumns.push(column); + const pkName = this.connection.namingStrategy.primaryKeyName(clonedTable.name, primaryColumns.map(column => column.name)); + const columnNames = primaryColumns.map(column => `"${column.name}"`).join(", "); + upQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} ADD CONSTRAINT "${pkName}" PRIMARY KEY (${columnNames})`)); + downQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} DROP CONSTRAINT "${pkName}"`)); + } + + // create column index + const columnIndex = 
clonedTable.indices.find(index => index.columnNames.length === 1 && index.columnNames[0] === column.name); + if (columnIndex) { + upQueries.push(this.createIndexSql(table, columnIndex)); + downQueries.push(this.dropIndexSql(table, columnIndex)); + } + + // create unique constraint + if (column.isUnique) { + const uniqueConstraint = new TableUnique({ + name: this.connection.namingStrategy.uniqueConstraintName(table.name, [column.name]), + columnNames: [column.name] + }); + clonedTable.uniques.push(uniqueConstraint); + upQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} ADD CONSTRAINT "${uniqueConstraint.name}" UNIQUE ("${column.name}")`)); + downQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} DROP CONSTRAINT "${uniqueConstraint.name}"`)); + } + + await this.executeQueries(upQueries, downQueries); + + clonedTable.addColumn(column); + this.replaceCachedTable(table, clonedTable); + } + + /** + * Creates a new columns from the column in the table. + */ + async addColumns(tableOrName: Table|string, columns: TableColumn[]): Promise { + await PromiseUtils.runInSequence(columns, column => this.addColumn(tableOrName, column)); + } + + /** + * Renames column in the given table. + */ + async renameColumn(tableOrName: Table|string, oldTableColumnOrName: TableColumn|string, newTableColumnOrName: TableColumn|string): Promise { + const table = tableOrName instanceof Table ? tableOrName : await this.getCachedTable(tableOrName); + const oldColumn = oldTableColumnOrName instanceof TableColumn ? oldTableColumnOrName : table.columns.find(c => c.name === oldTableColumnOrName); + if (!oldColumn) + throw new Error(`Column "${oldTableColumnOrName}" was not found in the "${table.name}" table.`); + + let newColumn; + if (newTableColumnOrName instanceof TableColumn) { + newColumn = newTableColumnOrName; + } else { + newColumn = oldColumn.clone(); + newColumn.name = newTableColumnOrName; + } + + return this.changeColumn(table, oldColumn, newColumn); + } + + /** + * Changes a column in the table. + */ + async changeColumn(tableOrName: Table|string, oldTableColumnOrName: TableColumn|string, newColumn: TableColumn): Promise { + const table = tableOrName instanceof Table ? tableOrName : await this.getCachedTable(tableOrName); + let clonedTable = table.clone(); + const upQueries: Query[] = []; + const downQueries: Query[] = []; + + const oldColumn = oldTableColumnOrName instanceof TableColumn + ? 
oldTableColumnOrName + : table.columns.find(column => column.name === oldTableColumnOrName); + if (!oldColumn) + throw new Error(`Column "${oldTableColumnOrName}" was not found in the "${table.name}" table.`); + + if (oldColumn.type !== newColumn.type || oldColumn.length !== newColumn.length) { + // To avoid data conversion, we just recreate column + await this.dropColumn(table, oldColumn); + await this.addColumn(table, newColumn); + + // update cloned table + clonedTable = table.clone(); + + } else { + if (oldColumn.name !== newColumn.name) { + // rename column + upQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} RENAME COLUMN "${oldColumn.name}" TO "${newColumn.name}"`)); + downQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} RENAME COLUMN "${newColumn.name}" TO "${oldColumn.name}"`)); + + // rename ENUM type + if (oldColumn.type === "enum" || oldColumn.type === "simple-enum") { + const oldEnumType = await this.getEnumTypeName(table, oldColumn); + upQueries.push(new Query(`ALTER TYPE "${oldEnumType.enumTypeSchema}"."${oldEnumType.enumTypeName}" RENAME TO ${this.buildEnumName(table, newColumn, false)}`)); + downQueries.push(new Query(`ALTER TYPE ${this.buildEnumName(table, newColumn)} RENAME TO "${oldEnumType.enumTypeName}"`)); + } + + // rename column primary key constraint + if (oldColumn.isPrimary === true) { + const primaryColumns = clonedTable.primaryColumns; + + // build old primary constraint name + const columnNames = primaryColumns.map(column => column.name); + const oldPkName = this.connection.namingStrategy.primaryKeyName(clonedTable, columnNames); + + // replace old column name with new column name + columnNames.splice(columnNames.indexOf(oldColumn.name), 1); + columnNames.push(newColumn.name); + + // build new primary constraint name + const newPkName = this.connection.namingStrategy.primaryKeyName(clonedTable, columnNames); + + upQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} RENAME CONSTRAINT "${oldPkName}" TO "${newPkName}"`)); + downQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} RENAME CONSTRAINT "${newPkName}" TO "${oldPkName}"`)); + } + + // rename column sequence + if (oldColumn.isGenerated === true && newColumn.generationStrategy === "increment") { + const schema = this.extractSchema(table); + + // building sequence name. Sequence without schema needed because it must be supplied in RENAME TO without + // schema name, but schema needed in ALTER SEQUENCE argument. + const seqName = this.buildSequenceName(table, oldColumn.name, undefined, true, true); + const newSeqName = this.buildSequenceName(table, newColumn.name, undefined, true, true); + + const up = schema ? `ALTER SEQUENCE "${schema}"."${seqName}" RENAME TO "${newSeqName}"` : `ALTER SEQUENCE "${seqName}" RENAME TO "${newSeqName}"`; + const down = schema ? 
`ALTER SEQUENCE "${schema}"."${newSeqName}" RENAME TO "${seqName}"` : `ALTER SEQUENCE "${newSeqName}" RENAME TO "${seqName}"`; + upQueries.push(new Query(up)); + downQueries.push(new Query(down)); + } + + // rename unique constraints + clonedTable.findColumnUniques(oldColumn).forEach(unique => { + // build new constraint name + unique.columnNames.splice(unique.columnNames.indexOf(oldColumn.name), 1); + unique.columnNames.push(newColumn.name); + const newUniqueName = this.connection.namingStrategy.uniqueConstraintName(clonedTable, unique.columnNames); + + // build queries + upQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} RENAME CONSTRAINT "${unique.name}" TO "${newUniqueName}"`)); + downQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} RENAME CONSTRAINT "${newUniqueName}" TO "${unique.name}"`)); + + // replace constraint name + unique.name = newUniqueName; + }); + + // rename index constraints + clonedTable.findColumnIndices(oldColumn).forEach(index => { + // build new constraint name + index.columnNames.splice(index.columnNames.indexOf(oldColumn.name), 1); + index.columnNames.push(newColumn.name); + const schema = this.extractSchema(table); + const newIndexName = this.connection.namingStrategy.indexName(clonedTable, index.columnNames, index.where); + + // build queries + const up = schema ? `ALTER INDEX "${schema}"."${index.name}" RENAME TO "${newIndexName}"` : `ALTER INDEX "${index.name}" RENAME TO "${newIndexName}"`; + const down = schema ? `ALTER INDEX "${schema}"."${newIndexName}" RENAME TO "${index.name}"` : `ALTER INDEX "${newIndexName}" RENAME TO "${index.name}"`; + upQueries.push(new Query(up)); + downQueries.push(new Query(down)); + + // replace constraint name + index.name = newIndexName; + }); + + // rename foreign key constraints + clonedTable.findColumnForeignKeys(oldColumn).forEach(foreignKey => { + // build new constraint name + foreignKey.columnNames.splice(foreignKey.columnNames.indexOf(oldColumn.name), 1); + foreignKey.columnNames.push(newColumn.name); + const newForeignKeyName = this.connection.namingStrategy.foreignKeyName(clonedTable, foreignKey.columnNames, foreignKey.referencedTableName, foreignKey.referencedColumnNames); + + // build queries + upQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} RENAME CONSTRAINT "${foreignKey.name}" TO "${newForeignKeyName}"`)); + downQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} RENAME CONSTRAINT "${newForeignKeyName}" TO "${foreignKey.name}"`)); + + // replace constraint name + foreignKey.name = newForeignKeyName; + }); + + // rename old column in the Table object + const oldTableColumn = clonedTable.columns.find(column => column.name === oldColumn.name); + clonedTable.columns[clonedTable.columns.indexOf(oldTableColumn!)].name = newColumn.name; + oldColumn.name = newColumn.name; + } + + if (newColumn.precision !== oldColumn.precision || newColumn.scale !== oldColumn.scale) { + upQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} ALTER COLUMN "${newColumn.name}" TYPE ${this.driver.createFullType(newColumn)}`)); + downQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} ALTER COLUMN "${newColumn.name}" TYPE ${this.driver.createFullType(oldColumn)}`)); + } + + if ( + (newColumn.type === "enum" || newColumn.type === "simple-enum") + && (oldColumn.type === "enum" || oldColumn.type === "simple-enum") + && !OrmUtils.isArraysEqual(newColumn.enum!, oldColumn.enum!) 
+ ) { + const enumName = this.buildEnumName(table, newColumn); + const arraySuffix = newColumn.isArray ? "[]" : ""; + const oldEnumName = this.buildEnumName(table, newColumn, true, false, true); + const oldEnumNameWithoutSchema = this.buildEnumName(table, newColumn, false, false, true); + const enumTypeBeforeColumnChange = await this.getEnumTypeName(table, oldColumn); + + // rename old ENUM + upQueries.push(new Query(`ALTER TYPE "${enumTypeBeforeColumnChange.enumTypeSchema}"."${enumTypeBeforeColumnChange.enumTypeName}" RENAME TO ${oldEnumNameWithoutSchema}`)); + downQueries.push(new Query(`ALTER TYPE ${oldEnumName} RENAME TO "${enumTypeBeforeColumnChange.enumTypeName}"`)); + + // create new ENUM + upQueries.push(this.createEnumTypeSql(table, newColumn)); + downQueries.push(this.dropEnumTypeSql(table, oldColumn)); + + // if column have default value, we must drop it to avoid issues with type casting + if (newColumn.default !== null && newColumn.default !== undefined) { + upQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} ALTER COLUMN "${newColumn.name}" DROP DEFAULT`)); + downQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} ALTER COLUMN "${newColumn.name}" SET DEFAULT ${newColumn.default}`)); + } + + // build column types + const upType = `${enumName}${arraySuffix} USING "${newColumn.name}"::"text"::${enumName}${arraySuffix}`; + const downType = `${oldEnumName}${arraySuffix} USING "${newColumn.name}"::"text"::${oldEnumName}${arraySuffix}`; + + // update column to use new type + upQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} ALTER COLUMN "${newColumn.name}" TYPE ${upType}`)); + downQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} ALTER COLUMN "${newColumn.name}" TYPE ${downType}`)); + + // if column have default value and we dropped it before, we must bring it back + if (newColumn.default !== null && newColumn.default !== undefined) { + upQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} ALTER COLUMN "${newColumn.name}" SET DEFAULT ${newColumn.default}`)); + downQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} ALTER COLUMN "${newColumn.name}" DROP DEFAULT`)); + } + + // remove old ENUM + upQueries.push(this.dropEnumTypeSql(table, newColumn, oldEnumName)); + downQueries.push(this.createEnumTypeSql(table, oldColumn, oldEnumName)); + } + + if (oldColumn.isNullable !== newColumn.isNullable) { + if (newColumn.isNullable) { + upQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} ALTER COLUMN "${oldColumn.name}" DROP NOT NULL`)); + downQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} ALTER COLUMN "${oldColumn.name}" SET NOT NULL`)); + } else { + upQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} ALTER COLUMN "${oldColumn.name}" SET NOT NULL`)); + downQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} ALTER COLUMN "${oldColumn.name}" DROP NOT NULL`)); + } + } + + if (oldColumn.comment !== newColumn.comment) { + upQueries.push(new Query(`COMMENT ON COLUMN ${this.escapePath(table)}."${oldColumn.name}" IS '${newColumn.comment}'`)); + downQueries.push(new Query(`COMMENT ON COLUMN ${this.escapePath(table)}."${newColumn.name}" IS '${oldColumn.comment}'`)); + } + + if (newColumn.isPrimary !== oldColumn.isPrimary) { + const primaryColumns = clonedTable.primaryColumns; + + // if primary column state changed, we must always drop existed constraint. 
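For orientation: the primary-key branch below first drops the table's existing primary key constraint (if any) and then re-adds it with the changed column included, recording the inverse statements for the down migration. A minimal sketch of what this emits, assuming a hypothetical "post" table and placeholder constraint names (real names come from the configured naming strategy):

    // Illustration only: promoting "version" to primary on a table whose PK was ("id").
    const upSql = [
        `ALTER TABLE "post" DROP CONSTRAINT "PK_old"`,
        `ALTER TABLE "post" ADD CONSTRAINT "PK_new" PRIMARY KEY ("id", "version")`,
    ];
    // downQueries receives the mirror-image statements so the change can be reverted.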
+ if (primaryColumns.length > 0) { + const pkName = this.connection.namingStrategy.primaryKeyName(clonedTable.name, primaryColumns.map(column => column.name)); + const columnNames = primaryColumns.map(column => `"${column.name}"`).join(", "); + upQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} DROP CONSTRAINT "${pkName}"`)); + downQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} ADD CONSTRAINT "${pkName}" PRIMARY KEY (${columnNames})`)); + } + + if (newColumn.isPrimary === true) { + primaryColumns.push(newColumn); + // update column in table + const column = clonedTable.columns.find(column => column.name === newColumn.name); + column!.isPrimary = true; + const pkName = this.connection.namingStrategy.primaryKeyName(clonedTable.name, primaryColumns.map(column => column.name)); + const columnNames = primaryColumns.map(column => `"${column.name}"`).join(", "); + upQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} ADD CONSTRAINT "${pkName}" PRIMARY KEY (${columnNames})`)); + downQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} DROP CONSTRAINT "${pkName}"`)); + + } else { + const primaryColumn = primaryColumns.find(c => c.name === newColumn.name); + primaryColumns.splice(primaryColumns.indexOf(primaryColumn!), 1); + + // update column in table + const column = clonedTable.columns.find(column => column.name === newColumn.name); + column!.isPrimary = false; + + // if we have another primary keys, we must recreate constraint. + if (primaryColumns.length > 0) { + const pkName = this.connection.namingStrategy.primaryKeyName(clonedTable.name, primaryColumns.map(column => column.name)); + const columnNames = primaryColumns.map(column => `"${column.name}"`).join(", "); + upQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} ADD CONSTRAINT "${pkName}" PRIMARY KEY (${columnNames})`)); + downQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} DROP CONSTRAINT "${pkName}"`)); + } + } + } + + if (newColumn.isUnique !== oldColumn.isUnique) { + if (newColumn.isUnique === true) { + const uniqueConstraint = new TableUnique({ + name: this.connection.namingStrategy.uniqueConstraintName(table.name, [newColumn.name]), + columnNames: [newColumn.name] + }); + clonedTable.uniques.push(uniqueConstraint); + upQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} ADD CONSTRAINT "${uniqueConstraint.name}" UNIQUE ("${newColumn.name}")`)); + downQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} DROP CONSTRAINT "${uniqueConstraint.name}"`)); + + } else { + const uniqueConstraint = clonedTable.uniques.find(unique => { + return unique.columnNames.length === 1 && !!unique.columnNames.find(columnName => columnName === newColumn.name); + }); + clonedTable.uniques.splice(clonedTable.uniques.indexOf(uniqueConstraint!), 1); + upQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} DROP CONSTRAINT "${uniqueConstraint!.name}"`)); + downQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} ADD CONSTRAINT "${uniqueConstraint!.name}" UNIQUE ("${newColumn.name}")`)); + } + } + + if (oldColumn.isGenerated !== newColumn.isGenerated && newColumn.generationStrategy !== "uuid") { + if (newColumn.isGenerated === true) { + upQueries.push(new Query(`CREATE SEQUENCE ${this.buildSequenceName(table, newColumn)} OWNED BY ${this.escapePath(table)}."${newColumn.name}"`)); + downQueries.push(new Query(`DROP SEQUENCE ${this.buildSequenceName(table, newColumn)}`)); + + upQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} ALTER COLUMN 
"${newColumn.name}" SET DEFAULT nextval('${this.buildSequenceName(table, newColumn, undefined, true)}')`)); + downQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} ALTER COLUMN "${newColumn.name}" DROP DEFAULT`)); + + } else { + upQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} ALTER COLUMN "${newColumn.name}" DROP DEFAULT`)); + downQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} ALTER COLUMN "${newColumn.name}" SET DEFAULT nextval('${this.buildSequenceName(table, newColumn, undefined, true)}')`)); + + upQueries.push(new Query(`DROP SEQUENCE ${this.buildSequenceName(table, newColumn)}`)); + downQueries.push(new Query(`CREATE SEQUENCE ${this.buildSequenceName(table, newColumn)} OWNED BY ${this.escapePath(table)}."${newColumn.name}"`)); + } + } + + if (newColumn.default !== oldColumn.default) { + if (newColumn.default !== null && newColumn.default !== undefined) { + upQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} ALTER COLUMN "${newColumn.name}" SET DEFAULT ${newColumn.default}`)); + + if (oldColumn.default !== null && oldColumn.default !== undefined) { + downQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} ALTER COLUMN "${newColumn.name}" SET DEFAULT ${oldColumn.default}`)); + } else { + downQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} ALTER COLUMN "${newColumn.name}" DROP DEFAULT`)); + } + + } else if (oldColumn.default !== null && oldColumn.default !== undefined) { + upQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} ALTER COLUMN "${newColumn.name}" DROP DEFAULT`)); + downQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} ALTER COLUMN "${newColumn.name}" SET DEFAULT ${oldColumn.default}`)); + } + } + + if ((newColumn.spatialFeatureType || "").toLowerCase() !== (oldColumn.spatialFeatureType || "").toLowerCase() || newColumn.srid !== oldColumn.srid) { + upQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} ALTER COLUMN "${newColumn.name}" TYPE ${this.driver.createFullType(newColumn)}`)); + downQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} ALTER COLUMN "${newColumn.name}" TYPE ${this.driver.createFullType(oldColumn)}`)); + } + + } + + await this.executeQueries(upQueries, downQueries); + this.replaceCachedTable(table, clonedTable); + } + + /** + * Changes a column in the table. + */ + async changeColumns(tableOrName: Table|string, changedColumns: { newColumn: TableColumn, oldColumn: TableColumn }[]): Promise { + await PromiseUtils.runInSequence(changedColumns, changedColumn => this.changeColumn(tableOrName, changedColumn.oldColumn, changedColumn.newColumn)); + } + + /** + * Drops column in the table. + */ + async dropColumn(tableOrName: Table|string, columnOrName: TableColumn|string): Promise { + const table = tableOrName instanceof Table ? tableOrName : await this.getCachedTable(tableOrName); + const column = columnOrName instanceof TableColumn ? 
columnOrName : table.findColumnByName(columnOrName); + if (!column) + throw new Error(`Column "${columnOrName}" was not found in table "${table.name}"`); + + const clonedTable = table.clone(); + const upQueries: Query[] = []; + const downQueries: Query[] = []; + + // drop primary key constraint + if (column.isPrimary) { + const pkName = this.connection.namingStrategy.primaryKeyName(clonedTable.name, clonedTable.primaryColumns.map(column => column.name)); + const columnNames = clonedTable.primaryColumns.map(primaryColumn => `"${primaryColumn.name}"`).join(", "); + upQueries.push(new Query(`ALTER TABLE ${this.escapePath(clonedTable)} DROP CONSTRAINT "${pkName}"`)); + downQueries.push(new Query(`ALTER TABLE ${this.escapePath(clonedTable)} ADD CONSTRAINT "${pkName}" PRIMARY KEY (${columnNames})`)); + + // update column in table + const tableColumn = clonedTable.findColumnByName(column.name); + tableColumn!.isPrimary = false; + + // if primary key have multiple columns, we must recreate it without dropped column + if (clonedTable.primaryColumns.length > 0) { + const pkName = this.connection.namingStrategy.primaryKeyName(clonedTable.name, clonedTable.primaryColumns.map(column => column.name)); + const columnNames = clonedTable.primaryColumns.map(primaryColumn => `"${primaryColumn.name}"`).join(", "); + upQueries.push(new Query(`ALTER TABLE ${this.escapePath(clonedTable)} ADD CONSTRAINT "${pkName}" PRIMARY KEY (${columnNames})`)); + downQueries.push(new Query(`ALTER TABLE ${this.escapePath(clonedTable)} DROP CONSTRAINT "${pkName}"`)); + } + } + + // drop column index + const columnIndex = clonedTable.indices.find(index => index.columnNames.length === 1 && index.columnNames[0] === column.name); + if (columnIndex) { + clonedTable.indices.splice(clonedTable.indices.indexOf(columnIndex), 1); + upQueries.push(this.dropIndexSql(table, columnIndex)); + downQueries.push(this.createIndexSql(table, columnIndex)); + } + + // drop column check + const columnCheck = clonedTable.checks.find(check => !!check.columnNames && check.columnNames.length === 1 && check.columnNames[0] === column.name); + if (columnCheck) { + clonedTable.checks.splice(clonedTable.checks.indexOf(columnCheck), 1); + upQueries.push(this.dropCheckConstraintSql(table, columnCheck)); + downQueries.push(this.createCheckConstraintSql(table, columnCheck)); + } + + // drop column unique + const columnUnique = clonedTable.uniques.find(unique => unique.columnNames.length === 1 && unique.columnNames[0] === column.name); + if (columnUnique) { + clonedTable.uniques.splice(clonedTable.uniques.indexOf(columnUnique), 1); + upQueries.push(this.dropUniqueConstraintSql(table, columnUnique)); + downQueries.push(this.createUniqueConstraintSql(table, columnUnique)); + } + + upQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} DROP COLUMN "${column.name}"`)); + downQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} ADD ${this.buildCreateColumnSql(table, column)}`)); + + // drop enum type + if (column.type === "enum" || column.type === "simple-enum") { + const hasEnum = await this.hasEnumType(table, column); + if (hasEnum) { + const enumType = await this.getEnumTypeName(table, column); + const escapedEnumName = `"${enumType.enumTypeSchema}"."${enumType.enumTypeName}"`; + upQueries.push(this.dropEnumTypeSql(table, column, escapedEnumName)); + downQueries.push(this.createEnumTypeSql(table, column, escapedEnumName)); + } + } + + await this.executeQueries(upQueries, downQueries); + + clonedTable.removeColumn(column); + 
this.replaceCachedTable(table, clonedTable); + } + + /** + * Drops the columns in the table. + */ + async dropColumns(tableOrName: Table|string, columns: TableColumn[]): Promise { + await PromiseUtils.runInSequence(columns, column => this.dropColumn(tableOrName, column)); + } + + /** + * Creates a new primary key. + */ + async createPrimaryKey(tableOrName: Table|string, columnNames: string[]): Promise { + const table = tableOrName instanceof Table ? tableOrName : await this.getCachedTable(tableOrName); + const clonedTable = table.clone(); + + const up = this.createPrimaryKeySql(table, columnNames); + + // mark columns as primary, because dropPrimaryKeySql build constraint name from table primary column names. + clonedTable.columns.forEach(column => { + if (columnNames.find(columnName => columnName === column.name)) + column.isPrimary = true; + }); + const down = this.dropPrimaryKeySql(clonedTable); + + await this.executeQueries(up, down); + this.replaceCachedTable(table, clonedTable); + } + + /** + * Updates composite primary keys. + */ + async updatePrimaryKeys(tableOrName: Table|string, columns: TableColumn[]): Promise { + const table = tableOrName instanceof Table ? tableOrName : await this.getCachedTable(tableOrName); + const clonedTable = table.clone(); + const columnNames = columns.map(column => column.name); + const upQueries: Query[] = []; + const downQueries: Query[] = []; + + // if table already have primary columns, we must drop them. + const primaryColumns = clonedTable.primaryColumns; + if (primaryColumns.length > 0) { + const pkName = this.connection.namingStrategy.primaryKeyName(clonedTable.name, primaryColumns.map(column => column.name)); + const columnNamesString = primaryColumns.map(column => `"${column.name}"`).join(", "); + upQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} DROP CONSTRAINT "${pkName}"`)); + downQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} ADD CONSTRAINT "${pkName}" PRIMARY KEY (${columnNamesString})`)); + } + + // update columns in table. + clonedTable.columns + .filter(column => columnNames.indexOf(column.name) !== -1) + .forEach(column => column.isPrimary = true); + + const pkName = this.connection.namingStrategy.primaryKeyName(clonedTable.name, columnNames); + const columnNamesString = columnNames.map(columnName => `"${columnName}"`).join(", "); + upQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} ADD CONSTRAINT "${pkName}" PRIMARY KEY (${columnNamesString})`)); + downQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} DROP CONSTRAINT "${pkName}"`)); + + await this.executeQueries(upQueries, downQueries); + this.replaceCachedTable(table, clonedTable); + } + + /** + * Drops a primary key. + */ + async dropPrimaryKey(tableOrName: Table|string): Promise { + const table = tableOrName instanceof Table ? tableOrName : await this.getCachedTable(tableOrName); + const up = this.dropPrimaryKeySql(table); + const down = this.createPrimaryKeySql(table, table.primaryColumns.map(column => column.name)); + await this.executeQueries(up, down); + table.primaryColumns.forEach(column => { + column.isPrimary = false; + }); + } + + /** + * Creates new unique constraint. + */ + async createUniqueConstraint(tableOrName: Table|string, uniqueConstraint: TableUnique): Promise { + const table = tableOrName instanceof Table ? tableOrName : await this.getCachedTable(tableOrName); + + // new unique constraint may be passed without name. In this case we generate unique name manually. 
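For orientation, a minimal usage sketch of this method, assuming a hypothetical "post" table and an already-obtained queryRunner; the constraint name is deliberately omitted so the naming strategy fills it in below:

    import {TableUnique} from "typeorm";

    // Sketch only: an unnamed unique constraint gets its name generated before the SQL is built.
    await queryRunner.createUniqueConstraint("post", new TableUnique({
        columnNames: ["slug"],
    }));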
+ if (!uniqueConstraint.name) + uniqueConstraint.name = this.connection.namingStrategy.uniqueConstraintName(table.name, uniqueConstraint.columnNames); + + const up = this.createUniqueConstraintSql(table, uniqueConstraint); + const down = this.dropUniqueConstraintSql(table, uniqueConstraint); + await this.executeQueries(up, down); + table.addUniqueConstraint(uniqueConstraint); + } + + /** + * Creates new unique constraints. + */ + async createUniqueConstraints(tableOrName: Table|string, uniqueConstraints: TableUnique[]): Promise { + await PromiseUtils.runInSequence(uniqueConstraints, uniqueConstraint => this.createUniqueConstraint(tableOrName, uniqueConstraint)); + } + + /** + * Drops unique constraint. + */ + async dropUniqueConstraint(tableOrName: Table|string, uniqueOrName: TableUnique|string): Promise { + const table = tableOrName instanceof Table ? tableOrName : await this.getCachedTable(tableOrName); + const uniqueConstraint = uniqueOrName instanceof TableUnique ? uniqueOrName : table.uniques.find(u => u.name === uniqueOrName); + if (!uniqueConstraint) + throw new Error(`Supplied unique constraint was not found in table ${table.name}`); + + const up = this.dropUniqueConstraintSql(table, uniqueConstraint); + const down = this.createUniqueConstraintSql(table, uniqueConstraint); + await this.executeQueries(up, down); + table.removeUniqueConstraint(uniqueConstraint); + } + + /** + * Drops unique constraints. + */ + async dropUniqueConstraints(tableOrName: Table|string, uniqueConstraints: TableUnique[]): Promise { + await PromiseUtils.runInSequence(uniqueConstraints, uniqueConstraint => this.dropUniqueConstraint(tableOrName, uniqueConstraint)); + } + + /** + * Creates new check constraint. + */ + async createCheckConstraint(tableOrName: Table|string, checkConstraint: TableCheck): Promise { + const table = tableOrName instanceof Table ? tableOrName : await this.getCachedTable(tableOrName); + + // new unique constraint may be passed without name. In this case we generate unique name manually. + if (!checkConstraint.name) + checkConstraint.name = this.connection.namingStrategy.checkConstraintName(table.name, checkConstraint.expression!); + + const up = this.createCheckConstraintSql(table, checkConstraint); + const down = this.dropCheckConstraintSql(table, checkConstraint); + await this.executeQueries(up, down); + table.addCheckConstraint(checkConstraint); + } + + /** + * Creates new check constraints. + */ + async createCheckConstraints(tableOrName: Table|string, checkConstraints: TableCheck[]): Promise { + const promises = checkConstraints.map(checkConstraint => this.createCheckConstraint(tableOrName, checkConstraint)); + await Promise.all(promises); + } + + /** + * Drops check constraint. + */ + async dropCheckConstraint(tableOrName: Table|string, checkOrName: TableCheck|string): Promise { + const table = tableOrName instanceof Table ? tableOrName : await this.getCachedTable(tableOrName); + const checkConstraint = checkOrName instanceof TableCheck ? checkOrName : table.checks.find(c => c.name === checkOrName); + if (!checkConstraint) + throw new Error(`Supplied check constraint was not found in table ${table.name}`); + + const up = this.dropCheckConstraintSql(table, checkConstraint); + const down = this.createCheckConstraintSql(table, checkConstraint); + await this.executeQueries(up, down); + table.removeCheckConstraint(checkConstraint); + } + + /** + * Drops check constraints. 
+ */ + async dropCheckConstraints(tableOrName: Table|string, checkConstraints: TableCheck[]): Promise { + const promises = checkConstraints.map(checkConstraint => this.dropCheckConstraint(tableOrName, checkConstraint)); + await Promise.all(promises); + } + + /** + * Creates new exclusion constraint. + */ + async createExclusionConstraint(tableOrName: Table|string, exclusionConstraint: TableExclusion): Promise { + const table = tableOrName instanceof Table ? tableOrName : await this.getCachedTable(tableOrName); + + // new unique constraint may be passed without name. In this case we generate unique name manually. + if (!exclusionConstraint.name) + exclusionConstraint.name = this.connection.namingStrategy.exclusionConstraintName(table.name, exclusionConstraint.expression!); + + const up = this.createExclusionConstraintSql(table, exclusionConstraint); + const down = this.dropExclusionConstraintSql(table, exclusionConstraint); + await this.executeQueries(up, down); + table.addExclusionConstraint(exclusionConstraint); + } + + /** + * Creates new exclusion constraints. + */ + async createExclusionConstraints(tableOrName: Table|string, exclusionConstraints: TableExclusion[]): Promise { + const promises = exclusionConstraints.map(exclusionConstraint => this.createExclusionConstraint(tableOrName, exclusionConstraint)); + await Promise.all(promises); + } + + /** + * Drops exclusion constraint. + */ + async dropExclusionConstraint(tableOrName: Table|string, exclusionOrName: TableExclusion|string): Promise { + const table = tableOrName instanceof Table ? tableOrName : await this.getCachedTable(tableOrName); + const exclusionConstraint = exclusionOrName instanceof TableExclusion ? exclusionOrName : table.exclusions.find(c => c.name === exclusionOrName); + if (!exclusionConstraint) + throw new Error(`Supplied exclusion constraint was not found in table ${table.name}`); + + const up = this.dropExclusionConstraintSql(table, exclusionConstraint); + const down = this.createExclusionConstraintSql(table, exclusionConstraint); + await this.executeQueries(up, down); + table.removeExclusionConstraint(exclusionConstraint); + } + + /** + * Drops exclusion constraints. + */ + async dropExclusionConstraints(tableOrName: Table|string, exclusionConstraints: TableExclusion[]): Promise { + const promises = exclusionConstraints.map(exclusionConstraint => this.dropExclusionConstraint(tableOrName, exclusionConstraint)); + await Promise.all(promises); + } + + /** + * Creates a new foreign key. + */ + async createForeignKey(tableOrName: Table|string, foreignKey: TableForeignKey): Promise { + const table = tableOrName instanceof Table ? tableOrName : await this.getCachedTable(tableOrName); + + // new FK may be passed without name. In this case we generate FK name manually. + if (!foreignKey.name) + foreignKey.name = this.connection.namingStrategy.foreignKeyName(table.name, foreignKey.columnNames, foreignKey.referencedTableName, foreignKey.referencedColumnNames); + + const up = this.createForeignKeySql(table, foreignKey); + const down = this.dropForeignKeySql(table, foreignKey); + await this.executeQueries(up, down); + table.addForeignKey(foreignKey); + } + + /** + * Creates a new foreign keys. + */ + async createForeignKeys(tableOrName: Table|string, foreignKeys: TableForeignKey[]): Promise { + await PromiseUtils.runInSequence(foreignKeys, foreignKey => this.createForeignKey(tableOrName, foreignKey)); + } + + /** + * Drops a foreign key from the table. 
+ */ + async dropForeignKey(tableOrName: Table|string, foreignKeyOrName: TableForeignKey|string): Promise { + const table = tableOrName instanceof Table ? tableOrName : await this.getCachedTable(tableOrName); + const foreignKey = foreignKeyOrName instanceof TableForeignKey ? foreignKeyOrName : table.foreignKeys.find(fk => fk.name === foreignKeyOrName); + if (!foreignKey) + throw new Error(`Supplied foreign key was not found in table ${table.name}`); + + const up = this.dropForeignKeySql(table, foreignKey); + const down = this.createForeignKeySql(table, foreignKey); + await this.executeQueries(up, down); + table.removeForeignKey(foreignKey); + } + + /** + * Drops a foreign keys from the table. + */ + async dropForeignKeys(tableOrName: Table|string, foreignKeys: TableForeignKey[]): Promise { + await PromiseUtils.runInSequence(foreignKeys, foreignKey => this.dropForeignKey(tableOrName, foreignKey)); + } + + /** + * Creates a new index. + */ + async createIndex(tableOrName: Table|string, index: TableIndex): Promise { + const table = tableOrName instanceof Table ? tableOrName : await this.getCachedTable(tableOrName); + + // new index may be passed without name. In this case we generate index name manually. + if (!index.name) + index.name = this.connection.namingStrategy.indexName(table.name, index.columnNames, index.where); + + const up = this.createIndexSql(table, index); + const down = this.dropIndexSql(table, index); + await this.executeQueries(up, down); + table.addIndex(index); + } + + /** + * Creates a new indices + */ + async createIndices(tableOrName: Table|string, indices: TableIndex[]): Promise { + await PromiseUtils.runInSequence(indices, index => this.createIndex(tableOrName, index)); + } + + /** + * Drops an index from the table. + */ + async dropIndex(tableOrName: Table|string, indexOrName: TableIndex|string): Promise { + const table = tableOrName instanceof Table ? tableOrName : await this.getCachedTable(tableOrName); + const index = indexOrName instanceof TableIndex ? indexOrName : table.indices.find(i => i.name === indexOrName); + if (!index) + throw new Error(`Supplied index was not found in table ${table.name}`); + + const up = this.dropIndexSql(table, index); + const down = this.createIndexSql(table, index); + await this.executeQueries(up, down); + table.removeIndex(index); + } + + /** + * Drops an indices from the table. + */ + async dropIndices(tableOrName: Table|string, indices: TableIndex[]): Promise { + await PromiseUtils.runInSequence(indices, index => this.dropIndex(tableOrName, index)); + } + + /** + * Clears all table contents. + * Note: this operation uses SQL's TRUNCATE query which cannot be reverted in transactions. + */ + async clearTable(tableName: string): Promise { + await this.query(`TRUNCATE TABLE ${this.escapePath(tableName)}`); + } + + /** + * Removes all tables from the currently connected database. + */ + async clearDatabase(): Promise { + const schemas: string[] = []; + this.connection.entityMetadatas + .filter(metadata => metadata.schema) + .forEach(metadata => { + const isSchemaExist = !!schemas.find(schema => schema === metadata.schema); + if (!isSchemaExist) + schemas.push(metadata.schema!); + }); + schemas.push(this.driver.options.database || "current_schema()"); + const schemaNamesString = schemas.map(name => { + return name === "current_schema()" ? 
name : "'" + name + "'"; + }).join(", "); + + await this.startTransaction(); + try { + const selectViewDropsQuery = `SELECT 'DROP VIEW IF EXISTS "' || schemaname || '"."' || viewname || '" CASCADE;' as "query" ` + + `FROM "pg_views" WHERE "schemaname" IN (${schemaNamesString}) AND "viewname" NOT IN ('geography_columns', 'geometry_columns', 'raster_columns', 'raster_overviews')`; + const dropViewQueries: ObjectLiteral[] = await this.query(selectViewDropsQuery); + await Promise.all(dropViewQueries.map(q => this.query(q["query"]))); + + // ignore spatial_ref_sys; it's a special table supporting PostGIS + // TODO generalize this as this.driver.ignoreTables + const selectTableDropsQuery = `SELECT 'DROP TABLE IF EXISTS "' || schemaname || '"."' || tablename || '" CASCADE;' as "query" FROM "pg_tables" WHERE "schemaname" IN (${schemaNamesString}) AND "tablename" NOT IN ('spatial_ref_sys')`; + const dropTableQueries: ObjectLiteral[] = await this.query(selectTableDropsQuery); + await Promise.all(dropTableQueries.map(q => this.query(q["query"]))); + await this.dropEnumTypes(schemaNamesString); + + await this.commitTransaction(); + + } catch (error) { + try { // we throw original error even if rollback thrown an error + await this.rollbackTransaction(); + } catch (rollbackError) { } + throw error; + } + } + + // ------------------------------------------------------------------------- + // Protected Methods + // ------------------------------------------------------------------------- + + protected async loadViews(viewNames: string[]): Promise { + const hasTable = await this.hasTable(this.getTypeormMetadataTableName()); + if (!hasTable) + return Promise.resolve([]); + + const currentSchemaQuery = await this.query(`SELECT * FROM current_schema()`); + const currentSchema = currentSchemaQuery[0]["current_schema"]; + + const viewsCondition = viewNames.map(viewName => { + let [schema, name] = viewName.split("."); + if (!name) { + name = schema; + schema = this.driver.options.database || currentSchema; + } + return `("t"."schema" = '${schema}' AND "t"."name" = '${name}')`; + }).join(" OR "); + + const query = `SELECT "t".*, "v"."check_option" FROM ${this.escapePath(this.getTypeormMetadataTableName())} "t" ` + + `INNER JOIN "information_schema"."views" "v" ON "v"."table_schema" = "t"."schema" AND "v"."table_name" = "t"."name" WHERE "t"."type" = 'VIEW' ${viewsCondition ? `AND (${viewsCondition})` : ""}`; + const dbViews = await this.query(query); + return dbViews.map((dbView: any) => { + const view = new View(); + const schema = dbView["schema"] === currentSchema && !this.driver.options.database ? undefined : dbView["schema"]; + view.name = this.driver.buildTableName(dbView["name"], schema); + view.expression = dbView["value"]; + return view; + }); + } + + /** + * Loads all tables (with given names) from the database and creates a Table from them. 
+ */ + protected async loadTables(tableNames: string[]): Promise { + + // if no tables given then no need to proceed + if (!tableNames || !tableNames.length) + return []; + + const currentSchemaQuery = await this.query(`SELECT * FROM current_schema()`); + const currentSchema = currentSchemaQuery[0]["current_schema"]; + + const tablesCondition = tableNames.map(tableName => { + let [schema, name] = tableName.split("."); + if (!name) { + name = schema; + schema = this.driver.options.database || currentSchema; + } + return `("table_schema" = '${schema}' AND "table_name" = '${name}')`; + }).join(" OR "); + const tablesSql = `SELECT * FROM "information_schema"."tables" WHERE ` + tablesCondition; + const columnsSql = `SELECT *, ('"' || "udt_schema" || '"."' || "udt_name" || '"')::"regtype" AS "regtype" FROM "information_schema"."columns" WHERE ` + tablesCondition; + + const constraintsCondition = tableNames.map(tableName => { + let [schema, name] = tableName.split("."); + if (!name) { + name = schema; + schema = this.driver.options.database || currentSchema; + } + return `("ns"."nspname" = '${schema}' AND "t"."relname" = '${name}')`; + }).join(" OR "); + + const constraintsSql = `SELECT "ns"."nspname" AS "table_schema", "t"."relname" AS "table_name", "cnst"."conname" AS "constraint_name", ` + + `pg_get_constraintdef("cnst"."oid") AS "expression", ` + + `CASE "cnst"."contype" WHEN 'p' THEN 'PRIMARY' WHEN 'u' THEN 'UNIQUE' WHEN 'c' THEN 'CHECK' WHEN 'x' THEN 'EXCLUDE' END AS "constraint_type", "a"."attname" AS "column_name" ` + + `FROM "pg_constraint" "cnst" ` + + `INNER JOIN "pg_class" "t" ON "t"."oid" = "cnst"."conrelid" ` + + `INNER JOIN "pg_namespace" "ns" ON "ns"."oid" = "cnst"."connamespace" ` + + `LEFT JOIN "pg_attribute" "a" ON "a"."attrelid" = "cnst"."conrelid" AND "a"."attnum" = ANY ("cnst"."conkey") ` + + `WHERE "t"."relkind" = 'r' AND (${constraintsCondition})`; + + const indicesSql = `SELECT "ns"."nspname" AS "table_schema", "t"."relname" AS "table_name", "i"."relname" AS "constraint_name", "a"."attname" AS "column_name", ` + + `CASE "ix"."indisunique" WHEN 't' THEN 'TRUE' ELSE'FALSE' END AS "is_unique", pg_get_expr("ix"."indpred", "ix"."indrelid") AS "condition", ` + + `"types"."typname" AS "type_name" ` + + `FROM "pg_class" "t" ` + + `INNER JOIN "pg_index" "ix" ON "ix"."indrelid" = "t"."oid" ` + + `INNER JOIN "pg_attribute" "a" ON "a"."attrelid" = "t"."oid" AND "a"."attnum" = ANY ("ix"."indkey") ` + + `INNER JOIN "pg_namespace" "ns" ON "ns"."oid" = "t"."relnamespace" ` + + `INNER JOIN "pg_class" "i" ON "i"."oid" = "ix"."indexrelid" ` + + `INNER JOIN "pg_type" "types" ON "types"."oid" = "a"."atttypid" ` + + `LEFT JOIN "pg_constraint" "cnst" ON "cnst"."conname" = "i"."relname" ` + + `WHERE "t"."relkind" = 'r' AND "cnst"."contype" IS NULL AND (${constraintsCondition})`; + + const foreignKeysCondition = tableNames.map(tableName => { + let [schema, name] = tableName.split("."); + if (!name) { + name = schema; + schema = this.driver.options.database || currentSchema; + } + return `("ns"."nspname" = '${schema}' AND "cl"."relname" = '${name}')`; + }).join(" OR "); + const foreignKeysSql = `SELECT "con"."conname" AS "constraint_name", "con"."nspname" AS "table_schema", "con"."relname" AS "table_name", "att2"."attname" AS "column_name", ` + + `"ns"."nspname" AS "referenced_table_schema", "cl"."relname" AS "referenced_table_name", "att"."attname" AS "referenced_column_name", "con"."confdeltype" AS "on_delete", ` + + `"con"."confupdtype" AS "on_update", "con"."condeferrable" AS "deferrable", 
"con"."condeferred" AS "deferred" ` + + `FROM ( ` + + `SELECT UNNEST ("con1"."conkey") AS "parent", UNNEST ("con1"."confkey") AS "child", "con1"."confrelid", "con1"."conrelid", "con1"."conname", "con1"."contype", "ns"."nspname", ` + + `"cl"."relname", "con1"."condeferrable", ` + + `CASE WHEN "con1"."condeferred" THEN 'INITIALLY DEFERRED' ELSE 'INITIALLY IMMEDIATE' END as condeferred, ` + + `CASE "con1"."confdeltype" WHEN 'a' THEN 'NO ACTION' WHEN 'r' THEN 'RESTRICT' WHEN 'c' THEN 'CASCADE' WHEN 'n' THEN 'SET NULL' WHEN 'd' THEN 'SET DEFAULT' END as "confdeltype", ` + + `CASE "con1"."confupdtype" WHEN 'a' THEN 'NO ACTION' WHEN 'r' THEN 'RESTRICT' WHEN 'c' THEN 'CASCADE' WHEN 'n' THEN 'SET NULL' WHEN 'd' THEN 'SET DEFAULT' END as "confupdtype" ` + + `FROM "pg_class" "cl" ` + + `INNER JOIN "pg_namespace" "ns" ON "cl"."relnamespace" = "ns"."oid" ` + + `INNER JOIN "pg_constraint" "con1" ON "con1"."conrelid" = "cl"."oid" ` + + `WHERE "con1"."contype" = 'f' AND (${foreignKeysCondition}) ` + + `) "con" ` + + `INNER JOIN "pg_attribute" "att" ON "att"."attrelid" = "con"."confrelid" AND "att"."attnum" = "con"."child" ` + + `INNER JOIN "pg_class" "cl" ON "cl"."oid" = "con"."confrelid" ` + + `INNER JOIN "pg_namespace" "ns" ON "cl"."relnamespace" = "ns"."oid" ` + + `INNER JOIN "pg_attribute" "att2" ON "att2"."attrelid" = "con"."conrelid" AND "att2"."attnum" = "con"."parent"`; + const [dbTables, dbColumns, dbConstraints, dbIndices, dbForeignKeys]: ObjectLiteral[][] = await Promise.all([ + this.query(tablesSql), + this.query(columnsSql), + this.query(constraintsSql), + this.query(indicesSql), + this.query(foreignKeysSql), + ]); + + // if tables were not found in the db, no need to proceed + if (!dbTables.length) + return []; + + // create tables for loaded tables + return Promise.all(dbTables.map(async dbTable => { + const table = new Table(); + + // We do not need to join schema name, when database is by default. + // In this case we need local variable `tableFullName` for below comparision. + const schema = dbTable["table_schema"] === currentSchema && !this.driver.options.database ? 
undefined : dbTable["table_schema"]; + table.name = this.driver.buildTableName(dbTable["table_name"], schema); + const tableFullName = this.driver.buildTableName(dbTable["table_name"], dbTable["table_schema"]); + + // create columns from the loaded columns + table.columns = await Promise.all(dbColumns + .filter(dbColumn => this.driver.buildTableName(dbColumn["table_name"], dbColumn["table_schema"]) === tableFullName) + .map(async dbColumn => { + + const columnConstraints = dbConstraints.filter(dbConstraint => { + return this.driver.buildTableName(dbConstraint["table_name"], dbConstraint["table_schema"]) === tableFullName && dbConstraint["column_name"] === dbColumn["column_name"]; + }); + + const tableColumn = new TableColumn(); + tableColumn.name = dbColumn["column_name"]; + tableColumn.type = dbColumn["regtype"].toLowerCase(); + + if (tableColumn.type === "numeric" || tableColumn.type === "decimal" || tableColumn.type === "float") { + // If one of these properties was set, and another was not, Postgres sets '0' in to unspecified property + // we set 'undefined' in to unspecified property to avoid changing column on sync + if (dbColumn["numeric_precision"] !== null && !this.isDefaultColumnPrecision(table, tableColumn, dbColumn["numeric_precision"])) { + tableColumn.precision = dbColumn["numeric_precision"]; + } else if (dbColumn["numeric_scale"] !== null && !this.isDefaultColumnScale(table, tableColumn, dbColumn["numeric_scale"])) { + tableColumn.precision = undefined; + } + if (dbColumn["numeric_scale"] !== null && !this.isDefaultColumnScale(table, tableColumn, dbColumn["numeric_scale"])) { + tableColumn.scale = dbColumn["numeric_scale"]; + } else if (dbColumn["numeric_precision"] !== null && !this.isDefaultColumnPrecision(table, tableColumn, dbColumn["numeric_precision"])) { + tableColumn.scale = undefined; + } + } + + if (dbColumn["data_type"].toLowerCase() === "array") { + tableColumn.isArray = true; + const type = tableColumn.type.replace("[]", ""); + tableColumn.type = this.connection.driver.normalizeType({type: type}); + } + + if (tableColumn.type === "interval" + || tableColumn.type === "time without time zone" + || tableColumn.type === "time with time zone" + || tableColumn.type === "timestamp without time zone" + || tableColumn.type === "timestamp with time zone") { + tableColumn.precision = !this.isDefaultColumnPrecision(table, tableColumn, dbColumn["datetime_precision"]) ? 
dbColumn["datetime_precision"] : undefined; + } + + if (tableColumn.type.indexOf("enum") !== -1) { + tableColumn.type = "enum"; + const sql = `SELECT "e"."enumlabel" AS "value" FROM "pg_enum" "e" ` + + `INNER JOIN "pg_type" "t" ON "t"."oid" = "e"."enumtypid" ` + + `INNER JOIN "pg_namespace" "n" ON "n"."oid" = "t"."typnamespace" ` + + `WHERE "n"."nspname" = '${dbTable["table_schema"]}' AND "t"."typname" = '${this.buildEnumName(table, tableColumn.name, false, true)}'`; + const results: ObjectLiteral[] = await this.query(sql); + tableColumn.enum = results.map(result => result["value"]); + } + + if (tableColumn.type === "geometry") { + const geometryColumnSql = `SELECT * FROM ( + SELECT + "f_table_schema" "table_schema", + "f_table_name" "table_name", + "f_geometry_column" "column_name", + "srid", + "type" + FROM "geometry_columns" + ) AS _ + WHERE (${tablesCondition}) AND "column_name" = '${tableColumn.name}' AND "table_name" = '${table.name}'`; + + const results: ObjectLiteral[] = await this.query(geometryColumnSql); + tableColumn.spatialFeatureType = results[0].type; + tableColumn.srid = results[0].srid; + } + + if (tableColumn.type === "geography") { + const geographyColumnSql = `SELECT * FROM ( + SELECT + "f_table_schema" "table_schema", + "f_table_name" "table_name", + "f_geography_column" "column_name", + "srid", + "type" + FROM "geography_columns" + ) AS _ + WHERE (${tablesCondition}) AND "column_name" = '${tableColumn.name}' AND "table_name" = '${table.name}'`; + + const results: ObjectLiteral[] = await this.query(geographyColumnSql); + tableColumn.spatialFeatureType = results[0].type; + tableColumn.srid = results[0].srid; + } + + // check only columns that have length property + if (this.driver.withLengthColumnTypes.indexOf(tableColumn.type as ColumnType) !== -1 && dbColumn["character_maximum_length"]) { + const length = dbColumn["character_maximum_length"].toString(); + tableColumn.length = !this.isDefaultColumnLength(table, tableColumn, length) ? length : ""; + } + tableColumn.isNullable = dbColumn["is_nullable"] === "YES"; + tableColumn.isPrimary = !!columnConstraints.find(constraint => constraint["constraint_type"] === "PRIMARY"); + + const uniqueConstraint = columnConstraints.find(constraint => constraint["constraint_type"] === "UNIQUE"); + const isConstraintComposite = uniqueConstraint + ? 
!!dbConstraints.find(dbConstraint => dbConstraint["constraint_type"] === "UNIQUE" + && dbConstraint["constraint_name"] === uniqueConstraint["constraint_name"] + && dbConstraint["column_name"] !== dbColumn["column_name"]) + : false; + tableColumn.isUnique = !!uniqueConstraint && !isConstraintComposite; + + if (dbColumn["column_default"] !== null && dbColumn["column_default"] !== undefined) { + if (dbColumn["column_default"].replace(/"/gi, "") === `nextval('${this.buildSequenceName(table, dbColumn["column_name"], currentSchema, true)}'::regclass)`) { + tableColumn.isGenerated = true; + tableColumn.generationStrategy = "increment"; + } else if (dbColumn["column_default"] === "gen_random_uuid()" || /^uuid_generate_v\d\(\)/.test(dbColumn["column_default"])) { + tableColumn.isGenerated = true; + tableColumn.generationStrategy = "uuid"; + } else { + tableColumn.default = dbColumn["column_default"].replace(/::.*/, ""); + } + } + + tableColumn.comment = ""; // dbColumn["COLUMN_COMMENT"]; + if (dbColumn["character_set_name"]) + tableColumn.charset = dbColumn["character_set_name"]; + if (dbColumn["collation_name"]) + tableColumn.collation = dbColumn["collation_name"]; + return tableColumn; + })); + + // find unique constraints of table, group them by constraint name and build TableUnique. + const tableUniqueConstraints = OrmUtils.uniq(dbConstraints.filter(dbConstraint => { + return this.driver.buildTableName(dbConstraint["table_name"], dbConstraint["table_schema"]) === tableFullName + && dbConstraint["constraint_type"] === "UNIQUE"; + }), dbConstraint => dbConstraint["constraint_name"]); + + table.uniques = tableUniqueConstraints.map(constraint => { + const uniques = dbConstraints.filter(dbC => dbC["constraint_name"] === constraint["constraint_name"]); + return new TableUnique({ + name: constraint["constraint_name"], + columnNames: uniques.map(u => u["column_name"]) + }); + }); + + // find check constraints of table, group them by constraint name and build TableCheck. + const tableCheckConstraints = OrmUtils.uniq(dbConstraints.filter(dbConstraint => { + return this.driver.buildTableName(dbConstraint["table_name"], dbConstraint["table_schema"]) === tableFullName + && dbConstraint["constraint_type"] === "CHECK"; + }), dbConstraint => dbConstraint["constraint_name"]); + + table.checks = tableCheckConstraints.map(constraint => { + const checks = dbConstraints.filter(dbC => dbC["constraint_name"] === constraint["constraint_name"]); + return new TableCheck({ + name: constraint["constraint_name"], + columnNames: checks.map(c => c["column_name"]), + expression: constraint["expression"].replace(/^\s*CHECK\s*\((.*)\)\s*$/i, "$1") + }); + }); + + // find exclusion constraints of table, group them by constraint name and build TableExclusion. + const tableExclusionConstraints = OrmUtils.uniq(dbConstraints.filter(dbConstraint => { + return this.driver.buildTableName(dbConstraint["table_name"], dbConstraint["table_schema"]) === tableFullName + && dbConstraint["constraint_type"] === "EXCLUDE"; + }), dbConstraint => dbConstraint["constraint_name"]); + + table.exclusions = tableExclusionConstraints.map(constraint => { + return new TableExclusion({ + name: constraint["constraint_name"], + expression: constraint["expression"].substring(8) // trim EXCLUDE from start of expression + }); + }); + + // find foreign key constraints of table, group them by constraint name and build TableForeignKey. 
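For orientation, a minimal sketch of the row shape this grouping expects, assuming a hypothetical composite foreign key (the values are placeholders, not real query output):

    // Illustration only: the foreign-key query yields one row per referencing column, so a
    // composite key shows up as multiple rows sharing the same constraint_name.
    const sampleRows = [
        { constraint_name: "FK_example", column_name: "order_id", referenced_column_name: "id" },
        { constraint_name: "FK_example", column_name: "item_id",  referenced_column_name: "id" },
    ];
    // Deduplicating by constraint_name keeps one representative row; the matching rows then
    // supply columnNames ["order_id", "item_id"] and referencedColumnNames ["id", "id"].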
+ const tableForeignKeyConstraints = OrmUtils.uniq(dbForeignKeys.filter(dbForeignKey => { + return this.driver.buildTableName(dbForeignKey["table_name"], dbForeignKey["table_schema"]) === tableFullName; + }), dbForeignKey => dbForeignKey["constraint_name"]); + + table.foreignKeys = tableForeignKeyConstraints.map(dbForeignKey => { + const foreignKeys = dbForeignKeys.filter(dbFk => dbFk["constraint_name"] === dbForeignKey["constraint_name"]); + + // if referenced table located in currently used schema, we don't need to concat schema name to table name. + const schema = dbForeignKey["referenced_table_schema"] === currentSchema ? undefined : dbForeignKey["referenced_table_schema"]; + const referencedTableName = this.driver.buildTableName(dbForeignKey["referenced_table_name"], schema); + + return new TableForeignKey({ + name: dbForeignKey["constraint_name"], + columnNames: foreignKeys.map(dbFk => dbFk["column_name"]), + referencedTableName: referencedTableName, + referencedColumnNames: foreignKeys.map(dbFk => dbFk["referenced_column_name"]), + onDelete: dbForeignKey["on_delete"], + onUpdate: dbForeignKey["on_update"], + deferrable: dbForeignKey["deferrable"] ? dbForeignKey["deferred"] : undefined, + }); + }); + + // find index constraints of table, group them by constraint name and build TableIndex. + const tableIndexConstraints = OrmUtils.uniq(dbIndices.filter(dbIndex => { + return this.driver.buildTableName(dbIndex["table_name"], dbIndex["table_schema"]) === tableFullName; + }), dbIndex => dbIndex["constraint_name"]); + + table.indices = tableIndexConstraints.map(constraint => { + const indices = dbIndices.filter(index => { + return index["table_schema"] === constraint["table_schema"] + && index["table_name"] === constraint["table_name"] + && index["constraint_name"] === constraint["constraint_name"]; + }); + return new TableIndex({ + table: table, + name: constraint["constraint_name"], + columnNames: indices.map(i => i["column_name"]), + isUnique: constraint["is_unique"] === "TRUE", + where: constraint["condition"], + isSpatial: indices.every(i => this.driver.spatialTypes.indexOf(i["type_name"]) >= 0), + isFulltext: false + }); + }); + + return table; + })); + } + + /** + * Builds create table sql. + */ + protected createTableSql(table: Table, createForeignKeys?: boolean): Query { + const columnDefinitions = table.columns.map(column => this.buildCreateColumnSql(table, column)).join(", "); + let sql = `CREATE TABLE ${this.escapePath(table)} (${columnDefinitions}`; + + table.columns + .filter(column => column.isUnique) + .forEach(column => { + const isUniqueExist = table.uniques.some(unique => unique.columnNames.length === 1 && unique.columnNames[0] === column.name); + if (!isUniqueExist) + table.uniques.push(new TableUnique({ + name: this.connection.namingStrategy.uniqueConstraintName(table.name, [column.name]), + columnNames: [column.name] + })); + }); + + if (table.uniques.length > 0) { + const uniquesSql = table.uniques.map(unique => { + const uniqueName = unique.name ? unique.name : this.connection.namingStrategy.uniqueConstraintName(table.name, unique.columnNames); + const columnNames = unique.columnNames.map(columnName => `"${columnName}"`).join(", "); + return `CONSTRAINT "${uniqueName}" UNIQUE (${columnNames})`; + }).join(", "); + + sql += `, ${uniquesSql}`; + } + + if (table.checks.length > 0) { + const checksSql = table.checks.map(check => { + const checkName = check.name ? 
check.name : this.connection.namingStrategy.checkConstraintName(table.name, check.expression!); + return `CONSTRAINT "${checkName}" CHECK (${check.expression})`; + }).join(", "); + + sql += `, ${checksSql}`; + } + + if (table.exclusions.length > 0) { + const exclusionsSql = table.exclusions.map(exclusion => { + const exclusionName = exclusion.name ? exclusion.name : this.connection.namingStrategy.exclusionConstraintName(table.name, exclusion.expression!); + return `CONSTRAINT "${exclusionName}" EXCLUDE ${exclusion.expression}`; + }).join(", "); + + sql += `, ${exclusionsSql}`; + } + + if (table.foreignKeys.length > 0 && createForeignKeys) { + const foreignKeysSql = table.foreignKeys.map(fk => { + const columnNames = fk.columnNames.map(columnName => `"${columnName}"`).join(", "); + if (!fk.name) + fk.name = this.connection.namingStrategy.foreignKeyName(table.name, fk.columnNames, fk.referencedTableName, fk.referencedColumnNames); + const referencedColumnNames = fk.referencedColumnNames.map(columnName => `"${columnName}"`).join(", "); + + let constraint = `CONSTRAINT "${fk.name}" FOREIGN KEY (${columnNames}) REFERENCES ${this.escapePath(fk.referencedTableName)} (${referencedColumnNames})`; + if (fk.onDelete) + constraint += ` ON DELETE ${fk.onDelete}`; + if (fk.onUpdate) + constraint += ` ON UPDATE ${fk.onUpdate}`; + if (fk.deferrable) + constraint += ` DEFERRABLE ${fk.deferrable}`; + + return constraint; + }).join(", "); + + sql += `, ${foreignKeysSql}`; + } + + const primaryColumns = table.columns.filter(column => column.isPrimary); + if (primaryColumns.length > 0) { + const primaryKeyName = this.connection.namingStrategy.primaryKeyName(table.name, primaryColumns.map(column => column.name)); + const columnNames = primaryColumns.map(column => `"${column.name}"`).join(", "); + sql += `, CONSTRAINT "${primaryKeyName}" PRIMARY KEY (${columnNames})`; + } + + sql += `)`; + + return new Query(sql); + } + + /** + * Builds drop table sql. + */ + protected dropTableSql(tableOrPath: Table|string): Query { + return new Query(`DROP TABLE ${this.escapePath(tableOrPath)}`); + } + + protected createViewSql(view: View): Query { + const materializedClause = view.materialized ? "MATERIALIZED " : ""; + const viewName = this.escapePath(view); + + if (typeof view.expression === "string") { + return new Query(`CREATE ${materializedClause}VIEW ${viewName} AS ${view.expression}`); + } else { + return new Query(`CREATE ${materializedClause}VIEW ${viewName} AS ${view.expression(this.connection).getQuery()}`); + } + } + + protected async insertViewDefinitionSql(view: View): Promise { + const currentSchemaQuery = await this.query(`SELECT * FROM current_schema()`); + const currentSchema = currentSchemaQuery[0]["current_schema"]; + const splittedName = view.name.split("."); + let schema = this.driver.options.database || currentSchema; + let name = view.name; + if (splittedName.length === 2) { + schema = splittedName[0]; + name = splittedName[1]; + } + + const expression = typeof view.expression === "string" ? view.expression.trim() : view.expression(this.connection).getQuery(); + const [query, parameters] = this.connection.createQueryBuilder() + .insert() + .into(this.getTypeormMetadataTableName()) + .values({ type: "VIEW", schema: schema, name: name, value: expression }) + .getQueryAndParameters(); + + return new Query(query, parameters); + } + + /** + * Builds drop view sql. 
+ */ + protected dropViewSql(viewOrPath: View|string): Query { + return new Query(`DROP VIEW ${this.escapePath(viewOrPath)}`); + } + + /** + * Builds remove view sql. + */ + protected async deleteViewDefinitionSql(viewOrPath: View|string): Promise { + const currentSchemaQuery = await this.query(`SELECT * FROM current_schema()`); + const currentSchema = currentSchemaQuery[0]["current_schema"]; + const viewName = viewOrPath instanceof View ? viewOrPath.name : viewOrPath; + const splittedName = viewName.split("."); + let schema = this.driver.options.database || currentSchema; + let name = viewName; + if (splittedName.length === 2) { + schema = splittedName[0]; + name = splittedName[1]; + } + + const qb = this.connection.createQueryBuilder(); + const [query, parameters] = qb.delete() + .from(this.getTypeormMetadataTableName()) + .where(`${qb.escape("type")} = 'VIEW'`) + .andWhere(`${qb.escape("schema")} = :schema`, { schema }) + .andWhere(`${qb.escape("name")} = :name`, { name }) + .getQueryAndParameters(); + + return new Query(query, parameters); + } + + /** + * Extracts schema name from given Table object or table name string. + */ + protected extractSchema(target: Table|string): string|undefined { + const tableName = target instanceof Table ? target.name : target; + return tableName.indexOf(".") === -1 ? this.driver.options.database : tableName.split(".")[0]; + } + + /** + * Drops ENUM type from given schemas. + */ + protected async dropEnumTypes(schemaNames: string): Promise { + const selectDropsQuery = `SELECT 'DROP TYPE IF EXISTS "' || n.nspname || '"."' || t.typname || '" CASCADE;' as "query" FROM "pg_type" "t" ` + + `INNER JOIN "pg_enum" "e" ON "e"."enumtypid" = "t"."oid" ` + + `INNER JOIN "pg_namespace" "n" ON "n"."oid" = "t"."typnamespace" ` + + `WHERE "n"."nspname" IN (${schemaNames}) GROUP BY "n"."nspname", "t"."typname"`; + const dropQueries: ObjectLiteral[] = await this.query(selectDropsQuery); + await Promise.all(dropQueries.map(q => this.query(q["query"]))); + } + + /** + * Checks if enum with the given name exist in the database. + */ + protected async hasEnumType(table: Table, column: TableColumn): Promise { + const schema = this.parseTableName(table).schema; + const enumName = this.buildEnumName(table, column, false, true); + const sql = `SELECT "n"."nspname", "t"."typname" FROM "pg_type" "t" ` + + `INNER JOIN "pg_namespace" "n" ON "n"."oid" = "t"."typnamespace" ` + + `WHERE "n"."nspname" = ${schema} AND "t"."typname" = '${enumName}'`; + const result = await this.query(sql); + return result.length ? true : false; + } + + /** + * Builds create ENUM type sql. + */ + protected createEnumTypeSql(table: Table, column: TableColumn, enumName?: string): Query { + if (!enumName) + enumName = this.buildEnumName(table, column); + const enumValues = column.enum!.map(value => `'${value.replace("'", "''")}'`).join(", "); + return new Query(`CREATE TYPE ${enumName} AS ENUM(${enumValues})`); + } + + /** + * Builds create ENUM type sql. + */ + protected dropEnumTypeSql(table: Table, column: TableColumn, enumName?: string): Query { + if (!enumName) + enumName = this.buildEnumName(table, column); + return new Query(`DROP TYPE ${enumName}`); + } + + /** + * Builds create index sql. + */ + protected createIndexSql(table: Table, index: TableIndex): Query { + const columns = index.columnNames.map(columnName => `"${columnName}"`).join(", "); + return new Query(`CREATE ${index.isUnique ? "UNIQUE " : ""}INDEX "${index.name}" ON ${this.escapePath(table)} ${index.isSpatial ? 
"USING GiST " : ""}(${columns}) ${index.where ? "WHERE " + index.where : ""}`); + } + + /** + * Builds drop index sql. + */ + protected dropIndexSql(table: Table, indexOrName: TableIndex|string): Query { + let indexName = indexOrName instanceof TableIndex ? indexOrName.name : indexOrName; + const schema = this.extractSchema(table); + return schema ? new Query(`DROP INDEX "${schema}"."${indexName}"`) : new Query(`DROP INDEX "${indexName}"`); + } + + /** + * Builds create primary key sql. + */ + protected createPrimaryKeySql(table: Table, columnNames: string[]): Query { + const primaryKeyName = this.connection.namingStrategy.primaryKeyName(table.name, columnNames); + const columnNamesString = columnNames.map(columnName => `"${columnName}"`).join(", "); + return new Query(`ALTER TABLE ${this.escapePath(table)} ADD CONSTRAINT "${primaryKeyName}" PRIMARY KEY (${columnNamesString})`); + } + + /** + * Builds drop primary key sql. + */ + protected dropPrimaryKeySql(table: Table): Query { + const columnNames = table.primaryColumns.map(column => column.name); + const primaryKeyName = this.connection.namingStrategy.primaryKeyName(table.name, columnNames); + return new Query(`ALTER TABLE ${this.escapePath(table)} DROP CONSTRAINT "${primaryKeyName}"`); + } + + /** + * Builds create unique constraint sql. + */ + protected createUniqueConstraintSql(table: Table, uniqueConstraint: TableUnique): Query { + const columnNames = uniqueConstraint.columnNames.map(column => `"` + column + `"`).join(", "); + return new Query(`ALTER TABLE ${this.escapePath(table)} ADD CONSTRAINT "${uniqueConstraint.name}" UNIQUE (${columnNames})`); + } + + /** + * Builds drop unique constraint sql. + */ + protected dropUniqueConstraintSql(table: Table, uniqueOrName: TableUnique|string): Query { + const uniqueName = uniqueOrName instanceof TableUnique ? uniqueOrName.name : uniqueOrName; + return new Query(`ALTER TABLE ${this.escapePath(table)} DROP CONSTRAINT "${uniqueName}"`); + } + + /** + * Builds create check constraint sql. + */ + protected createCheckConstraintSql(table: Table, checkConstraint: TableCheck): Query { + return new Query(`ALTER TABLE ${this.escapePath(table)} ADD CONSTRAINT "${checkConstraint.name}" CHECK (${checkConstraint.expression})`); + } + + /** + * Builds drop check constraint sql. + */ + protected dropCheckConstraintSql(table: Table, checkOrName: TableCheck|string): Query { + const checkName = checkOrName instanceof TableCheck ? checkOrName.name : checkOrName; + return new Query(`ALTER TABLE ${this.escapePath(table)} DROP CONSTRAINT "${checkName}"`); + } + + /** + * Builds create exclusion constraint sql. + */ + protected createExclusionConstraintSql(table: Table, exclusionConstraint: TableExclusion): Query { + return new Query(`ALTER TABLE ${this.escapePath(table)} ADD CONSTRAINT "${exclusionConstraint.name}" EXCLUDE ${exclusionConstraint.expression}`); + } + + /** + * Builds drop exclusion constraint sql. + */ + protected dropExclusionConstraintSql(table: Table, exclusionOrName: TableExclusion|string): Query { + const exclusionName = exclusionOrName instanceof TableExclusion ? exclusionOrName.name : exclusionOrName; + return new Query(`ALTER TABLE ${this.escapePath(table)} DROP CONSTRAINT "${exclusionName}"`); + } + + /** + * Builds create foreign key sql. 
+ */ + protected createForeignKeySql(table: Table, foreignKey: TableForeignKey): Query { + const columnNames = foreignKey.columnNames.map(column => `"` + column + `"`).join(", "); + const referencedColumnNames = foreignKey.referencedColumnNames.map(column => `"` + column + `"`).join(","); + let sql = `ALTER TABLE ${this.escapePath(table)} ADD CONSTRAINT "${foreignKey.name}" FOREIGN KEY (${columnNames}) ` + + `REFERENCES ${this.escapePath(foreignKey.referencedTableName)}(${referencedColumnNames})`; + if (foreignKey.onDelete) + sql += ` ON DELETE ${foreignKey.onDelete}`; + if (foreignKey.onUpdate) + sql += ` ON UPDATE ${foreignKey.onUpdate}`; + if (foreignKey.deferrable) + sql += ` DEFERRABLE ${foreignKey.deferrable}`; + + return new Query(sql); + } + + /** + * Builds drop foreign key sql. + */ + protected dropForeignKeySql(table: Table, foreignKeyOrName: TableForeignKey|string): Query { + const foreignKeyName = foreignKeyOrName instanceof TableForeignKey ? foreignKeyOrName.name : foreignKeyOrName; + return new Query(`ALTER TABLE ${this.escapePath(table)} DROP CONSTRAINT "${foreignKeyName}"`); + } + + /** + * Builds sequence name from given table and column. + */ + protected buildSequenceName(table: Table, columnOrName: TableColumn|string, currentSchema?: string, disableEscape?: true, skipSchema?: boolean): string { + const columnName = columnOrName instanceof TableColumn ? columnOrName.name : columnOrName; + let schema: string|undefined = undefined; + let tableName: string|undefined = undefined; + + if (table.name.indexOf(".") === -1) { + tableName = table.name; + } else { + schema = table.name.split(".")[0]; + tableName = table.name.split(".")[1]; + } + + if (schema && schema !== currentSchema && !skipSchema) { + return disableEscape ? `${schema}.${tableName}_${columnName}_seq` : `"${schema}"."${tableName}_${columnName}_seq"`; + + } else { + return disableEscape ? `${tableName}_${columnName}_seq` : `"${tableName}_${columnName}_seq"`; + } + } + + /** + * Builds ENUM type name from given table and column. + */ + protected buildEnumName(table: Table, columnOrName: TableColumn|string, withSchema: boolean = true, disableEscape?: boolean, toOld?: boolean): string { + /** + * If enumName is specified in column options then use it instead + */ + if (columnOrName instanceof TableColumn && columnOrName.enumName) { + let enumName = columnOrName.enumName; + if (toOld) + enumName = enumName + "_old"; + return disableEscape ? enumName : `"${enumName}"`; + } + const columnName = columnOrName instanceof TableColumn ? columnOrName.name : columnOrName; + const schema = table.name.indexOf(".") === -1 ? this.driver.options.database : table.name.split(".")[0]; + const tableName = table.name.indexOf(".") === -1 ? table.name : table.name.split(".")[1]; + let enumName = schema && withSchema ? `${schema}.${tableName}_${columnName.toLowerCase()}_enum` : `${tableName}_${columnName.toLowerCase()}_enum`; + if (toOld) + enumName = enumName + "_old"; + return enumName.split(".").map(i => { + return disableEscape ? 
i : `"${i}"`; + }).join("."); + } + + protected async getEnumTypeName(table: Table, column: TableColumn) { + const currentSchemaQuery = await this.query(`SELECT * FROM current_schema()`); + const currentSchema = currentSchemaQuery[0]["current_schema"]; + let [schema, name] = table.name.split("."); + if (!name) { + name = schema; + schema = this.driver.options.database || currentSchema; + } + const result = await this.query(`SELECT "udt_schema", "udt_name" ` + + `FROM "information_schema"."columns" WHERE "table_schema" = '${schema}' AND "table_name" = '${name}' AND "column_name"='${column.name}'`); + return { + enumTypeSchema: result[0]["udt_schema"], + enumTypeName: result[0]["udt_name"] + }; + } + + /** + * Escapes given table or view path. + */ + protected escapePath(target: Table|View|string, disableEscape?: boolean): string { + let tableName = target instanceof Table || target instanceof View ? target.name : target; + tableName = tableName.indexOf(".") === -1 && this.driver.options.database ? `${this.driver.options.database}.${tableName}` : tableName; + + return tableName.split(".").map(i => { + return disableEscape ? i : `"${i}"`; + }).join("."); + } + + /** + * Returns object with table schema and table name. + */ + protected parseTableName(target: Table|string) { + const tableName = target instanceof Table ? target.name : target; + if (tableName.indexOf(".") === -1) { + return { + schema: this.driver.options.database ? `'${this.driver.options.database}'` : "current_schema()", + tableName: `'${tableName}'` + }; + } else { + return { + schema: `'${tableName.split(".")[0]}'`, + tableName: `'${tableName.split(".")[1]}'` + }; + } + } + + /** + * Builds a query for create column. + */ + protected buildCreateColumnSql(table: Table, column: TableColumn) { + let c = "\"" + column.name + "\""; + if (column.isGenerated === true && column.generationStrategy !== "uuid") { + if (column.type === "integer" || column.type === "int" || column.type === "int4") + c += " SERIAL"; + if (column.type === "smallint" || column.type === "int2") + c += " SMALLSERIAL"; + if (column.type === "bigint" || column.type === "int8") + c += " BIGSERIAL"; + } + if (column.type === "enum" || column.type === "simple-enum") { + c += " " + this.buildEnumName(table, column); + if (column.isArray) + c += " array"; + + } else if (!column.isGenerated || column.type === "uuid") { + c += " " + this.connection.driver.createFullType(column); + } + if (column.charset) + c += " CHARACTER SET \"" + column.charset + "\""; + if (column.collation) + c += " COLLATE \"" + column.collation + "\""; + if (column.isNullable !== true) + c += " NOT NULL"; + if (column.default !== undefined && column.default !== null) + c += " DEFAULT " + column.default; + if (column.isGenerated && column.generationStrategy === "uuid" && !column.default) + c += ` DEFAULT ${this.driver.uuidGenerator}`; + + return c; + } + +} diff --git a/src/driver/types/DatabaseType.ts b/src/driver/types/DatabaseType.ts index 68a3045013..4426c9829d 100644 --- a/src/driver/types/DatabaseType.ts +++ b/src/driver/types/DatabaseType.ts @@ -16,4 +16,5 @@ export type DatabaseType = "mssql"| "mongodb"| "aurora-data-api"| + "aurora-data-api-pg"| "expo"; diff --git a/src/error/MissingDriverError.ts b/src/error/MissingDriverError.ts index 01abd5492c..a5634f4679 100644 --- a/src/error/MissingDriverError.ts +++ b/src/error/MissingDriverError.ts @@ -7,7 +7,7 @@ export class MissingDriverError extends Error { constructor(driverType: string) { super(); Object.setPrototypeOf(this, 
MissingDriverError.prototype); - this.message = `Wrong driver: "${driverType}" given. Supported drivers are: "cordova", "expo", "mariadb", "mongodb", "mssql", "mysql", "oracle", "postgres", "sqlite", "sqljs", "react-native".`; + this.message = `Wrong driver: "${driverType}" given. Supported drivers are: "cordova", "expo", "mariadb", "mongodb", "mssql", "mysql", "oracle", "postgres", "sqlite", "sqljs", "react-native", "aurora-data-api", "aurora-data-api-pg".`; } -} \ No newline at end of file +} diff --git a/src/metadata-builder/EntityMetadataBuilder.ts b/src/metadata-builder/EntityMetadataBuilder.ts index d4c764d648..9eace62ebb 100644 --- a/src/metadata-builder/EntityMetadataBuilder.ts +++ b/src/metadata-builder/EntityMetadataBuilder.ts @@ -23,6 +23,7 @@ import {SqlServerDriver} from "../driver/sqlserver/SqlServerDriver"; import {PostgresDriver} from "../driver/postgres/PostgresDriver"; import {ExclusionMetadata} from "../metadata/ExclusionMetadata"; import {AuroraDataApiDriver} from "../driver/aurora-data-api/AuroraDataApiDriver"; +import {AuroraDataApiPostgresDriver} from "../driver/aurora-data-api-pg/AuroraDataApiPostgresDriver"; /** * Builds EntityMetadata objects and all its sub-metadatas. @@ -489,7 +490,7 @@ export class EntityMetadataBuilder { }); // Only PostgreSQL supports exclusion constraints. - if (this.connection.driver instanceof PostgresDriver) { + if (this.connection.driver instanceof PostgresDriver || this.connection.driver instanceof AuroraDataApiPostgresDriver) { entityMetadata.exclusions = this.metadataArgsStorage.filterExclusions(entityMetadata.inheritanceTree).map(args => { return new ExclusionMetadata({ entityMetadata, args }); }); diff --git a/src/metadata/EntityMetadata.ts b/src/metadata/EntityMetadata.ts index d4da9d2f61..2bebdd3697 100644 --- a/src/metadata/EntityMetadata.ts +++ b/src/metadata/EntityMetadata.ts @@ -25,6 +25,7 @@ import {RelationMetadata} from "./RelationMetadata"; import {TableType} from "./types/TableTypes"; import {TreeType} from "./types/TreeTypes"; import {UniqueMetadata} from "./UniqueMetadata"; +import {AuroraDataApiPostgresDriver} from "../driver/aurora-data-api-pg/AuroraDataApiPostgresDriver"; /** * Contains all entity metadata. @@ -852,11 +853,11 @@ export class EntityMetadata { */ protected buildTablePath(): string { let tablePath = this.tableName; - if (this.schema && ((this.connection.driver instanceof PostgresDriver) || (this.connection.driver instanceof SqlServerDriver) || (this.connection.driver instanceof SapDriver))) { + if (this.schema && ((this.connection.driver instanceof PostgresDriver) || (this.connection.driver instanceof AuroraDataApiPostgresDriver) || (this.connection.driver instanceof SqlServerDriver) || (this.connection.driver instanceof SapDriver))) { tablePath = this.schema + "." + tablePath; } - if (this.database && !(this.connection.driver instanceof PostgresDriver)) { + if (this.database && !(this.connection.driver instanceof PostgresDriver || this.connection.driver instanceof AuroraDataApiPostgresDriver)) { if (!this.schema && this.connection.driver instanceof SqlServerDriver) { tablePath = this.database + ".." + tablePath; } else { @@ -874,7 +875,7 @@ export class EntityMetadata { if (!this.schema) return undefined; - return this.database && !(this.connection.driver instanceof PostgresDriver) ? this.database + "." + this.schema : this.schema; + return this.database && !(this.connection.driver instanceof PostgresDriver || this.connection.driver instanceof AuroraDataApiPostgresDriver) ? this.database + "." 
+ this.schema : this.schema; } } diff --git a/src/query-builder/DeleteQueryBuilder.ts b/src/query-builder/DeleteQueryBuilder.ts index 4be20ba377..87947f7b97 100644 --- a/src/query-builder/DeleteQueryBuilder.ts +++ b/src/query-builder/DeleteQueryBuilder.ts @@ -16,6 +16,7 @@ import {MysqlDriver} from "../driver/mysql/MysqlDriver"; import {BroadcasterResult} from "../subscriber/BroadcasterResult"; import {EntitySchema} from "../index"; import {AuroraDataApiDriver} from "../driver/aurora-data-api/AuroraDataApiDriver"; +import { AuroraDataApiPostgresDriver } from "../driver/aurora-data-api-pg/AuroraDataApiPostgresDriver"; /** * Allows to build complex sql queries in a fashion way and execute those queries. @@ -75,7 +76,7 @@ export class DeleteQueryBuilder extends QueryBuilder implements deleteResult.raw = result; deleteResult.affected = result.affectedRows; - } else if (driver instanceof SqlServerDriver || driver instanceof PostgresDriver || driver instanceof CockroachDriver) { + } else if (driver instanceof SqlServerDriver || driver instanceof PostgresDriver || driver instanceof AuroraDataApiPostgresDriver || driver instanceof CockroachDriver) { deleteResult.raw = result[0] ? result[0] : null; // don't return 0 because it could confuse. null means that we did not receive this value deleteResult.affected = typeof result[1] === "number" ? result[1] : null; @@ -257,7 +258,7 @@ export class DeleteQueryBuilder extends QueryBuilder implements const whereExpression = this.createWhereExpression(); const returningExpression = this.createReturningExpression(); - if (returningExpression && (this.connection.driver instanceof PostgresDriver || this.connection.driver instanceof CockroachDriver)) { + if (returningExpression && (this.connection.driver instanceof PostgresDriver || this.connection.driver instanceof AuroraDataApiPostgresDriver || this.connection.driver instanceof CockroachDriver)) { return `DELETE FROM ${tableName}${whereExpression} RETURNING ${returningExpression}`; } else if (returningExpression !== "" && this.connection.driver instanceof SqlServerDriver) { diff --git a/src/query-builder/InsertQueryBuilder.ts b/src/query-builder/InsertQueryBuilder.ts index 1ea9b4233c..e36fb31a84 100644 --- a/src/query-builder/InsertQueryBuilder.ts +++ b/src/query-builder/InsertQueryBuilder.ts @@ -19,6 +19,7 @@ import {BroadcasterResult} from "../subscriber/BroadcasterResult"; import {EntitySchema} from "../entity-schema/EntitySchema"; import {OracleDriver} from "../driver/oracle/OracleDriver"; import {AuroraDataApiDriver} from "../driver/aurora-data-api/AuroraDataApiDriver"; +import {AuroraDataApiPostgresDriver} from "../driver/aurora-data-api-pg/AuroraDataApiPostgresDriver"; /** * Allows to build complex sql queries in a fashion way and execute those queries. 
@@ -263,7 +264,7 @@ export class InsertQueryBuilder extends QueryBuilder { if (statement && Array.isArray(statement.overwrite)) { if (this.connection.driver instanceof MysqlDriver || this.connection.driver instanceof AuroraDataApiDriver) { this.expressionMap.onUpdate.overwrite = statement.overwrite.map(column => `${column} = VALUES(${column})`).join(", "); - } else if (this.connection.driver instanceof PostgresDriver || this.connection.driver instanceof AbstractSqliteDriver || this.connection.driver instanceof CockroachDriver) { + } else if (this.connection.driver instanceof PostgresDriver || this.connection.driver instanceof AuroraDataApiPostgresDriver || this.connection.driver instanceof AbstractSqliteDriver || this.connection.driver instanceof CockroachDriver) { this.expressionMap.onUpdate.overwrite = statement.overwrite.map(column => `${column} = EXCLUDED.${column}`).join(", "); } } @@ -314,7 +315,7 @@ export class InsertQueryBuilder extends QueryBuilder { query += ` DEFAULT VALUES`; } } - if (this.connection.driver instanceof PostgresDriver || this.connection.driver instanceof AbstractSqliteDriver || this.connection.driver instanceof CockroachDriver) { + if (this.connection.driver instanceof PostgresDriver || this.connection.driver instanceof AuroraDataApiPostgresDriver || this.connection.driver instanceof AbstractSqliteDriver || this.connection.driver instanceof CockroachDriver) { query += `${this.expressionMap.onIgnore ? " ON CONFLICT DO NOTHING " : ""}`; query += `${this.expressionMap.onConflict ? " ON CONFLICT " + this.expressionMap.onConflict : ""}`; if (this.expressionMap.onUpdate) { @@ -331,7 +332,7 @@ export class InsertQueryBuilder extends QueryBuilder { } // add RETURNING expression - if (returningExpression && (this.connection.driver instanceof PostgresDriver || this.connection.driver instanceof OracleDriver || this.connection.driver instanceof CockroachDriver)) { + if (returningExpression && (this.connection.driver instanceof PostgresDriver || this.connection.driver instanceof AuroraDataApiPostgresDriver || this.connection.driver instanceof OracleDriver || this.connection.driver instanceof CockroachDriver)) { query += ` RETURNING ${returningExpression}`; } @@ -493,7 +494,7 @@ export class InsertQueryBuilder extends QueryBuilder { } else { expression += `${geomFromText}(${this.connection.driver.createParameter(paramName, parametersCount)})`; } - } else if (this.connection.driver instanceof PostgresDriver && this.connection.driver.spatialTypes.indexOf(column.type) !== -1) { + } else if ((this.connection.driver instanceof PostgresDriver || this.connection.driver instanceof AuroraDataApiPostgresDriver) && this.connection.driver.spatialTypes.indexOf(column.type) !== -1) { if (column.srid != null) { expression += `ST_SetSRID(ST_GeomFromGeoJSON(${this.connection.driver.createParameter(paramName, parametersCount)}), ${column.srid})::${column.type}`; } else { diff --git a/src/query-builder/SelectQueryBuilder.ts b/src/query-builder/SelectQueryBuilder.ts index 483b2dced8..3cfb272f9b 100644 --- a/src/query-builder/SelectQueryBuilder.ts +++ b/src/query-builder/SelectQueryBuilder.ts @@ -36,6 +36,7 @@ import {SelectQueryBuilderOption} from "./SelectQueryBuilderOption"; import {ObjectUtils} from "../util/ObjectUtils"; import {DriverUtils} from "../driver/DriverUtils"; import {AuroraDataApiDriver} from "../driver/aurora-data-api/AuroraDataApiDriver"; +import {AuroraDataApiPostgresDriver} from "../driver/aurora-data-api-pg/AuroraDataApiPostgresDriver"; /** * Allows to build complex sql 
queries in a fashion way and execute those queries. @@ -1441,7 +1442,7 @@ export class SelectQueryBuilder extends QueryBuilder implements const {driver} = this.connection; let select = "SELECT "; - if (driver instanceof PostgresDriver && selectDistinctOn.length > 0) { + if ((driver instanceof PostgresDriver || driver instanceof AuroraDataApiPostgresDriver) && selectDistinctOn.length > 0) { const selectDistinctOnMap = selectDistinctOn.map( (on) => this.replacePropertyNames(on) ).join(", "); @@ -1650,7 +1651,7 @@ export class SelectQueryBuilder extends QueryBuilder implements if (driver instanceof MysqlDriver || driver instanceof AuroraDataApiDriver) { return " LOCK IN SHARE MODE"; - } else if (driver instanceof PostgresDriver) { + } else if (driver instanceof PostgresDriver || this.connection.driver instanceof AuroraDataApiPostgresDriver ) { return " FOR SHARE"; } else if (driver instanceof OracleDriver) { @@ -1663,7 +1664,7 @@ export class SelectQueryBuilder extends QueryBuilder implements throw new LockNotSupportedOnGivenDriverError(); } case "pessimistic_write": - if (driver instanceof MysqlDriver || driver instanceof AuroraDataApiDriver || driver instanceof PostgresDriver || driver instanceof OracleDriver) { + if (driver instanceof MysqlDriver || driver instanceof AuroraDataApiDriver || driver instanceof PostgresDriver || driver instanceof AuroraDataApiPostgresDriver || driver instanceof OracleDriver) { return " FOR UPDATE"; } else if (driver instanceof SqlServerDriver) { @@ -1727,7 +1728,7 @@ export class SelectQueryBuilder extends QueryBuilder implements selectionPath = `${asText}(${selectionPath})`; } - if (this.connection.driver instanceof PostgresDriver) + if (this.connection.driver instanceof PostgresDriver || this.connection.driver instanceof AuroraDataApiPostgresDriver ) // cast to JSON to trigger parsing in the driver selectionPath = `ST_AsGeoJSON(${selectionPath})::json`; diff --git a/src/query-builder/UpdateQueryBuilder.ts b/src/query-builder/UpdateQueryBuilder.ts index 1bf6c68b6d..f7fba2f2b3 100644 --- a/src/query-builder/UpdateQueryBuilder.ts +++ b/src/query-builder/UpdateQueryBuilder.ts @@ -24,6 +24,7 @@ import {UpdateValuesMissingError} from "../error/UpdateValuesMissingError"; import {EntityColumnNotFound} from "../error/EntityColumnNotFound"; import {QueryDeepPartialEntity} from "./QueryPartialEntity"; import {AuroraDataApiDriver} from "../driver/aurora-data-api/AuroraDataApiDriver"; +import {AuroraDataApiPostgresDriver} from "../driver/aurora-data-api-pg/AuroraDataApiPostgresDriver"; /** * Allows to build complex sql queries in a fashion way and execute those queries. 
@@ -100,7 +101,7 @@ export class UpdateQueryBuilder extends QueryBuilder implements parameters, ); - if (this.connection.driver instanceof PostgresDriver) { + if (this.connection.driver instanceof PostgresDriver || this.connection.driver instanceof AuroraDataApiPostgresDriver) { updateResult.raw = result[0]; updateResult.affected = result[1]; } @@ -446,7 +447,7 @@ export class UpdateQueryBuilder extends QueryBuilder implements } else { expression = `${geomFromText}(${this.connection.driver.createParameter(paramName, parametersCount)})`; } - } else if (this.connection.driver instanceof PostgresDriver && this.connection.driver.spatialTypes.indexOf(column.type) !== -1) { + } else if ((this.connection.driver instanceof PostgresDriver || this.connection.driver instanceof AuroraDataApiPostgresDriver) && this.connection.driver.spatialTypes.indexOf(column.type) !== -1) { if (column.srid != null) { expression = `ST_SetSRID(ST_GeomFromGeoJSON(${this.connection.driver.createParameter(paramName, parametersCount)}), ${column.srid})::${column.type}`; } else { @@ -516,7 +517,7 @@ export class UpdateQueryBuilder extends QueryBuilder implements const returningExpression = this.createReturningExpression(); // generate and return sql update query - if (returningExpression && (this.connection.driver instanceof PostgresDriver || this.connection.driver instanceof OracleDriver || this.connection.driver instanceof CockroachDriver)) { + if (returningExpression && (this.connection.driver instanceof PostgresDriver || this.connection.driver instanceof AuroraDataApiPostgresDriver || this.connection.driver instanceof OracleDriver || this.connection.driver instanceof CockroachDriver)) { return `UPDATE ${this.getTableName(this.getMainTableName())} SET ${updateColumnAndValues.join(", ")}${whereExpression} RETURNING ${returningExpression}`; } else if (returningExpression && this.connection.driver instanceof SqlServerDriver) { diff --git a/src/schema-builder/RdbmsSchemaBuilder.ts b/src/schema-builder/RdbmsSchemaBuilder.ts index 2c625c9a6e..3dfbaad0d5 100644 --- a/src/schema-builder/RdbmsSchemaBuilder.ts +++ b/src/schema-builder/RdbmsSchemaBuilder.ts @@ -24,6 +24,7 @@ import {TableExclusion} from "./table/TableExclusion"; import {View} from "./view/View"; import {AuroraDataApiDriver} from "../driver/aurora-data-api/AuroraDataApiDriver"; import { ForeignKeyMetadata } from "../metadata/ForeignKeyMetadata"; +import {AuroraDataApiPostgresDriver} from "../driver/aurora-data-api-pg/AuroraDataApiPostgresDriver"; /** * Creates complete tables schemas in the database based on the entity metadatas. 
@@ -339,7 +340,7 @@ export class RdbmsSchemaBuilder implements SchemaBuilder { protected async dropOldExclusions(): Promise { // Only PostgreSQL supports exclusion constraints - if (!(this.connection.driver instanceof PostgresDriver)) + if (!(this.connection.driver instanceof PostgresDriver || this.connection.driver instanceof AuroraDataApiPostgresDriver)) return; await PromiseUtils.runInSequence(this.entityToSyncMetadatas, async metadata => { @@ -618,7 +619,7 @@ export class RdbmsSchemaBuilder implements SchemaBuilder { */ protected async createNewExclusions(): Promise { // Only PostgreSQL supports exclusion constraints - if (!(this.connection.driver instanceof PostgresDriver)) + if (!(this.connection.driver instanceof PostgresDriver || this.connection.driver instanceof AuroraDataApiPostgresDriver)) return; await PromiseUtils.runInSequence(this.entityToSyncMetadatas, async metadata => { diff --git a/test/functional/entity-schema/exclusions/exclusions-basic.ts b/test/functional/entity-schema/exclusions/exclusions-basic.ts index 1e20cda483..d199466a48 100644 --- a/test/functional/entity-schema/exclusions/exclusions-basic.ts +++ b/test/functional/entity-schema/exclusions/exclusions-basic.ts @@ -3,6 +3,7 @@ import {closeTestingConnections, createTestingConnections, reloadTestingDatabase import {Connection} from "../../../../src/connection/Connection"; import {MeetingSchema} from "./entity/Meeting"; import {PostgresDriver} from "../../../../src/driver/postgres/PostgresDriver"; +import { AuroraDataApiPostgresDriver } from "../../../../src/driver/aurora-data-api-pg/AuroraDataApiPostgresDriver"; describe("entity-schema > exclusions", () => { @@ -15,7 +16,7 @@ describe("entity-schema > exclusions", () => { it("should create an exclusion constraint", () => Promise.all(connections.map(async connection => { // Only PostgreSQL supports exclusion constraints. 
- if (!(connection.driver instanceof PostgresDriver)) + if (!(connection.driver instanceof PostgresDriver || connection.driver instanceof AuroraDataApiPostgresDriver)) return; const queryRunner = connection.createQueryRunner(); diff --git a/test/functional/multi-schema-and-database/multi-schema-and-database-basic-functionality/multi-schema-and-database-basic-functionality.ts b/test/functional/multi-schema-and-database/multi-schema-and-database-basic-functionality/multi-schema-and-database-basic-functionality.ts index c6fbdd915e..edde116821 100644 --- a/test/functional/multi-schema-and-database/multi-schema-and-database-basic-functionality/multi-schema-and-database-basic-functionality.ts +++ b/test/functional/multi-schema-and-database/multi-schema-and-database-basic-functionality/multi-schema-and-database-basic-functionality.ts @@ -11,6 +11,7 @@ import {Person} from "./entity/Person"; import {Question} from "./entity/Question"; import {Answer} from "./entity/Answer"; import {MysqlDriver} from "../../../../src/driver/mysql/MysqlDriver"; +import { AuroraDataApiPostgresDriver } from "../../../../src/driver/aurora-data-api-pg/AuroraDataApiPostgresDriver"; describe("multi-schema-and-database > basic-functionality", () => { @@ -41,7 +42,7 @@ describe("multi-schema-and-database > basic-functionality", () => { .where("post.id = :id", {id: 1}) .getSql(); - if (connection.driver instanceof PostgresDriver) + if (connection.driver instanceof PostgresDriver || connection.driver instanceof AuroraDataApiPostgresDriver) sql.should.be.equal(`SELECT "post"."id" AS "post_id", "post"."name" AS "post_name" FROM "custom"."post" "post" WHERE "post"."id" = $1`); if (connection.driver instanceof SqlServerDriver) @@ -64,7 +65,7 @@ describe("multi-schema-and-database > basic-functionality", () => { .where("user.id = :id", {id: 1}) .getSql(); - if (connection.driver instanceof PostgresDriver) + if (connection.driver instanceof PostgresDriver || connection.driver instanceof AuroraDataApiPostgresDriver) sql.should.be.equal(`SELECT "user"."id" AS "user_id", "user"."name" AS "user_name" FROM "userSchema"."user" "user" WHERE "user"."id" = $1`); if (connection.driver instanceof SqlServerDriver) @@ -102,7 +103,7 @@ describe("multi-schema-and-database > basic-functionality", () => { .where("category.id = :id", {id: 1}) .getSql(); - if (connection.driver instanceof PostgresDriver) + if (connection.driver instanceof PostgresDriver || connection.driver instanceof AuroraDataApiPostgresDriver) sql.should.be.equal(`SELECT "category"."id" AS "category_id", "category"."name" AS "category_name",` + ` "category"."postId" AS "category_postId", "post"."id" AS "post_id", "post"."name" AS "post_name"` + ` FROM "guest"."category" "category" INNER JOIN "custom"."post" "post" ON "post"."id"="category"."postId" WHERE "category"."id" = $1`); @@ -140,7 +141,7 @@ describe("multi-schema-and-database > basic-functionality", () => { (await query.getRawOne())!.should.be.not.empty; - if (connection.driver instanceof PostgresDriver) + if (connection.driver instanceof PostgresDriver || connection.driver instanceof AuroraDataApiPostgresDriver) query.getSql().should.be.equal(`SELECT * FROM "guest"."category" "category", "userSchema"."user" "user",` + ` "custom"."post" "post" WHERE "category"."id" = $1 AND "post"."id" = "category"."postId"`); diff --git a/test/functional/query-builder/locking/query-builder-locking.ts b/test/functional/query-builder/locking/query-builder-locking.ts index eefdddfd05..f9fbeb6c14 100644 --- 
a/test/functional/query-builder/locking/query-builder-locking.ts +++ b/test/functional/query-builder/locking/query-builder-locking.ts @@ -18,6 +18,7 @@ import {SqlServerDriver} from "../../../../src/driver/sqlserver/SqlServerDriver" import {AbstractSqliteDriver} from "../../../../src/driver/sqlite-abstract/AbstractSqliteDriver"; import {OracleDriver} from "../../../../src/driver/oracle/OracleDriver"; import {LockNotSupportedOnGivenDriverError} from "../../../../src/error/LockNotSupportedOnGivenDriverError"; +import { AuroraDataApiPostgresDriver } from "../../../../src/driver/aurora-data-api-pg/AuroraDataApiPostgresDriver"; describe("query builder > locking", () => { @@ -132,7 +133,7 @@ describe("query builder > locking", () => { .where("post.id = :id", { id: 1 }) .getSql(); - if (connection.driver instanceof MysqlDriver || connection.driver instanceof PostgresDriver || connection.driver instanceof OracleDriver) { + if (connection.driver instanceof MysqlDriver || connection.driver instanceof PostgresDriver || connection.driver instanceof AuroraDataApiPostgresDriver || connection.driver instanceof OracleDriver) { expect(sql.indexOf("FOR UPDATE") !== -1).to.be.true; } else if (connection.driver instanceof SqlServerDriver) { diff --git a/test/functional/query-builder/order-by/query-builder-order-by.ts b/test/functional/query-builder/order-by/query-builder-order-by.ts index 8ff9242396..e6aaac1091 100644 --- a/test/functional/query-builder/order-by/query-builder-order-by.ts +++ b/test/functional/query-builder/order-by/query-builder-order-by.ts @@ -5,6 +5,7 @@ import {expect} from "chai"; import {Post} from "./entity/Post"; import {PostgresDriver} from "../../../../src/driver/postgres/PostgresDriver"; import {MysqlDriver} from "../../../../src/driver/mysql/MysqlDriver"; +import { AuroraDataApiPostgresDriver } from "../../../../src/driver/aurora-data-api-pg/AuroraDataApiPostgresDriver"; describe("query builder > order-by", () => { @@ -51,7 +52,7 @@ describe("query builder > order-by", () => { }))); it("should be always in right order(custom order)", () => Promise.all(connections.map(async connection => { - if (!(connection.driver instanceof PostgresDriver)) // NULLS FIRST / LAST only supported by postgres + if (!(connection.driver instanceof PostgresDriver || connection.driver instanceof AuroraDataApiPostgresDriver)) // NULLS FIRST / LAST only supported by postgres return; const post1 = new Post(); @@ -135,4 +136,4 @@ describe("query builder > order-by", () => { expect(loadedPost2!.num2).to.be.equal(2); }))); -}); \ No newline at end of file +}); diff --git a/test/functional/query-runner/rename-column.ts b/test/functional/query-runner/rename-column.ts index b033a46fb0..c59d36edb5 100644 --- a/test/functional/query-runner/rename-column.ts +++ b/test/functional/query-runner/rename-column.ts @@ -9,6 +9,7 @@ import {SqlServerDriver} from "../../../src/driver/sqlserver/SqlServerDriver"; import {PostgresDriver} from "../../../src/driver/postgres/PostgresDriver"; import {AbstractSqliteDriver} from "../../../src/driver/sqlite-abstract/AbstractSqliteDriver"; import {MysqlDriver} from "../../../src/driver/mysql/MysqlDriver"; +import { AuroraDataApiPostgresDriver } from "../../../src/driver/aurora-data-api-pg/AuroraDataApiPostgresDriver"; describe("query runner > rename column", () => { @@ -123,7 +124,7 @@ describe("query runner > rename column", () => { await queryRunner.createDatabase("testDB", true); await queryRunner.createSchema("testDB.testSchema", true); - } else if (connection.driver instanceof 
PostgresDriver) { + } else if (connection.driver instanceof PostgresDriver || connection.driver instanceof AuroraDataApiPostgresDriver) { questionTableName = "testSchema.question"; categoryTableName = "testSchema.category"; await queryRunner.createSchema("testSchema", true); diff --git a/test/functional/query-runner/rename-table.ts b/test/functional/query-runner/rename-table.ts index 15fbd52647..738d731544 100644 --- a/test/functional/query-runner/rename-table.ts +++ b/test/functional/query-runner/rename-table.ts @@ -8,6 +8,7 @@ import {Table} from "../../../src/schema-builder/table/Table"; import {AbstractSqliteDriver} from "../../../src/driver/sqlite-abstract/AbstractSqliteDriver"; import {PostgresDriver} from "../../../src/driver/postgres/PostgresDriver"; import {MysqlDriver} from "../../../src/driver/mysql/MysqlDriver"; +import { AuroraDataApiPostgresDriver } from "../../../src/driver/aurora-data-api-pg/AuroraDataApiPostgresDriver"; describe("query runner > rename table", () => { @@ -105,7 +106,7 @@ describe("query runner > rename table", () => { await queryRunner.createDatabase("testDB", true); await queryRunner.createSchema("testDB.testSchema", true); - } else if (connection.driver instanceof PostgresDriver || connection.driver instanceof SapDriver) { + } else if (connection.driver instanceof PostgresDriver || connection.driver instanceof SapDriver || connection.driver instanceof AuroraDataApiPostgresDriver) { questionTableName = "testSchema.question"; renamedQuestionTableName = "testSchema.renamedQuestion"; categoryTableName = "testSchema.category"; diff --git a/test/functional/repository/find-options-locking/find-options-locking.ts b/test/functional/repository/find-options-locking/find-options-locking.ts index 876c67ff2e..b60bbedb5b 100644 --- a/test/functional/repository/find-options-locking/find-options-locking.ts +++ b/test/functional/repository/find-options-locking/find-options-locking.ts @@ -18,6 +18,7 @@ import {SqlServerDriver} from "../../../../src/driver/sqlserver/SqlServerDriver" import {AbstractSqliteDriver} from "../../../../src/driver/sqlite-abstract/AbstractSqliteDriver"; import {OracleDriver} from "../../../../src/driver/oracle/OracleDriver"; import {LockNotSupportedOnGivenDriverError} from "../../../../src/error/LockNotSupportedOnGivenDriverError"; +import { AuroraDataApiPostgresDriver } from "../../../../src/driver/aurora-data-api-pg/AuroraDataApiPostgresDriver"; describe("repository > find options > locking", () => { @@ -85,7 +86,7 @@ describe("repository > find options > locking", () => { if (connection.driver instanceof MysqlDriver) { expect(executedSql[0].indexOf("LOCK IN SHARE MODE") !== -1).to.be.true; - } else if (connection.driver instanceof PostgresDriver) { + } else if (connection.driver instanceof PostgresDriver || connection.driver instanceof AuroraDataApiPostgresDriver) { expect(executedSql[0].indexOf("FOR SHARE") !== -1).to.be.true; } else if (connection.driver instanceof OracleDriver) { @@ -115,7 +116,7 @@ describe("repository > find options > locking", () => { .findOne(1, {lock: {mode: "pessimistic_write"}}); }); - if (connection.driver instanceof MysqlDriver || connection.driver instanceof PostgresDriver || connection.driver instanceof OracleDriver) { + if (connection.driver instanceof MysqlDriver || connection.driver instanceof PostgresDriver || connection.driver instanceof AuroraDataApiPostgresDriver || connection.driver instanceof OracleDriver) { expect(executedSql[0].indexOf("FOR UPDATE") !== -1).to.be.true; } else if (connection.driver 
instanceof SqlServerDriver) { From 65bdd275411d6cf9e09572c5dab540bb13081333 Mon Sep 17 00:00:00 2001 From: ayankovsky Date: Mon, 20 Apr 2020 20:33:34 +0200 Subject: [PATCH 2/2] Refactored the code to be more supportable --- package-lock.json | 2 +- src/driver/DriverFactory.ts | 2 +- .../AuroraDataApiPostgresDriver.ts | 888 -------- .../AuroraDataApiPostgresQueryRunner.ts | 1947 +---------------- src/driver/postgres/PostgresDriver.ts | 118 +- src/metadata-builder/EntityMetadataBuilder.ts | 3 +- src/metadata/EntityMetadata.ts | 7 +- src/query-builder/DeleteQueryBuilder.ts | 5 +- src/query-builder/InsertQueryBuilder.ts | 9 +- src/query-builder/SelectQueryBuilder.ts | 9 +- src/query-builder/UpdateQueryBuilder.ts | 7 +- src/schema-builder/RdbmsSchemaBuilder.ts | 5 +- .../exclusions/exclusions-basic.ts | 3 +- ...schema-and-database-basic-functionality.ts | 9 +- .../locking/query-builder-locking.ts | 3 +- .../order-by/query-builder-order-by.ts | 3 +- test/functional/query-runner/rename-column.ts | 3 +- test/functional/query-runner/rename-table.ts | 3 +- .../find-options-locking.ts | 5 +- 19 files changed, 161 insertions(+), 2870 deletions(-) delete mode 100644 src/driver/aurora-data-api-pg/AuroraDataApiPostgresDriver.ts diff --git a/package-lock.json b/package-lock.json index 4a15a9cb27..2cd0856f29 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,6 +1,6 @@ { "name": "typeorm", - "version": "0.2.22", + "version": "0.2.24", "lockfileVersion": 1, "requires": true, "dependencies": { diff --git a/src/driver/DriverFactory.ts b/src/driver/DriverFactory.ts index ae0f7a5eb6..80f58df4ef 100644 --- a/src/driver/DriverFactory.ts +++ b/src/driver/DriverFactory.ts @@ -15,7 +15,7 @@ import {AuroraDataApiDriver} from "./aurora-data-api/AuroraDataApiDriver"; import {Driver} from "./Driver"; import {Connection} from "../connection/Connection"; import {SapDriver} from "./sap/SapDriver"; -import {AuroraDataApiPostgresDriver} from "./aurora-data-api-pg/AuroraDataApiPostgresDriver"; +import {AuroraDataApiPostgresDriver} from "./postgres/PostgresDriver"; /** * Helps to create drivers. diff --git a/src/driver/aurora-data-api-pg/AuroraDataApiPostgresDriver.ts b/src/driver/aurora-data-api-pg/AuroraDataApiPostgresDriver.ts deleted file mode 100644 index b8174ceb2d..0000000000 --- a/src/driver/aurora-data-api-pg/AuroraDataApiPostgresDriver.ts +++ /dev/null @@ -1,888 +0,0 @@ -import {Driver} from "../Driver"; -import {ObjectLiteral} from "../.."; -import {ColumnMetadata} from "../../metadata/ColumnMetadata"; -import {AuroraDataApiPostgresQueryRunner} from "./AuroraDataApiPostgresQueryRunner"; -import {DateUtils} from "../../util/DateUtils"; -import {PlatformTools} from "../../platform/PlatformTools"; -import {Connection} from "../.."; -import {RdbmsSchemaBuilder} from "../../schema-builder/RdbmsSchemaBuilder"; -import {AuroraDataApiPostgresConnectionOptions} from "./AuroraDataApiPostgresConnectionOptions"; -import {MappedColumnTypes} from "../types/MappedColumnTypes"; -import {ColumnType} from "../types/ColumnTypes"; -import {QueryRunner} from "../../query-runner/QueryRunner"; -import {DataTypeDefaults} from "../types/DataTypeDefaults"; -import {TableColumn} from "../../schema-builder/table/TableColumn"; -import {EntityMetadata} from "../../metadata/EntityMetadata"; -import {OrmUtils} from "../../util/OrmUtils"; -import {ApplyValueTransformers} from "../../util/ApplyValueTransformers"; - -/** - * Organizes communication with PostgreSQL DBMS. 
- */ -export class AuroraDataApiPostgresDriver implements Driver { - - // ------------------------------------------------------------------------- - // Public Properties - // ------------------------------------------------------------------------- - - /** - * Connection used by driver. - */ - connection: Connection; - - /** - * Aurora Data API underlying library. - */ - DataApiDriver: any; - - client: any; - - /** - * Pool for master database. - */ - master: any; - - /** - * Pool for slave databases. - * Used in replication. - */ - slaves: any[] = []; - - /** - * We store all created query runners because we need to release them. - */ - connectedQueryRunners: QueryRunner[] = []; - - // ------------------------------------------------------------------------- - // Public Implemented Properties - // ------------------------------------------------------------------------- - - /** - * Connection options. - */ - options: AuroraDataApiPostgresConnectionOptions; - - /** - * Master database used to perform all write queries. - */ - database?: string; - - /** - * Indicates if replication is enabled. - */ - isReplicated: boolean = false; - - /** - * Indicates if tree tables are supported by this driver. - */ - treeSupport = true; - - /** - * Gets list of supported column data types by a driver. - * - * @see https://www.tutorialspoint.com/postgresql/postgresql_data_types.htm - * @see https://www.postgresql.org/docs/9.2/static/datatype.html - */ - supportedDataTypes: ColumnType[] = [ - "int", - "int2", - "int4", - "int8", - "smallint", - "integer", - "bigint", - "decimal", - "numeric", - "real", - "float", - "float4", - "float8", - "double precision", - "money", - "character varying", - "varchar", - "character", - "char", - "text", - "citext", - "hstore", - "bytea", - "bit", - "varbit", - "bit varying", - "timetz", - "timestamptz", - "timestamp", - "timestamp without time zone", - "timestamp with time zone", - "date", - "time", - "time without time zone", - "time with time zone", - "interval", - "bool", - "boolean", - "enum", - "point", - "line", - "lseg", - "box", - "path", - "polygon", - "circle", - "cidr", - "inet", - "macaddr", - "tsvector", - "tsquery", - "uuid", - "xml", - "json", - "jsonb", - "int4range", - "int8range", - "numrange", - "tsrange", - "tstzrange", - "daterange", - "geometry", - "geography", - "cube" - ]; - - /** - * Gets list of spatial column data types. - */ - spatialTypes: ColumnType[] = [ - "geometry", - "geography" - ]; - - /** - * Gets list of column data types that support length by a driver. - */ - withLengthColumnTypes: ColumnType[] = [ - "character varying", - "varchar", - "character", - "char", - "bit", - "varbit", - "bit varying" - ]; - - /** - * Gets list of column data types that support precision by a driver. - */ - withPrecisionColumnTypes: ColumnType[] = [ - "numeric", - "decimal", - "interval", - "time without time zone", - "time with time zone", - "timestamp without time zone", - "timestamp with time zone" - ]; - - /** - * Gets list of column data types that support scale by a driver. - */ - withScaleColumnTypes: ColumnType[] = [ - "numeric", - "decimal" - ]; - - /** - * Orm has special columns and we need to know what database column types should be for those types. - * Column types are driver dependant. 
- */ - mappedDataTypes: MappedColumnTypes = { - createDate: "timestamp", - createDateDefault: "now()", - updateDate: "timestamp", - updateDateDefault: "now()", - deleteDate: "timestamp", - deleteDateNullable: true, - version: "int4", - treeLevel: "int4", - migrationId: "int4", - migrationName: "varchar", - migrationTimestamp: "int8", - cacheId: "int4", - cacheIdentifier: "varchar", - cacheTime: "int8", - cacheDuration: "int4", - cacheQuery: "text", - cacheResult: "text", - metadataType: "varchar", - metadataDatabase: "varchar", - metadataSchema: "varchar", - metadataTable: "varchar", - metadataName: "varchar", - metadataValue: "text", - }; - - /** - * Default values of length, precision and scale depends on column data type. - * Used in the cases when length/precision/scale is not specified by user. - */ - dataTypeDefaults: DataTypeDefaults = { - "character": { length: 1 }, - "bit": { length: 1 }, - "interval": { precision: 6 }, - "time without time zone": { precision: 6 }, - "time with time zone": { precision: 6 }, - "timestamp without time zone": { precision: 6 }, - "timestamp with time zone": { precision: 6 }, - }; - - /** - * Max length allowed by Postgres for aliases. - * @see https://www.postgresql.org/docs/current/sql-syntax-lexical.html#SQL-SYNTAX-IDENTIFIERS - */ - maxAliasLength = 63; - - // ------------------------------------------------------------------------- - // Constructor - // ------------------------------------------------------------------------- - - constructor(connection: Connection) { - this.connection = connection; - this.options = connection.options as AuroraDataApiPostgresConnectionOptions; - this.isReplicated = false; - - // load data-api package - this.loadDependencies(); - - this.client = new this.DataApiDriver( - this.options.region, - this.options.secretArn, - this.options.resourceArn, - this.options.database, - (query: string, parameters?: any[]) => this.connection.logger.logQuery(query, parameters), - ); - } - - // ------------------------------------------------------------------------- - // Public Implemented Methods - // ------------------------------------------------------------------------- - - /** - * Performs connection to the database. - * Based on pooling options, it can either create connection immediately, - * either create a pool and create connection when needed. - */ - async connect(): Promise { - } - - /** - * Makes any action after connection (e.g. create extensions in Postgres driver). 
- */ - async afterConnect(): Promise { - const hasUuidColumns = this.connection.entityMetadatas.some(metadata => { - return metadata.generatedColumns.filter(column => column.generationStrategy === "uuid").length > 0; - }); - const hasCitextColumns = this.connection.entityMetadatas.some(metadata => { - return metadata.columns.filter(column => column.type === "citext").length > 0; - }); - const hasHstoreColumns = this.connection.entityMetadatas.some(metadata => { - return metadata.columns.filter(column => column.type === "hstore").length > 0; - }); - const hasCubeColumns = this.connection.entityMetadatas.some(metadata => { - return metadata.columns.filter(column => column.type === "cube").length > 0; - }); - const hasGeometryColumns = this.connection.entityMetadatas.some(metadata => { - return metadata.columns.filter(column => this.spatialTypes.indexOf(column.type) >= 0).length > 0; - }); - const hasExclusionConstraints = this.connection.entityMetadatas.some(metadata => { - return metadata.exclusions.length > 0; - }); - if (hasUuidColumns || hasCitextColumns || hasHstoreColumns || hasGeometryColumns || hasCubeColumns || hasExclusionConstraints) { - await Promise.all([this.master, ...this.slaves].map(pool => { - return new Promise(async (ok, fail) => { - const { logger } = this.connection; - if (hasUuidColumns) - try { - await this.executeQuery(this.connection, `CREATE EXTENSION IF NOT EXISTS "${this.options.uuidExtension || "uuid-ossp"}"`); - } catch (_) { - logger.log("warn", `At least one of the entities has uuid column, but the '${this.options.uuidExtension || "uuid-ossp"}' extension cannot be installed automatically. Please install it manually using superuser rights, or select another uuid extension.`); - } - if (hasCitextColumns) - try { - await this.executeQuery(this.connection, `CREATE EXTENSION IF NOT EXISTS "citext"`); - } catch (_) { - logger.log("warn", "At least one of the entities has citext column, but the 'citext' extension cannot be installed automatically. Please install it manually using superuser rights"); - } - if (hasHstoreColumns) - try { - await this.executeQuery(this.connection, `CREATE EXTENSION IF NOT EXISTS "hstore"`); - } catch (_) { - logger.log("warn", "At least one of the entities has hstore column, but the 'hstore' extension cannot be installed automatically. Please install it manually using superuser rights"); - } - if (hasGeometryColumns) - try { - await this.executeQuery(this.connection, `CREATE EXTENSION IF NOT EXISTS "postgis"`); - } catch (_) { - logger.log("warn", "At least one of the entities has a geometry column, but the 'postgis' extension cannot be installed automatically. Please install it manually using superuser rights"); - } - if (hasCubeColumns) - try { - await this.executeQuery(this.connection, `CREATE EXTENSION IF NOT EXISTS "cube"`); - } catch (_) { - logger.log("warn", "At least one of the entities has a cube column, but the 'cube' extension cannot be installed automatically. Please install it manually using superuser rights"); - } - if (hasExclusionConstraints) - try { - // The btree_gist extension provides operator support in PostgreSQL exclusion constraints - await this.executeQuery(this.connection, `CREATE EXTENSION IF NOT EXISTS "btree_gist"`); - } catch (_) { - logger.log("warn", "At least one of the entities has an exclusion constraint, but the 'btree_gist' extension cannot be installed automatically. 
Please install it manually using superuser rights"); - } - ok(); - }); - })); - } - - return Promise.resolve(); - } - - /** - * Closes connection with database. - */ - async disconnect(): Promise { - } - - /** - * Creates a schema builder used to build and sync a schema. - */ - createSchemaBuilder() { - return new RdbmsSchemaBuilder(this.connection); - } - - /** - * Creates a query runner used to execute database queries. - */ - createQueryRunner(mode: "master"|"slave" = "master") { - return new AuroraDataApiPostgresQueryRunner(this, mode); - } - - /** - * Prepares given value to a value to be persisted, based on its column type and metadata. - */ - preparePersistentValue(value: any, columnMetadata: ColumnMetadata): any { - if (columnMetadata.transformer) - value = ApplyValueTransformers.transformTo(columnMetadata.transformer, value); - - if (value === null || value === undefined) - return value; - - if (columnMetadata.type === Boolean) { - return value === true ? 1 : 0; - - } else if (columnMetadata.type === "date") { - return DateUtils.mixedDateToDateString(value); - - } else if (columnMetadata.type === "time") { - return DateUtils.mixedDateToTimeString(value); - - } else if (columnMetadata.type === "datetime" - || columnMetadata.type === Date - || columnMetadata.type === "timestamp" - || columnMetadata.type === "timestamp with time zone" - || columnMetadata.type === "timestamp without time zone") { - return DateUtils.mixedDateToDate(value); - - } else if (["json", "jsonb", ...this.spatialTypes].indexOf(columnMetadata.type) >= 0) { - return JSON.stringify(value); - - } else if (columnMetadata.type === "hstore") { - if (typeof value === "string") { - return value; - } else { - // https://www.postgresql.org/docs/9.0/hstore.html - const quoteString = (value: unknown) => { - // If a string to be quoted is `null` or `undefined`, we return a literal unquoted NULL. - // This way, NULL values can be stored in the hstore object. - if (value === null || typeof value === "undefined") { - return "NULL"; - } - // Convert non-null values to string since HStore only stores strings anyway. - // To include a double quote or a backslash in a key or value, escape it with a backslash. - return `"${`${value}`.replace(/(?=["\\])/g, "\\")}"`; - }; - return Object.keys(value).map(key => quoteString(key) + "=>" + quoteString(value[key])).join(","); - } - - } else if (columnMetadata.type === "simple-array") { - return DateUtils.simpleArrayToString(value); - - } else if (columnMetadata.type === "simple-json") { - return DateUtils.simpleJsonToString(value); - - } else if (columnMetadata.type === "cube") { - if (columnMetadata.isArray) { - return `{${value.map((cube: number[]) => `"(${cube.join(",")})"`).join(",")}}`; - } - return `(${value.join(",")})`; - - } else if ( - ( - columnMetadata.type === "enum" - || columnMetadata.type === "simple-enum" - ) - && !columnMetadata.isArray - ) { - return "" + value; - } - - return value; - } - - /** - * Prepares given value to a value to be persisted, based on its column type or metadata. - */ - prepareHydratedValue(value: any, columnMetadata: ColumnMetadata): any { - if (value === null || value === undefined) - return columnMetadata.transformer ? ApplyValueTransformers.transformFrom(columnMetadata.transformer, value) : value; - - if (columnMetadata.type === Boolean) { - value = value ? 
true : false; - - } else if (columnMetadata.type === "datetime" - || columnMetadata.type === Date - || columnMetadata.type === "timestamp" - || columnMetadata.type === "timestamp with time zone" - || columnMetadata.type === "timestamp without time zone") { - value = DateUtils.normalizeHydratedDate(value); - - } else if (columnMetadata.type === "date") { - value = DateUtils.mixedDateToDateString(value); - - } else if (columnMetadata.type === "time") { - value = DateUtils.mixedTimeToString(value); - - } else if (columnMetadata.type === "hstore") { - if (columnMetadata.hstoreType === "object") { - const unescapeString = (str: string) => str.replace(/\\./g, (m) => m[1]); - const regexp = /"([^"\\]*(?:\\.[^"\\]*)*)"=>(?:(NULL)|"([^"\\]*(?:\\.[^"\\]*)*)")(?:,|$)/g; - const object: ObjectLiteral = {}; - `${value}`.replace(regexp, (_, key, nullValue, stringValue) => { - object[unescapeString(key)] = nullValue ? null : unescapeString(stringValue); - return ""; - }); - return object; - - } else { - return value; - } - - } else if (columnMetadata.type === "simple-array") { - value = DateUtils.stringToSimpleArray(value); - - } else if (columnMetadata.type === "simple-json") { - value = DateUtils.stringToSimpleJson(value); - - } else if (columnMetadata.type === "cube") { - value = value.replace(/[\(\)\s]+/g, ""); // remove whitespace - if (columnMetadata.isArray) { - /** - * Strips these groups from `{"1,2,3","",NULL}`: - * 1. ["1,2,3", undefined] <- cube of arity 3 - * 2. ["", undefined] <- cube of arity 0 - * 3. [undefined, "NULL"] <- NULL - */ - const regexp = /(?:\"((?:[\d\s\.,])*)\")|(?:(NULL))/g; - const unparsedArrayString = value; - - value = []; - let cube: RegExpExecArray | null = null; - // Iterate through all regexp matches for cubes/null in array - while ((cube = regexp.exec(unparsedArrayString)) !== null) { - if (cube[1] !== undefined) { - value.push(cube[1].split(",").filter(Boolean).map(Number)); - } else { - value.push(undefined); - } - } - } else { - value = value.split(",").filter(Boolean).map(Number); - } - - } else if (columnMetadata.type === "enum" || columnMetadata.type === "simple-enum" ) { - if (columnMetadata.isArray) { - // manually convert enum array to array of values (pg does not support, see https://github.com/brianc/node-pg-types/issues/56) - value = value !== "{}" ? (value as string).substr(1, (value as string).length - 2).split(",") : []; - // convert to number if that exists in poosible enum options - value = value.map((val: string) => { - return !isNaN(+val) && columnMetadata.enum!.indexOf(parseInt(val)) >= 0 ? parseInt(val) : val; - }); - } else { - // convert to number if that exists in poosible enum options - value = !isNaN(+value) && columnMetadata.enum!.indexOf(parseInt(value)) >= 0 ? parseInt(value) : value; - } - } - - if (columnMetadata.transformer) - value = ApplyValueTransformers.transformFrom(columnMetadata.transformer, value); - - return value; - } - - /** - * Replaces parameters in the given sql with special escaping character - * and an array of parameter names to be passed to a query. - */ - escapeQueryWithParameters(sql: string, parameters: ObjectLiteral, nativeParameters: ObjectLiteral): [string, any[]] { - const builtParameters: any[] = Object.keys(nativeParameters).map(key => nativeParameters[key]); - if (!parameters || !Object.keys(parameters).length) - return [sql, builtParameters]; - - const keys = Object.keys(parameters).map(parameter => "(:(\\.\\.\\.)?" 
+ parameter + "\\b)").join("|"); - sql = sql.replace(new RegExp(keys, "g"), (key: string): string => { - let value: any; - let isArray = false; - if (key.substr(0, 4) === ":...") { - isArray = true; - value = parameters[key.substr(4)]; - } else { - value = parameters[key.substr(1)]; - } - - if (isArray) { - return value.map((v: any) => { - builtParameters.push(v); - return "$" + builtParameters.length; - }).join(", "); - - } else if (value instanceof Function) { - return value(); - - } else { - builtParameters.push(value); - return "$" + builtParameters.length; - } - }); // todo: make replace only in value statements, otherwise problems - return [sql, builtParameters]; - } - - /** - * Escapes a column name. - */ - escape(columnName: string): string { - return "\"" + columnName + "\""; - } - - /** - * Build full table name with schema name and table name. - * E.g. "mySchema"."myTable" - */ - buildTableName(tableName: string, schema?: string): string { - return schema ? `${schema}.${tableName}` : tableName; - } - - /** - * Creates a database type from a given column metadata. - */ - normalizeType(column: { type?: ColumnType, length?: number | string, precision?: number|null, scale?: number, isArray?: boolean }): string { - if (column.type === Number || column.type === "int" || column.type === "int4") { - return "integer"; - - } else if (column.type === String || column.type === "varchar") { - return "character varying"; - - } else if (column.type === Date || column.type === "timestamp") { - return "timestamp without time zone"; - - } else if (column.type === "timestamptz") { - return "timestamp with time zone"; - - } else if (column.type === "time") { - return "time without time zone"; - - } else if (column.type === "timetz") { - return "time with time zone"; - - } else if (column.type === Boolean || column.type === "bool") { - return "boolean"; - - } else if (column.type === "simple-array") { - return "text"; - - } else if (column.type === "simple-json") { - return "text"; - - } else if (column.type === "simple-enum") { - return "enum"; - - } else if (column.type === "int2") { - return "smallint"; - - } else if (column.type === "int8") { - return "bigint"; - - } else if (column.type === "decimal") { - return "numeric"; - - } else if (column.type === "float8" || column.type === "float") { - return "double precision"; - - } else if (column.type === "float4") { - return "real"; - - } else if (column.type === "char") { - return "character"; - - } else if (column.type === "varbit") { - return "bit varying"; - - } else { - return column.type as string || ""; - } - } - - /** - * Normalizes "default" value of the column. - */ - normalizeDefault(columnMetadata: ColumnMetadata): string { - const defaultValue = columnMetadata.default; - const arrayCast = columnMetadata.isArray ? `::${columnMetadata.type}[]` : ""; - - if ( - ( - columnMetadata.type === "enum" - || columnMetadata.type === "simple-enum" - ) && defaultValue !== undefined - ) { - if (columnMetadata.isArray && Array.isArray(defaultValue)) { - return `'{${defaultValue.map((val: string) => `${val}`).join(",")}}'`; - } - return `'${defaultValue}'`; - } - - if (typeof defaultValue === "number") { - return "" + defaultValue; - - } else if (typeof defaultValue === "boolean") { - return defaultValue === true ? 
"true" : "false"; - - } else if (typeof defaultValue === "function") { - return defaultValue(); - - } else if (typeof defaultValue === "string") { - return `'${defaultValue}'${arrayCast}`; - - } else if (defaultValue === null) { - return `null`; - - } else if (typeof defaultValue === "object") { - return `'${JSON.stringify(defaultValue)}'`; - - } else { - return defaultValue; - } - } - - /** - * Normalizes "isUnique" value of the column. - */ - normalizeIsUnique(column: ColumnMetadata): boolean { - return column.entityMetadata.uniques.some(uq => uq.columns.length === 1 && uq.columns[0] === column); - } - - /** - * Returns default column lengths, which is required on column creation. - */ - getColumnLength(column: ColumnMetadata): string { - return column.length ? column.length.toString() : ""; - } - - /** - * Creates column type definition including length, precision and scale - */ - createFullType(column: TableColumn): string { - let type = column.type; - - if (column.length) { - type += "(" + column.length + ")"; - } else if (column.precision !== null && column.precision !== undefined && column.scale !== null && column.scale !== undefined) { - type += "(" + column.precision + "," + column.scale + ")"; - } else if (column.precision !== null && column.precision !== undefined) { - type += "(" + column.precision + ")"; - } - - if (column.type === "time without time zone") { - type = "TIME" + (column.precision !== null && column.precision !== undefined ? "(" + column.precision + ")" : ""); - - } else if (column.type === "time with time zone") { - type = "TIME" + (column.precision !== null && column.precision !== undefined ? "(" + column.precision + ")" : "") + " WITH TIME ZONE"; - - } else if (column.type === "timestamp without time zone") { - type = "TIMESTAMP" + (column.precision !== null && column.precision !== undefined ? "(" + column.precision + ")" : ""); - - } else if (column.type === "timestamp with time zone") { - type = "TIMESTAMP" + (column.precision !== null && column.precision !== undefined ? "(" + column.precision + ")" : "") + " WITH TIME ZONE"; - } else if (this.spatialTypes.indexOf(column.type as ColumnType) >= 0) { - if (column.spatialFeatureType != null && column.srid != null) { - type = `${column.type}(${column.spatialFeatureType},${column.srid})`; - } else if (column.spatialFeatureType != null) { - type = `${column.type}(${column.spatialFeatureType})`; - } else { - type = column.type; - } - } - - if (column.isArray) - type += " array"; - - return type; - } - - /** - * Obtains a new database connection to a master server. - * Used for replication. - * If replication is not setup then returns default connection's database connection. - */ - obtainMasterConnection(): Promise { - return new Promise((ok, fail) => { - this.master.connect((err: any, connection: any, release: any) => { - err ? fail(err) : ok([connection, release]); - }); - }); - } - - /** - * Obtains a new database connection to a slave server. - * Used for replication. - * If replication is not setup then returns master (default) connection's database connection. - */ - obtainSlaveConnection(): Promise { - if (!this.slaves.length) - return this.obtainMasterConnection(); - - return new Promise((ok, fail) => { - const random = Math.floor(Math.random() * this.slaves.length); - this.slaves[random].connect((err: any, connection: any, release: any) => { - err ? fail(err) : ok([connection, release]); - }); - }); - } - - /** - * Creates generated map of values generated or returned by database after INSERT query. 
- * - * todo: slow. optimize Object.keys(), OrmUtils.mergeDeep and column.createValueMap parts - */ - createGeneratedMap(metadata: EntityMetadata, insertResult: ObjectLiteral) { - if (!insertResult) - return undefined; - - return Object.keys(insertResult).reduce((map, key) => { - const column = metadata.findColumnWithDatabaseName(key); - if (column) { - OrmUtils.mergeDeep(map, column.createValueMap(insertResult[key])); - // OrmUtils.mergeDeep(map, column.createValueMap(this.prepareHydratedValue(insertResult[key], column))); // TODO: probably should be like there, but fails on enums, fix later - } - return map; - }, {} as ObjectLiteral); - } - - /** - * Differentiate columns of this table and columns from the given column metadatas columns - * and returns only changed. - */ - findChangedColumns(tableColumns: TableColumn[], columnMetadatas: ColumnMetadata[]): ColumnMetadata[] { - return columnMetadatas.filter(columnMetadata => { - const tableColumn = tableColumns.find(c => c.name === columnMetadata.databaseName); - if (!tableColumn) - return false; // we don't need new columns, we only need exist and changed - - return tableColumn.name !== columnMetadata.databaseName - || tableColumn.type !== this.normalizeType(columnMetadata) - || tableColumn.length !== columnMetadata.length - || tableColumn.precision !== columnMetadata.precision - || tableColumn.scale !== columnMetadata.scale - // || tableColumn.comment !== columnMetadata.comment // todo - || (!tableColumn.isGenerated && this.lowerDefaultValueIfNecessary(this.normalizeDefault(columnMetadata)) !== tableColumn.default) // we included check for generated here, because generated columns already can have default values - || tableColumn.isPrimary !== columnMetadata.isPrimary - || tableColumn.isNullable !== columnMetadata.isNullable - || tableColumn.isUnique !== this.normalizeIsUnique(columnMetadata) - || (tableColumn.enum && columnMetadata.enum && !OrmUtils.isArraysEqual(tableColumn.enum, columnMetadata.enum.map(val => val + ""))) // enums in postgres are always strings - || tableColumn.isGenerated !== columnMetadata.isGenerated - || (tableColumn.spatialFeatureType || "").toLowerCase() !== (columnMetadata.spatialFeatureType || "").toLowerCase() - || tableColumn.srid !== columnMetadata.srid; - }); - } - - private lowerDefaultValueIfNecessary(value: string | undefined) { - // Postgres saves function calls in default value as lowercase #2733 - if (!value) { - return value; - } - return value.split(`'`).map((v, i) => { - return i % 2 === 1 ? v : v.toLowerCase(); - }).join(`'`); - } - /** - * Returns true if driver supports RETURNING / OUTPUT statement. - */ - isReturningSqlSupported(): boolean { - return true; - } - - /** - * Returns true if driver supports uuid values generation on its own. - */ - isUUIDGenerationSupported(): boolean { - return true; - } - - get uuidGenerator(): string { - return this.options.uuidExtension === "pgcrypto" ? "gen_random_uuid()" : "uuid_generate_v4()"; - } - - /** - * Creates an escaped parameter. - */ - createParameter(parameterName: string, index: number): string { - return "$" + (index + 1); - } - - // ------------------------------------------------------------------------- - // Public Methods - // ------------------------------------------------------------------------- - - /** - * Loads postgres query stream package. 
- */ - loadStreamDependency() { - try { - return PlatformTools.load("pg-query-stream"); - - } catch (e) { // todo: better error for browser env - throw new Error(`To use streams you should install pg-query-stream package. Please run npm i pg-query-stream --save command.`); - } - } - - // ------------------------------------------------------------------------- - // Protected Methods - // ------------------------------------------------------------------------- - - /** - * If driver dependency is not given explicitly, then try to load it via "require". - */ - protected loadDependencies(): void { - const { pg } = PlatformTools.load("typeorm-aurora-data-api-driver"); - - // Driver uses rollup for publishing, which has issues when using typeorm in combination with webpack - // See https://github.com/webpack/webpack/issues/4742#issuecomment-295556787 - this.DataApiDriver = pg; - } - - /** - * Executes given query. - */ - protected executeQuery(connection: any, query: string) { - return this.client.query(query); - } - -} diff --git a/src/driver/aurora-data-api-pg/AuroraDataApiPostgresQueryRunner.ts b/src/driver/aurora-data-api-pg/AuroraDataApiPostgresQueryRunner.ts index 388ba26f11..2a553c134c 100644 --- a/src/driver/aurora-data-api-pg/AuroraDataApiPostgresQueryRunner.ts +++ b/src/driver/aurora-data-api-pg/AuroraDataApiPostgresQueryRunner.ts @@ -1,31 +1,23 @@ -import {PromiseUtils} from "../../"; -import {ObjectLiteral} from "../../common/ObjectLiteral"; import {QueryRunnerAlreadyReleasedError} from "../../error/QueryRunnerAlreadyReleasedError"; import {TransactionAlreadyStartedError} from "../../error/TransactionAlreadyStartedError"; import {TransactionNotStartedError} from "../../error/TransactionNotStartedError"; -import {ColumnType} from "../../index"; -import {ReadStream} from "../../platform/PlatformTools"; -import {BaseQueryRunner} from "../../query-runner/BaseQueryRunner"; import {QueryRunner} from "../../query-runner/QueryRunner"; -import {TableIndexOptions} from "../../schema-builder/options/TableIndexOptions"; -import {Table} from "../../schema-builder/table/Table"; -import {TableCheck} from "../../schema-builder/table/TableCheck"; -import {TableColumn} from "../../schema-builder/table/TableColumn"; -import {TableExclusion} from "../../schema-builder/table/TableExclusion"; -import {TableForeignKey} from "../../schema-builder/table/TableForeignKey"; -import {TableIndex} from "../../schema-builder/table/TableIndex"; -import {TableUnique} from "../../schema-builder/table/TableUnique"; -import {View} from "../../schema-builder/view/View"; -import {Broadcaster} from "../../subscriber/Broadcaster"; -import {OrmUtils} from "../../util/OrmUtils"; -import {Query} from "../Query"; import {IsolationLevel} from "../types/IsolationLevel"; -import {AuroraDataApiPostgresDriver} from "./AuroraDataApiPostgresDriver"; +import {AuroraDataApiPostgresDriver} from "../postgres/PostgresDriver"; +import {PostgresQueryRunner} from "../postgres/PostgresQueryRunner"; + +class PostgresQueryRunnerWrapper extends PostgresQueryRunner { + driver: any; + + constructor(driver: any, mode: "master"|"slave") { + super(driver, mode); + } +} /** * Runs queries on a single postgres database connection. 
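
A brief, hedged note on what the wrapper refactor above means for calling code: since the runner now inherits from PostgresQueryRunner (via PostgresQueryRunnerWrapper), the usual QueryRunner surface should keep working unchanged. The table name below is made up for illustration:

    // Inside an async function, given an established TypeORM connection:
    const queryRunner = connection.createQueryRunner();      // AuroraDataApiPostgresQueryRunner
    const hasPostTable = await queryRunner.hasTable("post");  // behavior inherited from PostgresQueryRunner
    await queryRunner.release();
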
*/ -export class AuroraDataApiPostgresQueryRunner extends BaseQueryRunner implements QueryRunner { +export class AuroraDataApiPostgresQueryRunner extends PostgresQueryRunnerWrapper implements QueryRunner { // ------------------------------------------------------------------------- // Public Implemented Properties @@ -55,11 +47,7 @@ export class AuroraDataApiPostgresQueryRunner extends BaseQueryRunner implements // ------------------------------------------------------------------------- constructor(driver: AuroraDataApiPostgresDriver, mode: "master"|"slave" = "master") { - super(); - this.driver = driver; - this.connection = driver.connection; - this.mode = mode; - this.broadcaster = new Broadcaster(this); + super(driver, mode); } // ------------------------------------------------------------------------- @@ -97,21 +85,6 @@ export class AuroraDataApiPostgresQueryRunner extends BaseQueryRunner implements return this.databaseConnectionPromise; } - /** - * Releases used database connection. - * You cannot use query runner methods once its released. - */ - release(): Promise { - this.isReleased = true; - if (this.releaseCallback) - this.releaseCallback(); - - const index = this.driver.connectedQueryRunners.indexOf(this); - if (index !== -1) this.driver.connectedQueryRunners.splice(index); - - return Promise.resolve(); - } - /** * Starts transaction on the current connection. */ @@ -162,1900 +135,4 @@ export class AuroraDataApiPostgresQueryRunner extends BaseQueryRunner implements return result; } - - /** - * Returns raw data stream. - */ - stream(query: string, parameters?: any[], onEnd?: Function, onError?: Function): Promise { - const QueryStream = this.driver.loadStreamDependency(); - if (this.isReleased) - throw new QueryRunnerAlreadyReleasedError(); - - return new Promise(async (ok, fail) => { - try { - const databaseConnection = await this.connect(); - this.driver.connection.logger.logQuery(query, parameters, this); - const stream = databaseConnection.query(new QueryStream(query, parameters)); - if (onEnd) stream.on("end", onEnd); - if (onError) stream.on("error", onError); - ok(stream); - - } catch (err) { - fail(err); - } - }); - } - - /** - * Returns all available database names including system databases. - */ - async getDatabases(): Promise { - return Promise.resolve([]); - } - - /** - * Returns all available schema names including system schemas. - * If database parameter specified, returns schemas of that database. - */ - async getSchemas(database?: string): Promise { - return Promise.resolve([]); - } - - /** - * Checks if database with the given name exist. - */ - async hasDatabase(database: string): Promise { - return Promise.resolve(false); - } - - /** - * Checks if schema with the given name exist. - */ - async hasSchema(schema: string): Promise { - const result = await this.query(`SELECT * FROM "information_schema"."schemata" WHERE "schema_name" = '${schema}'`); - return result.length ? true : false; - } - - /** - * Checks if table with the given name exist in the database. - */ - async hasTable(tableOrName: Table|string): Promise { - const parsedTableName = this.parseTableName(tableOrName); - const sql = `SELECT * FROM "information_schema"."tables" WHERE "table_schema" = ${parsedTableName.schema} AND "table_name" = ${parsedTableName.tableName}`; - const result = await this.query(sql); - return result.length ? true : false; - } - - /** - * Checks if column with the given name exist in the given table. 
- */ - async hasColumn(tableOrName: Table|string, columnName: string): Promise { - const parsedTableName = this.parseTableName(tableOrName); - const sql = `SELECT * FROM "information_schema"."columns" WHERE "table_schema" = ${parsedTableName.schema} AND "table_name" = ${parsedTableName.tableName} AND "column_name" = '${columnName}'`; - const result = await this.query(sql); - return result.length ? true : false; - } - - /** - * Creates a new database. - * Postgres does not supports database creation inside a transaction block. - */ - async createDatabase(database: string, ifNotExist?: boolean): Promise { - await Promise.resolve(); - } - - /** - * Drops database. - * Postgres does not supports database drop inside a transaction block. - */ - async dropDatabase(database: string, ifExist?: boolean): Promise { - return Promise.resolve(); - } - - /** - * Creates a new table schema. - */ - async createSchema(schema: string, ifNotExist?: boolean): Promise { - const up = ifNotExist ? `CREATE SCHEMA IF NOT EXISTS "${schema}"` : `CREATE SCHEMA "${schema}"`; - const down = `DROP SCHEMA "${schema}" CASCADE`; - await this.executeQueries(new Query(up), new Query(down)); - } - - /** - * Drops table schema. - */ - async dropSchema(schemaPath: string, ifExist?: boolean, isCascade?: boolean): Promise { - const schema = schemaPath.indexOf(".") === -1 ? schemaPath : schemaPath.split(".")[0]; - const up = ifExist ? `DROP SCHEMA IF EXISTS "${schema}" ${isCascade ? "CASCADE" : ""}` : `DROP SCHEMA "${schema}" ${isCascade ? "CASCADE" : ""}`; - const down = `CREATE SCHEMA "${schema}"`; - await this.executeQueries(new Query(up), new Query(down)); - } - - /** - * Creates a new table. - */ - async createTable(table: Table, ifNotExist: boolean = false, createForeignKeys: boolean = true, createIndices: boolean = true): Promise { - if (ifNotExist) { - const isTableExist = await this.hasTable(table); - if (isTableExist) return Promise.resolve(); - } - const upQueries: Query[] = []; - const downQueries: Query[] = []; - - // if table have column with ENUM type, we must create this type in postgres. - await Promise.all(table.columns - .filter(column => column.type === "enum" || column.type === "simple-enum") - .map(async column => { - const hasEnum = await this.hasEnumType(table, column); - // TODO: Should also check if values of existing type matches expected ones - if (!hasEnum) { - upQueries.push(this.createEnumTypeSql(table, column)); - downQueries.push(this.dropEnumTypeSql(table, column)); - } - return Promise.resolve(); - })); - - upQueries.push(this.createTableSql(table, createForeignKeys)); - downQueries.push(this.dropTableSql(table)); - - // if createForeignKeys is true, we must drop created foreign keys in down query. - // createTable does not need separate method to create foreign keys, because it create fk's in the same query with table creation. - if (createForeignKeys) - table.foreignKeys.forEach(foreignKey => downQueries.push(this.dropForeignKeySql(table, foreignKey))); - - if (createIndices) { - table.indices.forEach(index => { - - // new index may be passed without name. In this case we generate index name manually. - if (!index.name) - index.name = this.connection.namingStrategy.indexName(table.name, index.columnNames, index.where); - upQueries.push(this.createIndexSql(table, index)); - downQueries.push(this.dropIndexSql(table, index)); - }); - } - - await this.executeQueries(upQueries, downQueries); - } - - /** - * Drops the table. 
- */ - async dropTable(target: Table|string, ifExist?: boolean, dropForeignKeys: boolean = true, dropIndices: boolean = true): Promise {// It needs because if table does not exist and dropForeignKeys or dropIndices is true, we don't need - // to perform drop queries for foreign keys and indices. - if (ifExist) { - const isTableExist = await this.hasTable(target); - if (!isTableExist) return Promise.resolve(); - } - - // if dropTable called with dropForeignKeys = true, we must create foreign keys in down query. - const createForeignKeys: boolean = dropForeignKeys; - const tableName = target instanceof Table ? target.name : target; - const table = await this.getCachedTable(tableName); - const upQueries: Query[] = []; - const downQueries: Query[] = []; - - - if (dropIndices) { - table.indices.forEach(index => { - upQueries.push(this.dropIndexSql(table, index)); - downQueries.push(this.createIndexSql(table, index)); - }); - } - - if (dropForeignKeys) - table.foreignKeys.forEach(foreignKey => upQueries.push(this.dropForeignKeySql(table, foreignKey))); - - upQueries.push(this.dropTableSql(table)); - downQueries.push(this.createTableSql(table, createForeignKeys)); - - await this.executeQueries(upQueries, downQueries); - } - - /** - * Creates a new view. - */ - async createView(view: View): Promise { - const upQueries: Query[] = []; - const downQueries: Query[] = []; - upQueries.push(this.createViewSql(view)); - upQueries.push(await this.insertViewDefinitionSql(view)); - downQueries.push(this.dropViewSql(view)); - downQueries.push(await this.deleteViewDefinitionSql(view)); - await this.executeQueries(upQueries, downQueries); - } - - /** - * Drops the view. - */ - async dropView(target: View|string): Promise { - const viewName = target instanceof View ? target.name : target; - const view = await this.getCachedView(viewName); - - const upQueries: Query[] = []; - const downQueries: Query[] = []; - upQueries.push(await this.deleteViewDefinitionSql(view)); - upQueries.push(this.dropViewSql(view)); - downQueries.push(await this.insertViewDefinitionSql(view)); - downQueries.push(this.createViewSql(view)); - await this.executeQueries(upQueries, downQueries); - } - - /** - * Renames the given table. - */ - async renameTable(oldTableOrName: Table|string, newTableName: string): Promise { - const upQueries: Query[] = []; - const downQueries: Query[] = []; - const oldTable = oldTableOrName instanceof Table ? oldTableOrName : await this.getCachedTable(oldTableOrName); - const newTable = oldTable.clone(); - const oldTableName = oldTable.name.indexOf(".") === -1 ? oldTable.name : oldTable.name.split(".")[1]; - const schemaName = oldTable.name.indexOf(".") === -1 ? undefined : oldTable.name.split(".")[0]; - newTable.name = schemaName ? 
`${schemaName}.${newTableName}` : newTableName; - - upQueries.push(new Query(`ALTER TABLE ${this.escapePath(oldTable)} RENAME TO "${newTableName}"`)); - downQueries.push(new Query(`ALTER TABLE ${this.escapePath(newTable)} RENAME TO "${oldTableName}"`)); - - // rename column primary key constraint - if (newTable.primaryColumns.length > 0) { - const columnNames = newTable.primaryColumns.map(column => column.name); - - const oldPkName = this.connection.namingStrategy.primaryKeyName(oldTable, columnNames); - const newPkName = this.connection.namingStrategy.primaryKeyName(newTable, columnNames); - - upQueries.push(new Query(`ALTER TABLE ${this.escapePath(newTable)} RENAME CONSTRAINT "${oldPkName}" TO "${newPkName}"`)); - downQueries.push(new Query(`ALTER TABLE ${this.escapePath(newTable)} RENAME CONSTRAINT "${newPkName}" TO "${oldPkName}"`)); - } - - // rename unique constraints - newTable.uniques.forEach(unique => { - // build new constraint name - const newUniqueName = this.connection.namingStrategy.uniqueConstraintName(newTable, unique.columnNames); - - // build queries - upQueries.push(new Query(`ALTER TABLE ${this.escapePath(newTable)} RENAME CONSTRAINT "${unique.name}" TO "${newUniqueName}"`)); - downQueries.push(new Query(`ALTER TABLE ${this.escapePath(newTable)} RENAME CONSTRAINT "${newUniqueName}" TO "${unique.name}"`)); - - // replace constraint name - unique.name = newUniqueName; - }); - - // rename index constraints - newTable.indices.forEach(index => { - // build new constraint name - const schema = this.extractSchema(newTable); - const newIndexName = this.connection.namingStrategy.indexName(newTable, index.columnNames, index.where); - - // build queries - const up = schema ? `ALTER INDEX "${schema}"."${index.name}" RENAME TO "${newIndexName}"` : `ALTER INDEX "${index.name}" RENAME TO "${newIndexName}"`; - const down = schema ? `ALTER INDEX "${schema}"."${newIndexName}" RENAME TO "${index.name}"` : `ALTER INDEX "${newIndexName}" RENAME TO "${index.name}"`; - upQueries.push(new Query(up)); - downQueries.push(new Query(down)); - - // replace constraint name - index.name = newIndexName; - }); - - // rename foreign key constraints - newTable.foreignKeys.forEach(foreignKey => { - // build new constraint name - const newForeignKeyName = this.connection.namingStrategy.foreignKeyName(newTable, foreignKey.columnNames, foreignKey.referencedTableName, foreignKey.referencedColumnNames); - - // build queries - upQueries.push(new Query(`ALTER TABLE ${this.escapePath(newTable)} RENAME CONSTRAINT "${foreignKey.name}" TO "${newForeignKeyName}"`)); - downQueries.push(new Query(`ALTER TABLE ${this.escapePath(newTable)} RENAME CONSTRAINT "${newForeignKeyName}" TO "${foreignKey.name}"`)); - - // replace constraint name - foreignKey.name = newForeignKeyName; - }); - - // rename ENUM types - const enumColumns = newTable.columns.filter(column => column.type === "enum" || column.type === "simple-enum"); - for (let column of enumColumns) { - const oldEnumType = await this.getEnumTypeName(oldTable, column); - upQueries.push(new Query(`ALTER TYPE "${oldEnumType.enumTypeSchema}"."${oldEnumType.enumTypeName}" RENAME TO ${this.buildEnumName(newTable, column, false)}`)); - downQueries.push(new Query(`ALTER TYPE ${this.buildEnumName(newTable, column)} RENAME TO "${oldEnumType.enumTypeName}"`)); - } - await this.executeQueries(upQueries, downQueries); - } - - /** - * Creates a new column from the column in the table. 
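
As an illustration of the paired up/down queries the renameTable logic shown here produces (table and constraint names are hypothetical; the generated names come from the configured NamingStrategy):

    await queryRunner.renameTable("post", "article");
    // up:   ALTER TABLE "post" RENAME TO "article"
    //       plus RENAME CONSTRAINT / ALTER INDEX ... RENAME TO statements for the primary key,
    //       unique, index and foreign key names derived from the new table name
    // down: ALTER TABLE "article" RENAME TO "post" and the reverse constraint renames
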
- */ - async addColumn(tableOrName: Table|string, column: TableColumn): Promise { - const table = tableOrName instanceof Table ? tableOrName : await this.getCachedTable(tableOrName); - const clonedTable = table.clone(); - const upQueries: Query[] = []; - const downQueries: Query[] = []; - - if (column.type === "enum" || column.type === "simple-enum") { - const hasEnum = await this.hasEnumType(table, column); - if (!hasEnum) { - upQueries.push(this.createEnumTypeSql(table, column)); - downQueries.push(this.dropEnumTypeSql(table, column)); - } - } - - upQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} ADD ${this.buildCreateColumnSql(table, column)}`)); - downQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} DROP COLUMN "${column.name}"`)); - - // create or update primary key constraint - if (column.isPrimary) { - const primaryColumns = clonedTable.primaryColumns; - // if table already have primary key, me must drop it and recreate again - if (primaryColumns.length > 0) { - const pkName = this.connection.namingStrategy.primaryKeyName(clonedTable.name, primaryColumns.map(column => column.name)); - const columnNames = primaryColumns.map(column => `"${column.name}"`).join(", "); - upQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} DROP CONSTRAINT "${pkName}"`)); - downQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} ADD CONSTRAINT "${pkName}" PRIMARY KEY (${columnNames})`)); - } - - primaryColumns.push(column); - const pkName = this.connection.namingStrategy.primaryKeyName(clonedTable.name, primaryColumns.map(column => column.name)); - const columnNames = primaryColumns.map(column => `"${column.name}"`).join(", "); - upQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} ADD CONSTRAINT "${pkName}" PRIMARY KEY (${columnNames})`)); - downQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} DROP CONSTRAINT "${pkName}"`)); - } - - // create column index - const columnIndex = clonedTable.indices.find(index => index.columnNames.length === 1 && index.columnNames[0] === column.name); - if (columnIndex) { - upQueries.push(this.createIndexSql(table, columnIndex)); - downQueries.push(this.dropIndexSql(table, columnIndex)); - } - - // create unique constraint - if (column.isUnique) { - const uniqueConstraint = new TableUnique({ - name: this.connection.namingStrategy.uniqueConstraintName(table.name, [column.name]), - columnNames: [column.name] - }); - clonedTable.uniques.push(uniqueConstraint); - upQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} ADD CONSTRAINT "${uniqueConstraint.name}" UNIQUE ("${column.name}")`)); - downQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} DROP CONSTRAINT "${uniqueConstraint.name}"`)); - } - - await this.executeQueries(upQueries, downQueries); - - clonedTable.addColumn(column); - this.replaceCachedTable(table, clonedTable); - } - - /** - * Creates a new columns from the column in the table. - */ - async addColumns(tableOrName: Table|string, columns: TableColumn[]): Promise { - await PromiseUtils.runInSequence(columns, column => this.addColumn(tableOrName, column)); - } - - /** - * Renames column in the given table. - */ - async renameColumn(tableOrName: Table|string, oldTableColumnOrName: TableColumn|string, newTableColumnOrName: TableColumn|string): Promise { - const table = tableOrName instanceof Table ? tableOrName : await this.getCachedTable(tableOrName); - const oldColumn = oldTableColumnOrName instanceof TableColumn ? 
oldTableColumnOrName : table.columns.find(c => c.name === oldTableColumnOrName); - if (!oldColumn) - throw new Error(`Column "${oldTableColumnOrName}" was not found in the "${table.name}" table.`); - - let newColumn; - if (newTableColumnOrName instanceof TableColumn) { - newColumn = newTableColumnOrName; - } else { - newColumn = oldColumn.clone(); - newColumn.name = newTableColumnOrName; - } - - return this.changeColumn(table, oldColumn, newColumn); - } - - /** - * Changes a column in the table. - */ - async changeColumn(tableOrName: Table|string, oldTableColumnOrName: TableColumn|string, newColumn: TableColumn): Promise { - const table = tableOrName instanceof Table ? tableOrName : await this.getCachedTable(tableOrName); - let clonedTable = table.clone(); - const upQueries: Query[] = []; - const downQueries: Query[] = []; - - const oldColumn = oldTableColumnOrName instanceof TableColumn - ? oldTableColumnOrName - : table.columns.find(column => column.name === oldTableColumnOrName); - if (!oldColumn) - throw new Error(`Column "${oldTableColumnOrName}" was not found in the "${table.name}" table.`); - - if (oldColumn.type !== newColumn.type || oldColumn.length !== newColumn.length) { - // To avoid data conversion, we just recreate column - await this.dropColumn(table, oldColumn); - await this.addColumn(table, newColumn); - - // update cloned table - clonedTable = table.clone(); - - } else { - if (oldColumn.name !== newColumn.name) { - // rename column - upQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} RENAME COLUMN "${oldColumn.name}" TO "${newColumn.name}"`)); - downQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} RENAME COLUMN "${newColumn.name}" TO "${oldColumn.name}"`)); - - // rename ENUM type - if (oldColumn.type === "enum" || oldColumn.type === "simple-enum") { - const oldEnumType = await this.getEnumTypeName(table, oldColumn); - upQueries.push(new Query(`ALTER TYPE "${oldEnumType.enumTypeSchema}"."${oldEnumType.enumTypeName}" RENAME TO ${this.buildEnumName(table, newColumn, false)}`)); - downQueries.push(new Query(`ALTER TYPE ${this.buildEnumName(table, newColumn)} RENAME TO "${oldEnumType.enumTypeName}"`)); - } - - // rename column primary key constraint - if (oldColumn.isPrimary === true) { - const primaryColumns = clonedTable.primaryColumns; - - // build old primary constraint name - const columnNames = primaryColumns.map(column => column.name); - const oldPkName = this.connection.namingStrategy.primaryKeyName(clonedTable, columnNames); - - // replace old column name with new column name - columnNames.splice(columnNames.indexOf(oldColumn.name), 1); - columnNames.push(newColumn.name); - - // build new primary constraint name - const newPkName = this.connection.namingStrategy.primaryKeyName(clonedTable, columnNames); - - upQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} RENAME CONSTRAINT "${oldPkName}" TO "${newPkName}"`)); - downQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} RENAME CONSTRAINT "${newPkName}" TO "${oldPkName}"`)); - } - - // rename column sequence - if (oldColumn.isGenerated === true && newColumn.generationStrategy === "increment") { - const schema = this.extractSchema(table); - - // building sequence name. Sequence without schema needed because it must be supplied in RENAME TO without - // schema name, but schema needed in ALTER SEQUENCE argument. 
- const seqName = this.buildSequenceName(table, oldColumn.name, undefined, true, true); - const newSeqName = this.buildSequenceName(table, newColumn.name, undefined, true, true); - - const up = schema ? `ALTER SEQUENCE "${schema}"."${seqName}" RENAME TO "${newSeqName}"` : `ALTER SEQUENCE "${seqName}" RENAME TO "${newSeqName}"`; - const down = schema ? `ALTER SEQUENCE "${schema}"."${newSeqName}" RENAME TO "${seqName}"` : `ALTER SEQUENCE "${newSeqName}" RENAME TO "${seqName}"`; - upQueries.push(new Query(up)); - downQueries.push(new Query(down)); - } - - // rename unique constraints - clonedTable.findColumnUniques(oldColumn).forEach(unique => { - // build new constraint name - unique.columnNames.splice(unique.columnNames.indexOf(oldColumn.name), 1); - unique.columnNames.push(newColumn.name); - const newUniqueName = this.connection.namingStrategy.uniqueConstraintName(clonedTable, unique.columnNames); - - // build queries - upQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} RENAME CONSTRAINT "${unique.name}" TO "${newUniqueName}"`)); - downQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} RENAME CONSTRAINT "${newUniqueName}" TO "${unique.name}"`)); - - // replace constraint name - unique.name = newUniqueName; - }); - - // rename index constraints - clonedTable.findColumnIndices(oldColumn).forEach(index => { - // build new constraint name - index.columnNames.splice(index.columnNames.indexOf(oldColumn.name), 1); - index.columnNames.push(newColumn.name); - const schema = this.extractSchema(table); - const newIndexName = this.connection.namingStrategy.indexName(clonedTable, index.columnNames, index.where); - - // build queries - const up = schema ? `ALTER INDEX "${schema}"."${index.name}" RENAME TO "${newIndexName}"` : `ALTER INDEX "${index.name}" RENAME TO "${newIndexName}"`; - const down = schema ? 
`ALTER INDEX "${schema}"."${newIndexName}" RENAME TO "${index.name}"` : `ALTER INDEX "${newIndexName}" RENAME TO "${index.name}"`; - upQueries.push(new Query(up)); - downQueries.push(new Query(down)); - - // replace constraint name - index.name = newIndexName; - }); - - // rename foreign key constraints - clonedTable.findColumnForeignKeys(oldColumn).forEach(foreignKey => { - // build new constraint name - foreignKey.columnNames.splice(foreignKey.columnNames.indexOf(oldColumn.name), 1); - foreignKey.columnNames.push(newColumn.name); - const newForeignKeyName = this.connection.namingStrategy.foreignKeyName(clonedTable, foreignKey.columnNames, foreignKey.referencedTableName, foreignKey.referencedColumnNames); - - // build queries - upQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} RENAME CONSTRAINT "${foreignKey.name}" TO "${newForeignKeyName}"`)); - downQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} RENAME CONSTRAINT "${newForeignKeyName}" TO "${foreignKey.name}"`)); - - // replace constraint name - foreignKey.name = newForeignKeyName; - }); - - // rename old column in the Table object - const oldTableColumn = clonedTable.columns.find(column => column.name === oldColumn.name); - clonedTable.columns[clonedTable.columns.indexOf(oldTableColumn!)].name = newColumn.name; - oldColumn.name = newColumn.name; - } - - if (newColumn.precision !== oldColumn.precision || newColumn.scale !== oldColumn.scale) { - upQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} ALTER COLUMN "${newColumn.name}" TYPE ${this.driver.createFullType(newColumn)}`)); - downQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} ALTER COLUMN "${newColumn.name}" TYPE ${this.driver.createFullType(oldColumn)}`)); - } - - if ( - (newColumn.type === "enum" || newColumn.type === "simple-enum") - && (oldColumn.type === "enum" || oldColumn.type === "simple-enum") - && !OrmUtils.isArraysEqual(newColumn.enum!, oldColumn.enum!) - ) { - const enumName = this.buildEnumName(table, newColumn); - const arraySuffix = newColumn.isArray ? 
"[]" : ""; - const oldEnumName = this.buildEnumName(table, newColumn, true, false, true); - const oldEnumNameWithoutSchema = this.buildEnumName(table, newColumn, false, false, true); - const enumTypeBeforeColumnChange = await this.getEnumTypeName(table, oldColumn); - - // rename old ENUM - upQueries.push(new Query(`ALTER TYPE "${enumTypeBeforeColumnChange.enumTypeSchema}"."${enumTypeBeforeColumnChange.enumTypeName}" RENAME TO ${oldEnumNameWithoutSchema}`)); - downQueries.push(new Query(`ALTER TYPE ${oldEnumName} RENAME TO "${enumTypeBeforeColumnChange.enumTypeName}"`)); - - // create new ENUM - upQueries.push(this.createEnumTypeSql(table, newColumn)); - downQueries.push(this.dropEnumTypeSql(table, oldColumn)); - - // if column have default value, we must drop it to avoid issues with type casting - if (newColumn.default !== null && newColumn.default !== undefined) { - upQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} ALTER COLUMN "${newColumn.name}" DROP DEFAULT`)); - downQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} ALTER COLUMN "${newColumn.name}" SET DEFAULT ${newColumn.default}`)); - } - - // build column types - const upType = `${enumName}${arraySuffix} USING "${newColumn.name}"::"text"::${enumName}${arraySuffix}`; - const downType = `${oldEnumName}${arraySuffix} USING "${newColumn.name}"::"text"::${oldEnumName}${arraySuffix}`; - - // update column to use new type - upQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} ALTER COLUMN "${newColumn.name}" TYPE ${upType}`)); - downQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} ALTER COLUMN "${newColumn.name}" TYPE ${downType}`)); - - // if column have default value and we dropped it before, we must bring it back - if (newColumn.default !== null && newColumn.default !== undefined) { - upQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} ALTER COLUMN "${newColumn.name}" SET DEFAULT ${newColumn.default}`)); - downQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} ALTER COLUMN "${newColumn.name}" DROP DEFAULT`)); - } - - // remove old ENUM - upQueries.push(this.dropEnumTypeSql(table, newColumn, oldEnumName)); - downQueries.push(this.createEnumTypeSql(table, oldColumn, oldEnumName)); - } - - if (oldColumn.isNullable !== newColumn.isNullable) { - if (newColumn.isNullable) { - upQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} ALTER COLUMN "${oldColumn.name}" DROP NOT NULL`)); - downQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} ALTER COLUMN "${oldColumn.name}" SET NOT NULL`)); - } else { - upQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} ALTER COLUMN "${oldColumn.name}" SET NOT NULL`)); - downQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} ALTER COLUMN "${oldColumn.name}" DROP NOT NULL`)); - } - } - - if (oldColumn.comment !== newColumn.comment) { - upQueries.push(new Query(`COMMENT ON COLUMN ${this.escapePath(table)}."${oldColumn.name}" IS '${newColumn.comment}'`)); - downQueries.push(new Query(`COMMENT ON COLUMN ${this.escapePath(table)}."${newColumn.name}" IS '${oldColumn.comment}'`)); - } - - if (newColumn.isPrimary !== oldColumn.isPrimary) { - const primaryColumns = clonedTable.primaryColumns; - - // if primary column state changed, we must always drop existed constraint. 
- if (primaryColumns.length > 0) { - const pkName = this.connection.namingStrategy.primaryKeyName(clonedTable.name, primaryColumns.map(column => column.name)); - const columnNames = primaryColumns.map(column => `"${column.name}"`).join(", "); - upQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} DROP CONSTRAINT "${pkName}"`)); - downQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} ADD CONSTRAINT "${pkName}" PRIMARY KEY (${columnNames})`)); - } - - if (newColumn.isPrimary === true) { - primaryColumns.push(newColumn); - // update column in table - const column = clonedTable.columns.find(column => column.name === newColumn.name); - column!.isPrimary = true; - const pkName = this.connection.namingStrategy.primaryKeyName(clonedTable.name, primaryColumns.map(column => column.name)); - const columnNames = primaryColumns.map(column => `"${column.name}"`).join(", "); - upQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} ADD CONSTRAINT "${pkName}" PRIMARY KEY (${columnNames})`)); - downQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} DROP CONSTRAINT "${pkName}"`)); - - } else { - const primaryColumn = primaryColumns.find(c => c.name === newColumn.name); - primaryColumns.splice(primaryColumns.indexOf(primaryColumn!), 1); - - // update column in table - const column = clonedTable.columns.find(column => column.name === newColumn.name); - column!.isPrimary = false; - - // if we have another primary keys, we must recreate constraint. - if (primaryColumns.length > 0) { - const pkName = this.connection.namingStrategy.primaryKeyName(clonedTable.name, primaryColumns.map(column => column.name)); - const columnNames = primaryColumns.map(column => `"${column.name}"`).join(", "); - upQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} ADD CONSTRAINT "${pkName}" PRIMARY KEY (${columnNames})`)); - downQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} DROP CONSTRAINT "${pkName}"`)); - } - } - } - - if (newColumn.isUnique !== oldColumn.isUnique) { - if (newColumn.isUnique === true) { - const uniqueConstraint = new TableUnique({ - name: this.connection.namingStrategy.uniqueConstraintName(table.name, [newColumn.name]), - columnNames: [newColumn.name] - }); - clonedTable.uniques.push(uniqueConstraint); - upQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} ADD CONSTRAINT "${uniqueConstraint.name}" UNIQUE ("${newColumn.name}")`)); - downQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} DROP CONSTRAINT "${uniqueConstraint.name}"`)); - - } else { - const uniqueConstraint = clonedTable.uniques.find(unique => { - return unique.columnNames.length === 1 && !!unique.columnNames.find(columnName => columnName === newColumn.name); - }); - clonedTable.uniques.splice(clonedTable.uniques.indexOf(uniqueConstraint!), 1); - upQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} DROP CONSTRAINT "${uniqueConstraint!.name}"`)); - downQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} ADD CONSTRAINT "${uniqueConstraint!.name}" UNIQUE ("${newColumn.name}")`)); - } - } - - if (oldColumn.isGenerated !== newColumn.isGenerated && newColumn.generationStrategy !== "uuid") { - if (newColumn.isGenerated === true) { - upQueries.push(new Query(`CREATE SEQUENCE ${this.buildSequenceName(table, newColumn)} OWNED BY ${this.escapePath(table)}."${newColumn.name}"`)); - downQueries.push(new Query(`DROP SEQUENCE ${this.buildSequenceName(table, newColumn)}`)); - - upQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} ALTER COLUMN 
"${newColumn.name}" SET DEFAULT nextval('${this.buildSequenceName(table, newColumn, undefined, true)}')`)); - downQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} ALTER COLUMN "${newColumn.name}" DROP DEFAULT`)); - - } else { - upQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} ALTER COLUMN "${newColumn.name}" DROP DEFAULT`)); - downQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} ALTER COLUMN "${newColumn.name}" SET DEFAULT nextval('${this.buildSequenceName(table, newColumn, undefined, true)}')`)); - - upQueries.push(new Query(`DROP SEQUENCE ${this.buildSequenceName(table, newColumn)}`)); - downQueries.push(new Query(`CREATE SEQUENCE ${this.buildSequenceName(table, newColumn)} OWNED BY ${this.escapePath(table)}."${newColumn.name}"`)); - } - } - - if (newColumn.default !== oldColumn.default) { - if (newColumn.default !== null && newColumn.default !== undefined) { - upQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} ALTER COLUMN "${newColumn.name}" SET DEFAULT ${newColumn.default}`)); - - if (oldColumn.default !== null && oldColumn.default !== undefined) { - downQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} ALTER COLUMN "${newColumn.name}" SET DEFAULT ${oldColumn.default}`)); - } else { - downQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} ALTER COLUMN "${newColumn.name}" DROP DEFAULT`)); - } - - } else if (oldColumn.default !== null && oldColumn.default !== undefined) { - upQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} ALTER COLUMN "${newColumn.name}" DROP DEFAULT`)); - downQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} ALTER COLUMN "${newColumn.name}" SET DEFAULT ${oldColumn.default}`)); - } - } - - if ((newColumn.spatialFeatureType || "").toLowerCase() !== (oldColumn.spatialFeatureType || "").toLowerCase() || newColumn.srid !== oldColumn.srid) { - upQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} ALTER COLUMN "${newColumn.name}" TYPE ${this.driver.createFullType(newColumn)}`)); - downQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} ALTER COLUMN "${newColumn.name}" TYPE ${this.driver.createFullType(oldColumn)}`)); - } - - } - - await this.executeQueries(upQueries, downQueries); - this.replaceCachedTable(table, clonedTable); - } - - /** - * Changes a column in the table. - */ - async changeColumns(tableOrName: Table|string, changedColumns: { newColumn: TableColumn, oldColumn: TableColumn }[]): Promise { - await PromiseUtils.runInSequence(changedColumns, changedColumn => this.changeColumn(tableOrName, changedColumn.oldColumn, changedColumn.newColumn)); - } - - /** - * Drops column in the table. - */ - async dropColumn(tableOrName: Table|string, columnOrName: TableColumn|string): Promise { - const table = tableOrName instanceof Table ? tableOrName : await this.getCachedTable(tableOrName); - const column = columnOrName instanceof TableColumn ? 
columnOrName : table.findColumnByName(columnOrName); - if (!column) - throw new Error(`Column "${columnOrName}" was not found in table "${table.name}"`); - - const clonedTable = table.clone(); - const upQueries: Query[] = []; - const downQueries: Query[] = []; - - // drop primary key constraint - if (column.isPrimary) { - const pkName = this.connection.namingStrategy.primaryKeyName(clonedTable.name, clonedTable.primaryColumns.map(column => column.name)); - const columnNames = clonedTable.primaryColumns.map(primaryColumn => `"${primaryColumn.name}"`).join(", "); - upQueries.push(new Query(`ALTER TABLE ${this.escapePath(clonedTable)} DROP CONSTRAINT "${pkName}"`)); - downQueries.push(new Query(`ALTER TABLE ${this.escapePath(clonedTable)} ADD CONSTRAINT "${pkName}" PRIMARY KEY (${columnNames})`)); - - // update column in table - const tableColumn = clonedTable.findColumnByName(column.name); - tableColumn!.isPrimary = false; - - // if primary key have multiple columns, we must recreate it without dropped column - if (clonedTable.primaryColumns.length > 0) { - const pkName = this.connection.namingStrategy.primaryKeyName(clonedTable.name, clonedTable.primaryColumns.map(column => column.name)); - const columnNames = clonedTable.primaryColumns.map(primaryColumn => `"${primaryColumn.name}"`).join(", "); - upQueries.push(new Query(`ALTER TABLE ${this.escapePath(clonedTable)} ADD CONSTRAINT "${pkName}" PRIMARY KEY (${columnNames})`)); - downQueries.push(new Query(`ALTER TABLE ${this.escapePath(clonedTable)} DROP CONSTRAINT "${pkName}"`)); - } - } - - // drop column index - const columnIndex = clonedTable.indices.find(index => index.columnNames.length === 1 && index.columnNames[0] === column.name); - if (columnIndex) { - clonedTable.indices.splice(clonedTable.indices.indexOf(columnIndex), 1); - upQueries.push(this.dropIndexSql(table, columnIndex)); - downQueries.push(this.createIndexSql(table, columnIndex)); - } - - // drop column check - const columnCheck = clonedTable.checks.find(check => !!check.columnNames && check.columnNames.length === 1 && check.columnNames[0] === column.name); - if (columnCheck) { - clonedTable.checks.splice(clonedTable.checks.indexOf(columnCheck), 1); - upQueries.push(this.dropCheckConstraintSql(table, columnCheck)); - downQueries.push(this.createCheckConstraintSql(table, columnCheck)); - } - - // drop column unique - const columnUnique = clonedTable.uniques.find(unique => unique.columnNames.length === 1 && unique.columnNames[0] === column.name); - if (columnUnique) { - clonedTable.uniques.splice(clonedTable.uniques.indexOf(columnUnique), 1); - upQueries.push(this.dropUniqueConstraintSql(table, columnUnique)); - downQueries.push(this.createUniqueConstraintSql(table, columnUnique)); - } - - upQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} DROP COLUMN "${column.name}"`)); - downQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} ADD ${this.buildCreateColumnSql(table, column)}`)); - - // drop enum type - if (column.type === "enum" || column.type === "simple-enum") { - const hasEnum = await this.hasEnumType(table, column); - if (hasEnum) { - const enumType = await this.getEnumTypeName(table, column); - const escapedEnumName = `"${enumType.enumTypeSchema}"."${enumType.enumTypeName}"`; - upQueries.push(this.dropEnumTypeSql(table, column, escapedEnumName)); - downQueries.push(this.createEnumTypeSql(table, column, escapedEnumName)); - } - } - - await this.executeQueries(upQueries, downQueries); - - clonedTable.removeColumn(column); - 
this.replaceCachedTable(table, clonedTable); - } - - /** - * Drops the columns in the table. - */ - async dropColumns(tableOrName: Table|string, columns: TableColumn[]): Promise { - await PromiseUtils.runInSequence(columns, column => this.dropColumn(tableOrName, column)); - } - - /** - * Creates a new primary key. - */ - async createPrimaryKey(tableOrName: Table|string, columnNames: string[]): Promise { - const table = tableOrName instanceof Table ? tableOrName : await this.getCachedTable(tableOrName); - const clonedTable = table.clone(); - - const up = this.createPrimaryKeySql(table, columnNames); - - // mark columns as primary, because dropPrimaryKeySql build constraint name from table primary column names. - clonedTable.columns.forEach(column => { - if (columnNames.find(columnName => columnName === column.name)) - column.isPrimary = true; - }); - const down = this.dropPrimaryKeySql(clonedTable); - - await this.executeQueries(up, down); - this.replaceCachedTable(table, clonedTable); - } - - /** - * Updates composite primary keys. - */ - async updatePrimaryKeys(tableOrName: Table|string, columns: TableColumn[]): Promise { - const table = tableOrName instanceof Table ? tableOrName : await this.getCachedTable(tableOrName); - const clonedTable = table.clone(); - const columnNames = columns.map(column => column.name); - const upQueries: Query[] = []; - const downQueries: Query[] = []; - - // if table already have primary columns, we must drop them. - const primaryColumns = clonedTable.primaryColumns; - if (primaryColumns.length > 0) { - const pkName = this.connection.namingStrategy.primaryKeyName(clonedTable.name, primaryColumns.map(column => column.name)); - const columnNamesString = primaryColumns.map(column => `"${column.name}"`).join(", "); - upQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} DROP CONSTRAINT "${pkName}"`)); - downQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} ADD CONSTRAINT "${pkName}" PRIMARY KEY (${columnNamesString})`)); - } - - // update columns in table. - clonedTable.columns - .filter(column => columnNames.indexOf(column.name) !== -1) - .forEach(column => column.isPrimary = true); - - const pkName = this.connection.namingStrategy.primaryKeyName(clonedTable.name, columnNames); - const columnNamesString = columnNames.map(columnName => `"${columnName}"`).join(", "); - upQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} ADD CONSTRAINT "${pkName}" PRIMARY KEY (${columnNamesString})`)); - downQueries.push(new Query(`ALTER TABLE ${this.escapePath(table)} DROP CONSTRAINT "${pkName}"`)); - - await this.executeQueries(upQueries, downQueries); - this.replaceCachedTable(table, clonedTable); - } - - /** - * Drops a primary key. - */ - async dropPrimaryKey(tableOrName: Table|string): Promise { - const table = tableOrName instanceof Table ? tableOrName : await this.getCachedTable(tableOrName); - const up = this.dropPrimaryKeySql(table); - const down = this.createPrimaryKeySql(table, table.primaryColumns.map(column => column.name)); - await this.executeQueries(up, down); - table.primaryColumns.forEach(column => { - column.isPrimary = false; - }); - } - - /** - * Creates new unique constraint. - */ - async createUniqueConstraint(tableOrName: Table|string, uniqueConstraint: TableUnique): Promise { - const table = tableOrName instanceof Table ? tableOrName : await this.getCachedTable(tableOrName); - - // new unique constraint may be passed without name. In this case we generate unique name manually. 
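
A small, hedged example of the fallback naming used here: when a TableUnique (and likewise an index, check or foreign key) is passed without a name, the connection's NamingStrategy generates one. The exact output depends on the configured strategy; with the default strategy it is a prefixed, hash-based name along these lines:

    import { TableUnique } from "typeorm";

    // Name omitted on purpose; createUniqueConstraint fills it in via
    // namingStrategy.uniqueConstraintName(table.name, columnNames).
    await queryRunner.createUniqueConstraint("user", new TableUnique({
        columnNames: ["email"],
    }));
    // -> ALTER TABLE "user" ADD CONSTRAINT "UQ_<generated hash>" UNIQUE ("email")
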
- if (!uniqueConstraint.name) - uniqueConstraint.name = this.connection.namingStrategy.uniqueConstraintName(table.name, uniqueConstraint.columnNames); - - const up = this.createUniqueConstraintSql(table, uniqueConstraint); - const down = this.dropUniqueConstraintSql(table, uniqueConstraint); - await this.executeQueries(up, down); - table.addUniqueConstraint(uniqueConstraint); - } - - /** - * Creates new unique constraints. - */ - async createUniqueConstraints(tableOrName: Table|string, uniqueConstraints: TableUnique[]): Promise { - await PromiseUtils.runInSequence(uniqueConstraints, uniqueConstraint => this.createUniqueConstraint(tableOrName, uniqueConstraint)); - } - - /** - * Drops unique constraint. - */ - async dropUniqueConstraint(tableOrName: Table|string, uniqueOrName: TableUnique|string): Promise { - const table = tableOrName instanceof Table ? tableOrName : await this.getCachedTable(tableOrName); - const uniqueConstraint = uniqueOrName instanceof TableUnique ? uniqueOrName : table.uniques.find(u => u.name === uniqueOrName); - if (!uniqueConstraint) - throw new Error(`Supplied unique constraint was not found in table ${table.name}`); - - const up = this.dropUniqueConstraintSql(table, uniqueConstraint); - const down = this.createUniqueConstraintSql(table, uniqueConstraint); - await this.executeQueries(up, down); - table.removeUniqueConstraint(uniqueConstraint); - } - - /** - * Drops unique constraints. - */ - async dropUniqueConstraints(tableOrName: Table|string, uniqueConstraints: TableUnique[]): Promise { - await PromiseUtils.runInSequence(uniqueConstraints, uniqueConstraint => this.dropUniqueConstraint(tableOrName, uniqueConstraint)); - } - - /** - * Creates new check constraint. - */ - async createCheckConstraint(tableOrName: Table|string, checkConstraint: TableCheck): Promise { - const table = tableOrName instanceof Table ? tableOrName : await this.getCachedTable(tableOrName); - - // new unique constraint may be passed without name. In this case we generate unique name manually. - if (!checkConstraint.name) - checkConstraint.name = this.connection.namingStrategy.checkConstraintName(table.name, checkConstraint.expression!); - - const up = this.createCheckConstraintSql(table, checkConstraint); - const down = this.dropCheckConstraintSql(table, checkConstraint); - await this.executeQueries(up, down); - table.addCheckConstraint(checkConstraint); - } - - /** - * Creates new check constraints. - */ - async createCheckConstraints(tableOrName: Table|string, checkConstraints: TableCheck[]): Promise { - const promises = checkConstraints.map(checkConstraint => this.createCheckConstraint(tableOrName, checkConstraint)); - await Promise.all(promises); - } - - /** - * Drops check constraint. - */ - async dropCheckConstraint(tableOrName: Table|string, checkOrName: TableCheck|string): Promise { - const table = tableOrName instanceof Table ? tableOrName : await this.getCachedTable(tableOrName); - const checkConstraint = checkOrName instanceof TableCheck ? checkOrName : table.checks.find(c => c.name === checkOrName); - if (!checkConstraint) - throw new Error(`Supplied check constraint was not found in table ${table.name}`); - - const up = this.dropCheckConstraintSql(table, checkConstraint); - const down = this.createCheckConstraintSql(table, checkConstraint); - await this.executeQueries(up, down); - table.removeCheckConstraint(checkConstraint); - } - - /** - * Drops check constraints. 
- */ - async dropCheckConstraints(tableOrName: Table|string, checkConstraints: TableCheck[]): Promise { - const promises = checkConstraints.map(checkConstraint => this.dropCheckConstraint(tableOrName, checkConstraint)); - await Promise.all(promises); - } - - /** - * Creates new exclusion constraint. - */ - async createExclusionConstraint(tableOrName: Table|string, exclusionConstraint: TableExclusion): Promise { - const table = tableOrName instanceof Table ? tableOrName : await this.getCachedTable(tableOrName); - - // new unique constraint may be passed without name. In this case we generate unique name manually. - if (!exclusionConstraint.name) - exclusionConstraint.name = this.connection.namingStrategy.exclusionConstraintName(table.name, exclusionConstraint.expression!); - - const up = this.createExclusionConstraintSql(table, exclusionConstraint); - const down = this.dropExclusionConstraintSql(table, exclusionConstraint); - await this.executeQueries(up, down); - table.addExclusionConstraint(exclusionConstraint); - } - - /** - * Creates new exclusion constraints. - */ - async createExclusionConstraints(tableOrName: Table|string, exclusionConstraints: TableExclusion[]): Promise { - const promises = exclusionConstraints.map(exclusionConstraint => this.createExclusionConstraint(tableOrName, exclusionConstraint)); - await Promise.all(promises); - } - - /** - * Drops exclusion constraint. - */ - async dropExclusionConstraint(tableOrName: Table|string, exclusionOrName: TableExclusion|string): Promise { - const table = tableOrName instanceof Table ? tableOrName : await this.getCachedTable(tableOrName); - const exclusionConstraint = exclusionOrName instanceof TableExclusion ? exclusionOrName : table.exclusions.find(c => c.name === exclusionOrName); - if (!exclusionConstraint) - throw new Error(`Supplied exclusion constraint was not found in table ${table.name}`); - - const up = this.dropExclusionConstraintSql(table, exclusionConstraint); - const down = this.createExclusionConstraintSql(table, exclusionConstraint); - await this.executeQueries(up, down); - table.removeExclusionConstraint(exclusionConstraint); - } - - /** - * Drops exclusion constraints. - */ - async dropExclusionConstraints(tableOrName: Table|string, exclusionConstraints: TableExclusion[]): Promise { - const promises = exclusionConstraints.map(exclusionConstraint => this.dropExclusionConstraint(tableOrName, exclusionConstraint)); - await Promise.all(promises); - } - - /** - * Creates a new foreign key. - */ - async createForeignKey(tableOrName: Table|string, foreignKey: TableForeignKey): Promise { - const table = tableOrName instanceof Table ? tableOrName : await this.getCachedTable(tableOrName); - - // new FK may be passed without name. In this case we generate FK name manually. - if (!foreignKey.name) - foreignKey.name = this.connection.namingStrategy.foreignKeyName(table.name, foreignKey.columnNames, foreignKey.referencedTableName, foreignKey.referencedColumnNames); - - const up = this.createForeignKeySql(table, foreignKey); - const down = this.dropForeignKeySql(table, foreignKey); - await this.executeQueries(up, down); - table.addForeignKey(foreignKey); - } - - /** - * Creates a new foreign keys. - */ - async createForeignKeys(tableOrName: Table|string, foreignKeys: TableForeignKey[]): Promise { - await PromiseUtils.runInSequence(foreignKeys, foreignKey => this.createForeignKey(tableOrName, foreignKey)); - } - - /** - * Drops a foreign key from the table. 
- */ - async dropForeignKey(tableOrName: Table|string, foreignKeyOrName: TableForeignKey|string): Promise { - const table = tableOrName instanceof Table ? tableOrName : await this.getCachedTable(tableOrName); - const foreignKey = foreignKeyOrName instanceof TableForeignKey ? foreignKeyOrName : table.foreignKeys.find(fk => fk.name === foreignKeyOrName); - if (!foreignKey) - throw new Error(`Supplied foreign key was not found in table ${table.name}`); - - const up = this.dropForeignKeySql(table, foreignKey); - const down = this.createForeignKeySql(table, foreignKey); - await this.executeQueries(up, down); - table.removeForeignKey(foreignKey); - } - - /** - * Drops a foreign keys from the table. - */ - async dropForeignKeys(tableOrName: Table|string, foreignKeys: TableForeignKey[]): Promise { - await PromiseUtils.runInSequence(foreignKeys, foreignKey => this.dropForeignKey(tableOrName, foreignKey)); - } - - /** - * Creates a new index. - */ - async createIndex(tableOrName: Table|string, index: TableIndex): Promise { - const table = tableOrName instanceof Table ? tableOrName : await this.getCachedTable(tableOrName); - - // new index may be passed without name. In this case we generate index name manually. - if (!index.name) - index.name = this.connection.namingStrategy.indexName(table.name, index.columnNames, index.where); - - const up = this.createIndexSql(table, index); - const down = this.dropIndexSql(table, index); - await this.executeQueries(up, down); - table.addIndex(index); - } - - /** - * Creates a new indices - */ - async createIndices(tableOrName: Table|string, indices: TableIndex[]): Promise { - await PromiseUtils.runInSequence(indices, index => this.createIndex(tableOrName, index)); - } - - /** - * Drops an index from the table. - */ - async dropIndex(tableOrName: Table|string, indexOrName: TableIndex|string): Promise { - const table = tableOrName instanceof Table ? tableOrName : await this.getCachedTable(tableOrName); - const index = indexOrName instanceof TableIndex ? indexOrName : table.indices.find(i => i.name === indexOrName); - if (!index) - throw new Error(`Supplied index was not found in table ${table.name}`); - - const up = this.dropIndexSql(table, index); - const down = this.createIndexSql(table, index); - await this.executeQueries(up, down); - table.removeIndex(index); - } - - /** - * Drops an indices from the table. - */ - async dropIndices(tableOrName: Table|string, indices: TableIndex[]): Promise { - await PromiseUtils.runInSequence(indices, index => this.dropIndex(tableOrName, index)); - } - - /** - * Clears all table contents. - * Note: this operation uses SQL's TRUNCATE query which cannot be reverted in transactions. - */ - async clearTable(tableName: string): Promise { - await this.query(`TRUNCATE TABLE ${this.escapePath(tableName)}`); - } - - /** - * Removes all tables from the currently connected database. - */ - async clearDatabase(): Promise { - const schemas: string[] = []; - this.connection.entityMetadatas - .filter(metadata => metadata.schema) - .forEach(metadata => { - const isSchemaExist = !!schemas.find(schema => schema === metadata.schema); - if (!isSchemaExist) - schemas.push(metadata.schema!); - }); - schemas.push(this.driver.options.database || "current_schema()"); - const schemaNamesString = schemas.map(name => { - return name === "current_schema()" ? 
name : "'" + name + "'"; - }).join(", "); - - await this.startTransaction(); - try { - const selectViewDropsQuery = `SELECT 'DROP VIEW IF EXISTS "' || schemaname || '"."' || viewname || '" CASCADE;' as "query" ` + - `FROM "pg_views" WHERE "schemaname" IN (${schemaNamesString}) AND "viewname" NOT IN ('geography_columns', 'geometry_columns', 'raster_columns', 'raster_overviews')`; - const dropViewQueries: ObjectLiteral[] = await this.query(selectViewDropsQuery); - await Promise.all(dropViewQueries.map(q => this.query(q["query"]))); - - // ignore spatial_ref_sys; it's a special table supporting PostGIS - // TODO generalize this as this.driver.ignoreTables - const selectTableDropsQuery = `SELECT 'DROP TABLE IF EXISTS "' || schemaname || '"."' || tablename || '" CASCADE;' as "query" FROM "pg_tables" WHERE "schemaname" IN (${schemaNamesString}) AND "tablename" NOT IN ('spatial_ref_sys')`; - const dropTableQueries: ObjectLiteral[] = await this.query(selectTableDropsQuery); - await Promise.all(dropTableQueries.map(q => this.query(q["query"]))); - await this.dropEnumTypes(schemaNamesString); - - await this.commitTransaction(); - - } catch (error) { - try { // we throw original error even if rollback thrown an error - await this.rollbackTransaction(); - } catch (rollbackError) { } - throw error; - } - } - - // ------------------------------------------------------------------------- - // Protected Methods - // ------------------------------------------------------------------------- - - protected async loadViews(viewNames: string[]): Promise { - const hasTable = await this.hasTable(this.getTypeormMetadataTableName()); - if (!hasTable) - return Promise.resolve([]); - - const currentSchemaQuery = await this.query(`SELECT * FROM current_schema()`); - const currentSchema = currentSchemaQuery[0]["current_schema"]; - - const viewsCondition = viewNames.map(viewName => { - let [schema, name] = viewName.split("."); - if (!name) { - name = schema; - schema = this.driver.options.database || currentSchema; - } - return `("t"."schema" = '${schema}' AND "t"."name" = '${name}')`; - }).join(" OR "); - - const query = `SELECT "t".*, "v"."check_option" FROM ${this.escapePath(this.getTypeormMetadataTableName())} "t" ` + - `INNER JOIN "information_schema"."views" "v" ON "v"."table_schema" = "t"."schema" AND "v"."table_name" = "t"."name" WHERE "t"."type" = 'VIEW' ${viewsCondition ? `AND (${viewsCondition})` : ""}`; - const dbViews = await this.query(query); - return dbViews.map((dbView: any) => { - const view = new View(); - const schema = dbView["schema"] === currentSchema && !this.driver.options.database ? undefined : dbView["schema"]; - view.name = this.driver.buildTableName(dbView["name"], schema); - view.expression = dbView["value"]; - return view; - }); - } - - /** - * Loads all tables (with given names) from the database and creates a Table from them. 
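Both loaders resolve qualified names the same way before querying the catalogs: split on ".", and fall back to the configured database (or current_schema()) when no schema is given. A self-contained sketch of that resolution rule, illustrative only and not code from the patch:

    // Mirrors the "schema.name" handling used by loadViews/loadTables above.
    function resolveName(qualified: string, fallbackSchema: string): { schema: string; name: string } {
        const parts = qualified.split(".");
        return parts.length === 2
            ? { schema: parts[0], name: parts[1] }
            : { schema: fallbackSchema, name: parts[0] };
    }

    resolveName("public.photo", "public"); // { schema: "public", name: "photo" }
    resolveName("photo", "public");        // { schema: "public", name: "photo" }
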
- */ - protected async loadTables(tableNames: string[]): Promise { - - // if no tables given then no need to proceed - if (!tableNames || !tableNames.length) - return []; - - const currentSchemaQuery = await this.query(`SELECT * FROM current_schema()`); - const currentSchema = currentSchemaQuery[0]["current_schema"]; - - const tablesCondition = tableNames.map(tableName => { - let [schema, name] = tableName.split("."); - if (!name) { - name = schema; - schema = this.driver.options.database || currentSchema; - } - return `("table_schema" = '${schema}' AND "table_name" = '${name}')`; - }).join(" OR "); - const tablesSql = `SELECT * FROM "information_schema"."tables" WHERE ` + tablesCondition; - const columnsSql = `SELECT *, ('"' || "udt_schema" || '"."' || "udt_name" || '"')::"regtype" AS "regtype" FROM "information_schema"."columns" WHERE ` + tablesCondition; - - const constraintsCondition = tableNames.map(tableName => { - let [schema, name] = tableName.split("."); - if (!name) { - name = schema; - schema = this.driver.options.database || currentSchema; - } - return `("ns"."nspname" = '${schema}' AND "t"."relname" = '${name}')`; - }).join(" OR "); - - const constraintsSql = `SELECT "ns"."nspname" AS "table_schema", "t"."relname" AS "table_name", "cnst"."conname" AS "constraint_name", ` + - `pg_get_constraintdef("cnst"."oid") AS "expression", ` + - `CASE "cnst"."contype" WHEN 'p' THEN 'PRIMARY' WHEN 'u' THEN 'UNIQUE' WHEN 'c' THEN 'CHECK' WHEN 'x' THEN 'EXCLUDE' END AS "constraint_type", "a"."attname" AS "column_name" ` + - `FROM "pg_constraint" "cnst" ` + - `INNER JOIN "pg_class" "t" ON "t"."oid" = "cnst"."conrelid" ` + - `INNER JOIN "pg_namespace" "ns" ON "ns"."oid" = "cnst"."connamespace" ` + - `LEFT JOIN "pg_attribute" "a" ON "a"."attrelid" = "cnst"."conrelid" AND "a"."attnum" = ANY ("cnst"."conkey") ` + - `WHERE "t"."relkind" = 'r' AND (${constraintsCondition})`; - - const indicesSql = `SELECT "ns"."nspname" AS "table_schema", "t"."relname" AS "table_name", "i"."relname" AS "constraint_name", "a"."attname" AS "column_name", ` + - `CASE "ix"."indisunique" WHEN 't' THEN 'TRUE' ELSE'FALSE' END AS "is_unique", pg_get_expr("ix"."indpred", "ix"."indrelid") AS "condition", ` + - `"types"."typname" AS "type_name" ` + - `FROM "pg_class" "t" ` + - `INNER JOIN "pg_index" "ix" ON "ix"."indrelid" = "t"."oid" ` + - `INNER JOIN "pg_attribute" "a" ON "a"."attrelid" = "t"."oid" AND "a"."attnum" = ANY ("ix"."indkey") ` + - `INNER JOIN "pg_namespace" "ns" ON "ns"."oid" = "t"."relnamespace" ` + - `INNER JOIN "pg_class" "i" ON "i"."oid" = "ix"."indexrelid" ` + - `INNER JOIN "pg_type" "types" ON "types"."oid" = "a"."atttypid" ` + - `LEFT JOIN "pg_constraint" "cnst" ON "cnst"."conname" = "i"."relname" ` + - `WHERE "t"."relkind" = 'r' AND "cnst"."contype" IS NULL AND (${constraintsCondition})`; - - const foreignKeysCondition = tableNames.map(tableName => { - let [schema, name] = tableName.split("."); - if (!name) { - name = schema; - schema = this.driver.options.database || currentSchema; - } - return `("ns"."nspname" = '${schema}' AND "cl"."relname" = '${name}')`; - }).join(" OR "); - const foreignKeysSql = `SELECT "con"."conname" AS "constraint_name", "con"."nspname" AS "table_schema", "con"."relname" AS "table_name", "att2"."attname" AS "column_name", ` + - `"ns"."nspname" AS "referenced_table_schema", "cl"."relname" AS "referenced_table_name", "att"."attname" AS "referenced_column_name", "con"."confdeltype" AS "on_delete", ` + - `"con"."confupdtype" AS "on_update", "con"."condeferrable" AS "deferrable", 
"con"."condeferred" AS "deferred" ` + - `FROM ( ` + - `SELECT UNNEST ("con1"."conkey") AS "parent", UNNEST ("con1"."confkey") AS "child", "con1"."confrelid", "con1"."conrelid", "con1"."conname", "con1"."contype", "ns"."nspname", ` + - `"cl"."relname", "con1"."condeferrable", ` + - `CASE WHEN "con1"."condeferred" THEN 'INITIALLY DEFERRED' ELSE 'INITIALLY IMMEDIATE' END as condeferred, ` + - `CASE "con1"."confdeltype" WHEN 'a' THEN 'NO ACTION' WHEN 'r' THEN 'RESTRICT' WHEN 'c' THEN 'CASCADE' WHEN 'n' THEN 'SET NULL' WHEN 'd' THEN 'SET DEFAULT' END as "confdeltype", ` + - `CASE "con1"."confupdtype" WHEN 'a' THEN 'NO ACTION' WHEN 'r' THEN 'RESTRICT' WHEN 'c' THEN 'CASCADE' WHEN 'n' THEN 'SET NULL' WHEN 'd' THEN 'SET DEFAULT' END as "confupdtype" ` + - `FROM "pg_class" "cl" ` + - `INNER JOIN "pg_namespace" "ns" ON "cl"."relnamespace" = "ns"."oid" ` + - `INNER JOIN "pg_constraint" "con1" ON "con1"."conrelid" = "cl"."oid" ` + - `WHERE "con1"."contype" = 'f' AND (${foreignKeysCondition}) ` + - `) "con" ` + - `INNER JOIN "pg_attribute" "att" ON "att"."attrelid" = "con"."confrelid" AND "att"."attnum" = "con"."child" ` + - `INNER JOIN "pg_class" "cl" ON "cl"."oid" = "con"."confrelid" ` + - `INNER JOIN "pg_namespace" "ns" ON "cl"."relnamespace" = "ns"."oid" ` + - `INNER JOIN "pg_attribute" "att2" ON "att2"."attrelid" = "con"."conrelid" AND "att2"."attnum" = "con"."parent"`; - const [dbTables, dbColumns, dbConstraints, dbIndices, dbForeignKeys]: ObjectLiteral[][] = await Promise.all([ - this.query(tablesSql), - this.query(columnsSql), - this.query(constraintsSql), - this.query(indicesSql), - this.query(foreignKeysSql), - ]); - - // if tables were not found in the db, no need to proceed - if (!dbTables.length) - return []; - - // create tables for loaded tables - return Promise.all(dbTables.map(async dbTable => { - const table = new Table(); - - // We do not need to join schema name, when database is by default. - // In this case we need local variable `tableFullName` for below comparision. - const schema = dbTable["table_schema"] === currentSchema && !this.driver.options.database ? 
undefined : dbTable["table_schema"]; - table.name = this.driver.buildTableName(dbTable["table_name"], schema); - const tableFullName = this.driver.buildTableName(dbTable["table_name"], dbTable["table_schema"]); - - // create columns from the loaded columns - table.columns = await Promise.all(dbColumns - .filter(dbColumn => this.driver.buildTableName(dbColumn["table_name"], dbColumn["table_schema"]) === tableFullName) - .map(async dbColumn => { - - const columnConstraints = dbConstraints.filter(dbConstraint => { - return this.driver.buildTableName(dbConstraint["table_name"], dbConstraint["table_schema"]) === tableFullName && dbConstraint["column_name"] === dbColumn["column_name"]; - }); - - const tableColumn = new TableColumn(); - tableColumn.name = dbColumn["column_name"]; - tableColumn.type = dbColumn["regtype"].toLowerCase(); - - if (tableColumn.type === "numeric" || tableColumn.type === "decimal" || tableColumn.type === "float") { - // If one of these properties was set, and another was not, Postgres sets '0' in to unspecified property - // we set 'undefined' in to unspecified property to avoid changing column on sync - if (dbColumn["numeric_precision"] !== null && !this.isDefaultColumnPrecision(table, tableColumn, dbColumn["numeric_precision"])) { - tableColumn.precision = dbColumn["numeric_precision"]; - } else if (dbColumn["numeric_scale"] !== null && !this.isDefaultColumnScale(table, tableColumn, dbColumn["numeric_scale"])) { - tableColumn.precision = undefined; - } - if (dbColumn["numeric_scale"] !== null && !this.isDefaultColumnScale(table, tableColumn, dbColumn["numeric_scale"])) { - tableColumn.scale = dbColumn["numeric_scale"]; - } else if (dbColumn["numeric_precision"] !== null && !this.isDefaultColumnPrecision(table, tableColumn, dbColumn["numeric_precision"])) { - tableColumn.scale = undefined; - } - } - - if (dbColumn["data_type"].toLowerCase() === "array") { - tableColumn.isArray = true; - const type = tableColumn.type.replace("[]", ""); - tableColumn.type = this.connection.driver.normalizeType({type: type}); - } - - if (tableColumn.type === "interval" - || tableColumn.type === "time without time zone" - || tableColumn.type === "time with time zone" - || tableColumn.type === "timestamp without time zone" - || tableColumn.type === "timestamp with time zone") { - tableColumn.precision = !this.isDefaultColumnPrecision(table, tableColumn, dbColumn["datetime_precision"]) ? 
dbColumn["datetime_precision"] : undefined; - } - - if (tableColumn.type.indexOf("enum") !== -1) { - tableColumn.type = "enum"; - const sql = `SELECT "e"."enumlabel" AS "value" FROM "pg_enum" "e" ` + - `INNER JOIN "pg_type" "t" ON "t"."oid" = "e"."enumtypid" ` + - `INNER JOIN "pg_namespace" "n" ON "n"."oid" = "t"."typnamespace" ` + - `WHERE "n"."nspname" = '${dbTable["table_schema"]}' AND "t"."typname" = '${this.buildEnumName(table, tableColumn.name, false, true)}'`; - const results: ObjectLiteral[] = await this.query(sql); - tableColumn.enum = results.map(result => result["value"]); - } - - if (tableColumn.type === "geometry") { - const geometryColumnSql = `SELECT * FROM ( - SELECT - "f_table_schema" "table_schema", - "f_table_name" "table_name", - "f_geometry_column" "column_name", - "srid", - "type" - FROM "geometry_columns" - ) AS _ - WHERE (${tablesCondition}) AND "column_name" = '${tableColumn.name}' AND "table_name" = '${table.name}'`; - - const results: ObjectLiteral[] = await this.query(geometryColumnSql); - tableColumn.spatialFeatureType = results[0].type; - tableColumn.srid = results[0].srid; - } - - if (tableColumn.type === "geography") { - const geographyColumnSql = `SELECT * FROM ( - SELECT - "f_table_schema" "table_schema", - "f_table_name" "table_name", - "f_geography_column" "column_name", - "srid", - "type" - FROM "geography_columns" - ) AS _ - WHERE (${tablesCondition}) AND "column_name" = '${tableColumn.name}' AND "table_name" = '${table.name}'`; - - const results: ObjectLiteral[] = await this.query(geographyColumnSql); - tableColumn.spatialFeatureType = results[0].type; - tableColumn.srid = results[0].srid; - } - - // check only columns that have length property - if (this.driver.withLengthColumnTypes.indexOf(tableColumn.type as ColumnType) !== -1 && dbColumn["character_maximum_length"]) { - const length = dbColumn["character_maximum_length"].toString(); - tableColumn.length = !this.isDefaultColumnLength(table, tableColumn, length) ? length : ""; - } - tableColumn.isNullable = dbColumn["is_nullable"] === "YES"; - tableColumn.isPrimary = !!columnConstraints.find(constraint => constraint["constraint_type"] === "PRIMARY"); - - const uniqueConstraint = columnConstraints.find(constraint => constraint["constraint_type"] === "UNIQUE"); - const isConstraintComposite = uniqueConstraint - ? 
!!dbConstraints.find(dbConstraint => dbConstraint["constraint_type"] === "UNIQUE" - && dbConstraint["constraint_name"] === uniqueConstraint["constraint_name"] - && dbConstraint["column_name"] !== dbColumn["column_name"]) - : false; - tableColumn.isUnique = !!uniqueConstraint && !isConstraintComposite; - - if (dbColumn["column_default"] !== null && dbColumn["column_default"] !== undefined) { - if (dbColumn["column_default"].replace(/"/gi, "") === `nextval('${this.buildSequenceName(table, dbColumn["column_name"], currentSchema, true)}'::regclass)`) { - tableColumn.isGenerated = true; - tableColumn.generationStrategy = "increment"; - } else if (dbColumn["column_default"] === "gen_random_uuid()" || /^uuid_generate_v\d\(\)/.test(dbColumn["column_default"])) { - tableColumn.isGenerated = true; - tableColumn.generationStrategy = "uuid"; - } else { - tableColumn.default = dbColumn["column_default"].replace(/::.*/, ""); - } - } - - tableColumn.comment = ""; // dbColumn["COLUMN_COMMENT"]; - if (dbColumn["character_set_name"]) - tableColumn.charset = dbColumn["character_set_name"]; - if (dbColumn["collation_name"]) - tableColumn.collation = dbColumn["collation_name"]; - return tableColumn; - })); - - // find unique constraints of table, group them by constraint name and build TableUnique. - const tableUniqueConstraints = OrmUtils.uniq(dbConstraints.filter(dbConstraint => { - return this.driver.buildTableName(dbConstraint["table_name"], dbConstraint["table_schema"]) === tableFullName - && dbConstraint["constraint_type"] === "UNIQUE"; - }), dbConstraint => dbConstraint["constraint_name"]); - - table.uniques = tableUniqueConstraints.map(constraint => { - const uniques = dbConstraints.filter(dbC => dbC["constraint_name"] === constraint["constraint_name"]); - return new TableUnique({ - name: constraint["constraint_name"], - columnNames: uniques.map(u => u["column_name"]) - }); - }); - - // find check constraints of table, group them by constraint name and build TableCheck. - const tableCheckConstraints = OrmUtils.uniq(dbConstraints.filter(dbConstraint => { - return this.driver.buildTableName(dbConstraint["table_name"], dbConstraint["table_schema"]) === tableFullName - && dbConstraint["constraint_type"] === "CHECK"; - }), dbConstraint => dbConstraint["constraint_name"]); - - table.checks = tableCheckConstraints.map(constraint => { - const checks = dbConstraints.filter(dbC => dbC["constraint_name"] === constraint["constraint_name"]); - return new TableCheck({ - name: constraint["constraint_name"], - columnNames: checks.map(c => c["column_name"]), - expression: constraint["expression"].replace(/^\s*CHECK\s*\((.*)\)\s*$/i, "$1") - }); - }); - - // find exclusion constraints of table, group them by constraint name and build TableExclusion. - const tableExclusionConstraints = OrmUtils.uniq(dbConstraints.filter(dbConstraint => { - return this.driver.buildTableName(dbConstraint["table_name"], dbConstraint["table_schema"]) === tableFullName - && dbConstraint["constraint_type"] === "EXCLUDE"; - }), dbConstraint => dbConstraint["constraint_name"]); - - table.exclusions = tableExclusionConstraints.map(constraint => { - return new TableExclusion({ - name: constraint["constraint_name"], - expression: constraint["expression"].substring(8) // trim EXCLUDE from start of expression - }); - }); - - // find foreign key constraints of table, group them by constraint name and build TableForeignKey. 
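The unique, check and exclusion blocks above (and the foreign-key block that follows) reduce the flat catalog rows the same way: deduplicate by constraint name, then collect the columns that share that name. A standalone sketch of the idea, with a made-up row shape rather than the real catalog columns:

    interface ConstraintRow { constraint_name: string; column_name: string; }

    // Illustrative only; mirrors the OrmUtils.uniq + filter pattern used above.
    function groupColumnsByConstraint(rows: ConstraintRow[]): Map<string, string[]> {
        const grouped = new Map<string, string[]>();
        for (const row of rows) {
            const columns = grouped.get(row.constraint_name) ?? [];
            columns.push(row.column_name);
            grouped.set(row.constraint_name, columns);
        }
        return grouped;
    }
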
- const tableForeignKeyConstraints = OrmUtils.uniq(dbForeignKeys.filter(dbForeignKey => { - return this.driver.buildTableName(dbForeignKey["table_name"], dbForeignKey["table_schema"]) === tableFullName; - }), dbForeignKey => dbForeignKey["constraint_name"]); - - table.foreignKeys = tableForeignKeyConstraints.map(dbForeignKey => { - const foreignKeys = dbForeignKeys.filter(dbFk => dbFk["constraint_name"] === dbForeignKey["constraint_name"]); - - // if referenced table located in currently used schema, we don't need to concat schema name to table name. - const schema = dbForeignKey["referenced_table_schema"] === currentSchema ? undefined : dbForeignKey["referenced_table_schema"]; - const referencedTableName = this.driver.buildTableName(dbForeignKey["referenced_table_name"], schema); - - return new TableForeignKey({ - name: dbForeignKey["constraint_name"], - columnNames: foreignKeys.map(dbFk => dbFk["column_name"]), - referencedTableName: referencedTableName, - referencedColumnNames: foreignKeys.map(dbFk => dbFk["referenced_column_name"]), - onDelete: dbForeignKey["on_delete"], - onUpdate: dbForeignKey["on_update"], - deferrable: dbForeignKey["deferrable"] ? dbForeignKey["deferred"] : undefined, - }); - }); - - // find index constraints of table, group them by constraint name and build TableIndex. - const tableIndexConstraints = OrmUtils.uniq(dbIndices.filter(dbIndex => { - return this.driver.buildTableName(dbIndex["table_name"], dbIndex["table_schema"]) === tableFullName; - }), dbIndex => dbIndex["constraint_name"]); - - table.indices = tableIndexConstraints.map(constraint => { - const indices = dbIndices.filter(index => { - return index["table_schema"] === constraint["table_schema"] - && index["table_name"] === constraint["table_name"] - && index["constraint_name"] === constraint["constraint_name"]; - }); - return new TableIndex({ - table: table, - name: constraint["constraint_name"], - columnNames: indices.map(i => i["column_name"]), - isUnique: constraint["is_unique"] === "TRUE", - where: constraint["condition"], - isSpatial: indices.every(i => this.driver.spatialTypes.indexOf(i["type_name"]) >= 0), - isFulltext: false - }); - }); - - return table; - })); - } - - /** - * Builds create table sql. - */ - protected createTableSql(table: Table, createForeignKeys?: boolean): Query { - const columnDefinitions = table.columns.map(column => this.buildCreateColumnSql(table, column)).join(", "); - let sql = `CREATE TABLE ${this.escapePath(table)} (${columnDefinitions}`; - - table.columns - .filter(column => column.isUnique) - .forEach(column => { - const isUniqueExist = table.uniques.some(unique => unique.columnNames.length === 1 && unique.columnNames[0] === column.name); - if (!isUniqueExist) - table.uniques.push(new TableUnique({ - name: this.connection.namingStrategy.uniqueConstraintName(table.name, [column.name]), - columnNames: [column.name] - })); - }); - - if (table.uniques.length > 0) { - const uniquesSql = table.uniques.map(unique => { - const uniqueName = unique.name ? unique.name : this.connection.namingStrategy.uniqueConstraintName(table.name, unique.columnNames); - const columnNames = unique.columnNames.map(columnName => `"${columnName}"`).join(", "); - return `CONSTRAINT "${uniqueName}" UNIQUE (${columnNames})`; - }).join(", "); - - sql += `, ${uniquesSql}`; - } - - if (table.checks.length > 0) { - const checksSql = table.checks.map(check => { - const checkName = check.name ? 
check.name : this.connection.namingStrategy.checkConstraintName(table.name, check.expression!); - return `CONSTRAINT "${checkName}" CHECK (${check.expression})`; - }).join(", "); - - sql += `, ${checksSql}`; - } - - if (table.exclusions.length > 0) { - const exclusionsSql = table.exclusions.map(exclusion => { - const exclusionName = exclusion.name ? exclusion.name : this.connection.namingStrategy.exclusionConstraintName(table.name, exclusion.expression!); - return `CONSTRAINT "${exclusionName}" EXCLUDE ${exclusion.expression}`; - }).join(", "); - - sql += `, ${exclusionsSql}`; - } - - if (table.foreignKeys.length > 0 && createForeignKeys) { - const foreignKeysSql = table.foreignKeys.map(fk => { - const columnNames = fk.columnNames.map(columnName => `"${columnName}"`).join(", "); - if (!fk.name) - fk.name = this.connection.namingStrategy.foreignKeyName(table.name, fk.columnNames, fk.referencedTableName, fk.referencedColumnNames); - const referencedColumnNames = fk.referencedColumnNames.map(columnName => `"${columnName}"`).join(", "); - - let constraint = `CONSTRAINT "${fk.name}" FOREIGN KEY (${columnNames}) REFERENCES ${this.escapePath(fk.referencedTableName)} (${referencedColumnNames})`; - if (fk.onDelete) - constraint += ` ON DELETE ${fk.onDelete}`; - if (fk.onUpdate) - constraint += ` ON UPDATE ${fk.onUpdate}`; - if (fk.deferrable) - constraint += ` DEFERRABLE ${fk.deferrable}`; - - return constraint; - }).join(", "); - - sql += `, ${foreignKeysSql}`; - } - - const primaryColumns = table.columns.filter(column => column.isPrimary); - if (primaryColumns.length > 0) { - const primaryKeyName = this.connection.namingStrategy.primaryKeyName(table.name, primaryColumns.map(column => column.name)); - const columnNames = primaryColumns.map(column => `"${column.name}"`).join(", "); - sql += `, CONSTRAINT "${primaryKeyName}" PRIMARY KEY (${columnNames})`; - } - - sql += `)`; - - return new Query(sql); - } - - /** - * Builds drop table sql. - */ - protected dropTableSql(tableOrPath: Table|string): Query { - return new Query(`DROP TABLE ${this.escapePath(tableOrPath)}`); - } - - protected createViewSql(view: View): Query { - const materializedClause = view.materialized ? "MATERIALIZED " : ""; - const viewName = this.escapePath(view); - - if (typeof view.expression === "string") { - return new Query(`CREATE ${materializedClause}VIEW ${viewName} AS ${view.expression}`); - } else { - return new Query(`CREATE ${materializedClause}VIEW ${viewName} AS ${view.expression(this.connection).getQuery()}`); - } - } - - protected async insertViewDefinitionSql(view: View): Promise { - const currentSchemaQuery = await this.query(`SELECT * FROM current_schema()`); - const currentSchema = currentSchemaQuery[0]["current_schema"]; - const splittedName = view.name.split("."); - let schema = this.driver.options.database || currentSchema; - let name = view.name; - if (splittedName.length === 2) { - schema = splittedName[0]; - name = splittedName[1]; - } - - const expression = typeof view.expression === "string" ? view.expression.trim() : view.expression(this.connection).getQuery(); - const [query, parameters] = this.connection.createQueryBuilder() - .insert() - .into(this.getTypeormMetadataTableName()) - .values({ type: "VIEW", schema: schema, name: name, value: expression }) - .getQueryAndParameters(); - - return new Query(query, parameters); - } - - /** - * Builds drop view sql. 
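To make the shape of createTableSql's output concrete, here is a hedged example with invented names (not taken from the patch or its tests):

    import { Table } from "typeorm";

    // Hypothetical table definition.
    const table = new Table({
        name: "category",
        columns: [
            { name: "id", type: "int", isPrimary: true, isGenerated: true, generationStrategy: "increment" },
            { name: "name", type: "varchar", isUnique: true },
        ],
    });
    // createTableSql(table) would produce roughly:
    //   CREATE TABLE "category" ("id" SERIAL NOT NULL, "name" varchar NOT NULL,
    //     CONSTRAINT "UQ_<generated>" UNIQUE ("name"),
    //     CONSTRAINT "PK_<generated>" PRIMARY KEY ("id"))
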
- */ - protected dropViewSql(viewOrPath: View|string): Query { - return new Query(`DROP VIEW ${this.escapePath(viewOrPath)}`); - } - - /** - * Builds remove view sql. - */ - protected async deleteViewDefinitionSql(viewOrPath: View|string): Promise { - const currentSchemaQuery = await this.query(`SELECT * FROM current_schema()`); - const currentSchema = currentSchemaQuery[0]["current_schema"]; - const viewName = viewOrPath instanceof View ? viewOrPath.name : viewOrPath; - const splittedName = viewName.split("."); - let schema = this.driver.options.database || currentSchema; - let name = viewName; - if (splittedName.length === 2) { - schema = splittedName[0]; - name = splittedName[1]; - } - - const qb = this.connection.createQueryBuilder(); - const [query, parameters] = qb.delete() - .from(this.getTypeormMetadataTableName()) - .where(`${qb.escape("type")} = 'VIEW'`) - .andWhere(`${qb.escape("schema")} = :schema`, { schema }) - .andWhere(`${qb.escape("name")} = :name`, { name }) - .getQueryAndParameters(); - - return new Query(query, parameters); - } - - /** - * Extracts schema name from given Table object or table name string. - */ - protected extractSchema(target: Table|string): string|undefined { - const tableName = target instanceof Table ? target.name : target; - return tableName.indexOf(".") === -1 ? this.driver.options.database : tableName.split(".")[0]; - } - - /** - * Drops ENUM type from given schemas. - */ - protected async dropEnumTypes(schemaNames: string): Promise { - const selectDropsQuery = `SELECT 'DROP TYPE IF EXISTS "' || n.nspname || '"."' || t.typname || '" CASCADE;' as "query" FROM "pg_type" "t" ` + - `INNER JOIN "pg_enum" "e" ON "e"."enumtypid" = "t"."oid" ` + - `INNER JOIN "pg_namespace" "n" ON "n"."oid" = "t"."typnamespace" ` + - `WHERE "n"."nspname" IN (${schemaNames}) GROUP BY "n"."nspname", "t"."typname"`; - const dropQueries: ObjectLiteral[] = await this.query(selectDropsQuery); - await Promise.all(dropQueries.map(q => this.query(q["query"]))); - } - - /** - * Checks if enum with the given name exist in the database. - */ - protected async hasEnumType(table: Table, column: TableColumn): Promise { - const schema = this.parseTableName(table).schema; - const enumName = this.buildEnumName(table, column, false, true); - const sql = `SELECT "n"."nspname", "t"."typname" FROM "pg_type" "t" ` + - `INNER JOIN "pg_namespace" "n" ON "n"."oid" = "t"."typnamespace" ` + - `WHERE "n"."nspname" = ${schema} AND "t"."typname" = '${enumName}'`; - const result = await this.query(sql); - return result.length ? true : false; - } - - /** - * Builds create ENUM type sql. - */ - protected createEnumTypeSql(table: Table, column: TableColumn, enumName?: string): Query { - if (!enumName) - enumName = this.buildEnumName(table, column); - const enumValues = column.enum!.map(value => `'${value.replace("'", "''")}'`).join(", "); - return new Query(`CREATE TYPE ${enumName} AS ENUM(${enumValues})`); - } - - /** - * Builds create ENUM type sql. - */ - protected dropEnumTypeSql(table: Table, column: TableColumn, enumName?: string): Query { - if (!enumName) - enumName = this.buildEnumName(table, column); - return new Query(`DROP TYPE ${enumName}`); - } - - /** - * Builds create index sql. - */ - protected createIndexSql(table: Table, index: TableIndex): Query { - const columns = index.columnNames.map(columnName => `"${columnName}"`).join(", "); - return new Query(`CREATE ${index.isUnique ? "UNIQUE " : ""}INDEX "${index.name}" ON ${this.escapePath(table)} ${index.isSpatial ? 
"USING GiST " : ""}(${columns}) ${index.where ? "WHERE " + index.where : ""}`); - } - - /** - * Builds drop index sql. - */ - protected dropIndexSql(table: Table, indexOrName: TableIndex|string): Query { - let indexName = indexOrName instanceof TableIndex ? indexOrName.name : indexOrName; - const schema = this.extractSchema(table); - return schema ? new Query(`DROP INDEX "${schema}"."${indexName}"`) : new Query(`DROP INDEX "${indexName}"`); - } - - /** - * Builds create primary key sql. - */ - protected createPrimaryKeySql(table: Table, columnNames: string[]): Query { - const primaryKeyName = this.connection.namingStrategy.primaryKeyName(table.name, columnNames); - const columnNamesString = columnNames.map(columnName => `"${columnName}"`).join(", "); - return new Query(`ALTER TABLE ${this.escapePath(table)} ADD CONSTRAINT "${primaryKeyName}" PRIMARY KEY (${columnNamesString})`); - } - - /** - * Builds drop primary key sql. - */ - protected dropPrimaryKeySql(table: Table): Query { - const columnNames = table.primaryColumns.map(column => column.name); - const primaryKeyName = this.connection.namingStrategy.primaryKeyName(table.name, columnNames); - return new Query(`ALTER TABLE ${this.escapePath(table)} DROP CONSTRAINT "${primaryKeyName}"`); - } - - /** - * Builds create unique constraint sql. - */ - protected createUniqueConstraintSql(table: Table, uniqueConstraint: TableUnique): Query { - const columnNames = uniqueConstraint.columnNames.map(column => `"` + column + `"`).join(", "); - return new Query(`ALTER TABLE ${this.escapePath(table)} ADD CONSTRAINT "${uniqueConstraint.name}" UNIQUE (${columnNames})`); - } - - /** - * Builds drop unique constraint sql. - */ - protected dropUniqueConstraintSql(table: Table, uniqueOrName: TableUnique|string): Query { - const uniqueName = uniqueOrName instanceof TableUnique ? uniqueOrName.name : uniqueOrName; - return new Query(`ALTER TABLE ${this.escapePath(table)} DROP CONSTRAINT "${uniqueName}"`); - } - - /** - * Builds create check constraint sql. - */ - protected createCheckConstraintSql(table: Table, checkConstraint: TableCheck): Query { - return new Query(`ALTER TABLE ${this.escapePath(table)} ADD CONSTRAINT "${checkConstraint.name}" CHECK (${checkConstraint.expression})`); - } - - /** - * Builds drop check constraint sql. - */ - protected dropCheckConstraintSql(table: Table, checkOrName: TableCheck|string): Query { - const checkName = checkOrName instanceof TableCheck ? checkOrName.name : checkOrName; - return new Query(`ALTER TABLE ${this.escapePath(table)} DROP CONSTRAINT "${checkName}"`); - } - - /** - * Builds create exclusion constraint sql. - */ - protected createExclusionConstraintSql(table: Table, exclusionConstraint: TableExclusion): Query { - return new Query(`ALTER TABLE ${this.escapePath(table)} ADD CONSTRAINT "${exclusionConstraint.name}" EXCLUDE ${exclusionConstraint.expression}`); - } - - /** - * Builds drop exclusion constraint sql. - */ - protected dropExclusionConstraintSql(table: Table, exclusionOrName: TableExclusion|string): Query { - const exclusionName = exclusionOrName instanceof TableExclusion ? exclusionOrName.name : exclusionOrName; - return new Query(`ALTER TABLE ${this.escapePath(table)} DROP CONSTRAINT "${exclusionName}"`); - } - - /** - * Builds create foreign key sql. 
- */ - protected createForeignKeySql(table: Table, foreignKey: TableForeignKey): Query { - const columnNames = foreignKey.columnNames.map(column => `"` + column + `"`).join(", "); - const referencedColumnNames = foreignKey.referencedColumnNames.map(column => `"` + column + `"`).join(","); - let sql = `ALTER TABLE ${this.escapePath(table)} ADD CONSTRAINT "${foreignKey.name}" FOREIGN KEY (${columnNames}) ` + - `REFERENCES ${this.escapePath(foreignKey.referencedTableName)}(${referencedColumnNames})`; - if (foreignKey.onDelete) - sql += ` ON DELETE ${foreignKey.onDelete}`; - if (foreignKey.onUpdate) - sql += ` ON UPDATE ${foreignKey.onUpdate}`; - if (foreignKey.deferrable) - sql += ` DEFERRABLE ${foreignKey.deferrable}`; - - return new Query(sql); - } - - /** - * Builds drop foreign key sql. - */ - protected dropForeignKeySql(table: Table, foreignKeyOrName: TableForeignKey|string): Query { - const foreignKeyName = foreignKeyOrName instanceof TableForeignKey ? foreignKeyOrName.name : foreignKeyOrName; - return new Query(`ALTER TABLE ${this.escapePath(table)} DROP CONSTRAINT "${foreignKeyName}"`); - } - - /** - * Builds sequence name from given table and column. - */ - protected buildSequenceName(table: Table, columnOrName: TableColumn|string, currentSchema?: string, disableEscape?: true, skipSchema?: boolean): string { - const columnName = columnOrName instanceof TableColumn ? columnOrName.name : columnOrName; - let schema: string|undefined = undefined; - let tableName: string|undefined = undefined; - - if (table.name.indexOf(".") === -1) { - tableName = table.name; - } else { - schema = table.name.split(".")[0]; - tableName = table.name.split(".")[1]; - } - - if (schema && schema !== currentSchema && !skipSchema) { - return disableEscape ? `${schema}.${tableName}_${columnName}_seq` : `"${schema}"."${tableName}_${columnName}_seq"`; - - } else { - return disableEscape ? `${tableName}_${columnName}_seq` : `"${tableName}_${columnName}_seq"`; - } - } - - /** - * Builds ENUM type name from given table and column. - */ - protected buildEnumName(table: Table, columnOrName: TableColumn|string, withSchema: boolean = true, disableEscape?: boolean, toOld?: boolean): string { - /** - * If enumName is specified in column options then use it instead - */ - if (columnOrName instanceof TableColumn && columnOrName.enumName) { - let enumName = columnOrName.enumName; - if (toOld) - enumName = enumName + "_old"; - return disableEscape ? enumName : `"${enumName}"`; - } - const columnName = columnOrName instanceof TableColumn ? columnOrName.name : columnOrName; - const schema = table.name.indexOf(".") === -1 ? this.driver.options.database : table.name.split(".")[0]; - const tableName = table.name.indexOf(".") === -1 ? table.name : table.name.split(".")[1]; - let enumName = schema && withSchema ? `${schema}.${tableName}_${columnName.toLowerCase()}_enum` : `${tableName}_${columnName.toLowerCase()}_enum`; - if (toOld) - enumName = enumName + "_old"; - return enumName.split(".").map(i => { - return disableEscape ? 
i : `"${i}"`; - }).join("."); - } - - protected async getEnumTypeName(table: Table, column: TableColumn) { - const currentSchemaQuery = await this.query(`SELECT * FROM current_schema()`); - const currentSchema = currentSchemaQuery[0]["current_schema"]; - let [schema, name] = table.name.split("."); - if (!name) { - name = schema; - schema = this.driver.options.database || currentSchema; - } - const result = await this.query(`SELECT "udt_schema", "udt_name" ` + - `FROM "information_schema"."columns" WHERE "table_schema" = '${schema}' AND "table_name" = '${name}' AND "column_name"='${column.name}'`); - return { - enumTypeSchema: result[0]["udt_schema"], - enumTypeName: result[0]["udt_name"] - }; - } - - /** - * Escapes given table or view path. - */ - protected escapePath(target: Table|View|string, disableEscape?: boolean): string { - let tableName = target instanceof Table || target instanceof View ? target.name : target; - tableName = tableName.indexOf(".") === -1 && this.driver.options.database ? `${this.driver.options.database}.${tableName}` : tableName; - - return tableName.split(".").map(i => { - return disableEscape ? i : `"${i}"`; - }).join("."); - } - - /** - * Returns object with table schema and table name. - */ - protected parseTableName(target: Table|string) { - const tableName = target instanceof Table ? target.name : target; - if (tableName.indexOf(".") === -1) { - return { - schema: this.driver.options.database ? `'${this.driver.options.database}'` : "current_schema()", - tableName: `'${tableName}'` - }; - } else { - return { - schema: `'${tableName.split(".")[0]}'`, - tableName: `'${tableName.split(".")[1]}'` - }; - } - } - - /** - * Builds a query for create column. - */ - protected buildCreateColumnSql(table: Table, column: TableColumn) { - let c = "\"" + column.name + "\""; - if (column.isGenerated === true && column.generationStrategy !== "uuid") { - if (column.type === "integer" || column.type === "int" || column.type === "int4") - c += " SERIAL"; - if (column.type === "smallint" || column.type === "int2") - c += " SMALLSERIAL"; - if (column.type === "bigint" || column.type === "int8") - c += " BIGSERIAL"; - } - if (column.type === "enum" || column.type === "simple-enum") { - c += " " + this.buildEnumName(table, column); - if (column.isArray) - c += " array"; - - } else if (!column.isGenerated || column.type === "uuid") { - c += " " + this.connection.driver.createFullType(column); - } - if (column.charset) - c += " CHARACTER SET \"" + column.charset + "\""; - if (column.collation) - c += " COLLATE \"" + column.collation + "\""; - if (column.isNullable !== true) - c += " NOT NULL"; - if (column.default !== undefined && column.default !== null) - c += " DEFAULT " + column.default; - if (column.isGenerated && column.generationStrategy === "uuid" && !column.default) - c += ` DEFAULT ${this.driver.uuidGenerator}`; - - return c; - } - } diff --git a/src/driver/postgres/PostgresDriver.ts b/src/driver/postgres/PostgresDriver.ts index 7be91177b9..15ac662493 100644 --- a/src/driver/postgres/PostgresDriver.ts +++ b/src/driver/postgres/PostgresDriver.ts @@ -19,6 +19,8 @@ import {PostgresConnectionCredentialsOptions} from "./PostgresConnectionCredenti import {EntityMetadata} from "../../metadata/EntityMetadata"; import {OrmUtils} from "../../util/OrmUtils"; import {ApplyValueTransformers} from "../../util/ApplyValueTransformers"; +import {AuroraDataApiPostgresConnectionOptions} from "../aurora-data-api-pg/AuroraDataApiPostgresConnectionOptions"; +import 
{AuroraDataApiPostgresQueryRunner} from "../aurora-data-api-pg/AuroraDataApiPostgresQueryRunner"; /** * Organizes communication with PostgreSQL DBMS. @@ -248,7 +250,11 @@ export class PostgresDriver implements Driver { // Constructor // ------------------------------------------------------------------------- - constructor(connection: Connection) { + constructor(connection?: Connection) { + if (!connection) { + return; + } + this.connection = connection; this.options = connection.options as PostgresConnectionOptions; this.isReplicated = this.options.replication ? true : false; @@ -972,3 +978,113 @@ export class PostgresDriver implements Driver { } } + +abstract class PostgresWrapper extends PostgresDriver { + options: any; + + abstract createQueryRunner(mode: "master"|"slave"): any; +} + +/** + * Organizes communication with PostgreSQL DBMS. + */ +export class AuroraDataApiPostgresDriver extends PostgresWrapper { + + // ------------------------------------------------------------------------- + // Public Properties + // ------------------------------------------------------------------------- + + /** + * Connection used by driver. + */ + connection: Connection; + + /** + * Aurora Data API underlying library. + */ + DataApiDriver: any; + + client: any; + + // ------------------------------------------------------------------------- + // Public Implemented Properties + // ------------------------------------------------------------------------- + + /** + * Connection options. + */ + options: AuroraDataApiPostgresConnectionOptions; + + /** + * Master database used to perform all write queries. + */ + database?: string; + + // ------------------------------------------------------------------------- + // Constructor + // ------------------------------------------------------------------------- + + constructor(connection: Connection) { + super(); + this.connection = connection; + this.options = connection.options as AuroraDataApiPostgresConnectionOptions; + this.isReplicated = false; + + // load data-api package + this.loadDependencies(); + + this.client = new this.DataApiDriver( + this.options.region, + this.options.secretArn, + this.options.resourceArn, + this.options.database, + (query: string, parameters?: any[]) => this.connection.logger.logQuery(query, parameters), + ); + } + + // ------------------------------------------------------------------------- + // Public Implemented Methods + // ------------------------------------------------------------------------- + + /** + * Performs connection to the database. + * Based on pooling options, it can either create connection immediately, + * either create a pool and create connection when needed. + */ + async connect(): Promise { + } + + /** + * Closes connection with database. + */ + async disconnect(): Promise { + } + + /** + * Creates a query runner used to execute database queries. + */ + createQueryRunner(mode: "master"|"slave" = "master") { + return new AuroraDataApiPostgresQueryRunner(this, mode); + } + + // ------------------------------------------------------------------------- + // Protected Methods + // ------------------------------------------------------------------------- + + /** + * If driver dependency is not given explicitly, then try to load it via "require". + */ + protected loadDependencies(): void { + const { pg } = PlatformTools.load("typeorm-aurora-data-api-driver"); + + this.DataApiDriver = pg; + } + + /** + * Executes given query. 
+ */ + protected executeQuery(connection: any, query: string) { + return this.client.query(query); + } + +} diff --git a/src/metadata-builder/EntityMetadataBuilder.ts b/src/metadata-builder/EntityMetadataBuilder.ts index 9eace62ebb..d4c764d648 100644 --- a/src/metadata-builder/EntityMetadataBuilder.ts +++ b/src/metadata-builder/EntityMetadataBuilder.ts @@ -23,7 +23,6 @@ import {SqlServerDriver} from "../driver/sqlserver/SqlServerDriver"; import {PostgresDriver} from "../driver/postgres/PostgresDriver"; import {ExclusionMetadata} from "../metadata/ExclusionMetadata"; import {AuroraDataApiDriver} from "../driver/aurora-data-api/AuroraDataApiDriver"; -import {AuroraDataApiPostgresDriver} from "../driver/aurora-data-api-pg/AuroraDataApiPostgresDriver"; /** * Builds EntityMetadata objects and all its sub-metadatas. @@ -490,7 +489,7 @@ export class EntityMetadataBuilder { }); // Only PostgreSQL supports exclusion constraints. - if (this.connection.driver instanceof PostgresDriver || this.connection.driver instanceof AuroraDataApiPostgresDriver) { + if (this.connection.driver instanceof PostgresDriver) { entityMetadata.exclusions = this.metadataArgsStorage.filterExclusions(entityMetadata.inheritanceTree).map(args => { return new ExclusionMetadata({ entityMetadata, args }); }); diff --git a/src/metadata/EntityMetadata.ts b/src/metadata/EntityMetadata.ts index 2bebdd3697..d4da9d2f61 100644 --- a/src/metadata/EntityMetadata.ts +++ b/src/metadata/EntityMetadata.ts @@ -25,7 +25,6 @@ import {RelationMetadata} from "./RelationMetadata"; import {TableType} from "./types/TableTypes"; import {TreeType} from "./types/TreeTypes"; import {UniqueMetadata} from "./UniqueMetadata"; -import {AuroraDataApiPostgresDriver} from "../driver/aurora-data-api-pg/AuroraDataApiPostgresDriver"; /** * Contains all entity metadata. @@ -853,11 +852,11 @@ export class EntityMetadata { */ protected buildTablePath(): string { let tablePath = this.tableName; - if (this.schema && ((this.connection.driver instanceof PostgresDriver) || (this.connection.driver instanceof AuroraDataApiPostgresDriver) || (this.connection.driver instanceof SqlServerDriver) || (this.connection.driver instanceof SapDriver))) { + if (this.schema && ((this.connection.driver instanceof PostgresDriver) || (this.connection.driver instanceof SqlServerDriver) || (this.connection.driver instanceof SapDriver))) { tablePath = this.schema + "." + tablePath; } - if (this.database && !(this.connection.driver instanceof PostgresDriver || this.connection.driver instanceof AuroraDataApiPostgresDriver)) { + if (this.database && !(this.connection.driver instanceof PostgresDriver)) { if (!this.schema && this.connection.driver instanceof SqlServerDriver) { tablePath = this.database + ".." + tablePath; } else { @@ -875,7 +874,7 @@ export class EntityMetadata { if (!this.schema) return undefined; - return this.database && !(this.connection.driver instanceof PostgresDriver || this.connection.driver instanceof AuroraDataApiPostgresDriver) ? this.database + "." + this.schema : this.schema; + return this.database && !(this.connection.driver instanceof PostgresDriver) ? this.database + "." 
+ this.schema : this.schema; } } diff --git a/src/query-builder/DeleteQueryBuilder.ts b/src/query-builder/DeleteQueryBuilder.ts index 87947f7b97..4be20ba377 100644 --- a/src/query-builder/DeleteQueryBuilder.ts +++ b/src/query-builder/DeleteQueryBuilder.ts @@ -16,7 +16,6 @@ import {MysqlDriver} from "../driver/mysql/MysqlDriver"; import {BroadcasterResult} from "../subscriber/BroadcasterResult"; import {EntitySchema} from "../index"; import {AuroraDataApiDriver} from "../driver/aurora-data-api/AuroraDataApiDriver"; -import { AuroraDataApiPostgresDriver } from "../driver/aurora-data-api-pg/AuroraDataApiPostgresDriver"; /** * Allows to build complex sql queries in a fashion way and execute those queries. @@ -76,7 +75,7 @@ export class DeleteQueryBuilder extends QueryBuilder implements deleteResult.raw = result; deleteResult.affected = result.affectedRows; - } else if (driver instanceof SqlServerDriver || driver instanceof PostgresDriver || driver instanceof AuroraDataApiPostgresDriver || driver instanceof CockroachDriver) { + } else if (driver instanceof SqlServerDriver || driver instanceof PostgresDriver || driver instanceof CockroachDriver) { deleteResult.raw = result[0] ? result[0] : null; // don't return 0 because it could confuse. null means that we did not receive this value deleteResult.affected = typeof result[1] === "number" ? result[1] : null; @@ -258,7 +257,7 @@ export class DeleteQueryBuilder extends QueryBuilder implements const whereExpression = this.createWhereExpression(); const returningExpression = this.createReturningExpression(); - if (returningExpression && (this.connection.driver instanceof PostgresDriver || this.connection.driver instanceof AuroraDataApiPostgresDriver || this.connection.driver instanceof CockroachDriver)) { + if (returningExpression && (this.connection.driver instanceof PostgresDriver || this.connection.driver instanceof CockroachDriver)) { return `DELETE FROM ${tableName}${whereExpression} RETURNING ${returningExpression}`; } else if (returningExpression !== "" && this.connection.driver instanceof SqlServerDriver) { diff --git a/src/query-builder/InsertQueryBuilder.ts b/src/query-builder/InsertQueryBuilder.ts index e36fb31a84..1ea9b4233c 100644 --- a/src/query-builder/InsertQueryBuilder.ts +++ b/src/query-builder/InsertQueryBuilder.ts @@ -19,7 +19,6 @@ import {BroadcasterResult} from "../subscriber/BroadcasterResult"; import {EntitySchema} from "../entity-schema/EntitySchema"; import {OracleDriver} from "../driver/oracle/OracleDriver"; import {AuroraDataApiDriver} from "../driver/aurora-data-api/AuroraDataApiDriver"; -import {AuroraDataApiPostgresDriver} from "../driver/aurora-data-api-pg/AuroraDataApiPostgresDriver"; /** * Allows to build complex sql queries in a fashion way and execute those queries. 
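The DeleteQueryBuilder hunk above keeps the RETURNING branch for Postgres-family drivers only. For reference, a hedged usage sketch, assuming an open connection and an illustrative Photo entity:

    // Hypothetical entity; shown only to illustrate the RETURNING path.
    const result = await connection.createQueryBuilder()
        .delete()
        .from(Photo)
        .where("id = :id", { id: 1 })
        .returning(["id", "name"])
        .execute();
    // On Postgres-family drivers, result.raw holds the returned rows and
    // result.affected the number of deleted rows.
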
@@ -264,7 +263,7 @@ export class InsertQueryBuilder extends QueryBuilder { if (statement && Array.isArray(statement.overwrite)) { if (this.connection.driver instanceof MysqlDriver || this.connection.driver instanceof AuroraDataApiDriver) { this.expressionMap.onUpdate.overwrite = statement.overwrite.map(column => `${column} = VALUES(${column})`).join(", "); - } else if (this.connection.driver instanceof PostgresDriver || this.connection.driver instanceof AuroraDataApiPostgresDriver || this.connection.driver instanceof AbstractSqliteDriver || this.connection.driver instanceof CockroachDriver) { + } else if (this.connection.driver instanceof PostgresDriver || this.connection.driver instanceof AbstractSqliteDriver || this.connection.driver instanceof CockroachDriver) { this.expressionMap.onUpdate.overwrite = statement.overwrite.map(column => `${column} = EXCLUDED.${column}`).join(", "); } } @@ -315,7 +314,7 @@ export class InsertQueryBuilder extends QueryBuilder { query += ` DEFAULT VALUES`; } } - if (this.connection.driver instanceof PostgresDriver || this.connection.driver instanceof AuroraDataApiPostgresDriver || this.connection.driver instanceof AbstractSqliteDriver || this.connection.driver instanceof CockroachDriver) { + if (this.connection.driver instanceof PostgresDriver || this.connection.driver instanceof AbstractSqliteDriver || this.connection.driver instanceof CockroachDriver) { query += `${this.expressionMap.onIgnore ? " ON CONFLICT DO NOTHING " : ""}`; query += `${this.expressionMap.onConflict ? " ON CONFLICT " + this.expressionMap.onConflict : ""}`; if (this.expressionMap.onUpdate) { @@ -332,7 +331,7 @@ export class InsertQueryBuilder extends QueryBuilder { } // add RETURNING expression - if (returningExpression && (this.connection.driver instanceof PostgresDriver || this.connection.driver instanceof AuroraDataApiPostgresDriver || this.connection.driver instanceof OracleDriver || this.connection.driver instanceof CockroachDriver)) { + if (returningExpression && (this.connection.driver instanceof PostgresDriver || this.connection.driver instanceof OracleDriver || this.connection.driver instanceof CockroachDriver)) { query += ` RETURNING ${returningExpression}`; } @@ -494,7 +493,7 @@ export class InsertQueryBuilder extends QueryBuilder { } else { expression += `${geomFromText}(${this.connection.driver.createParameter(paramName, parametersCount)})`; } - } else if ((this.connection.driver instanceof PostgresDriver || this.connection.driver instanceof AuroraDataApiPostgresDriver) && this.connection.driver.spatialTypes.indexOf(column.type) !== -1) { + } else if (this.connection.driver instanceof PostgresDriver && this.connection.driver.spatialTypes.indexOf(column.type) !== -1) { if (column.srid != null) { expression += `ST_SetSRID(ST_GeomFromGeoJSON(${this.connection.driver.createParameter(paramName, parametersCount)}), ${column.srid})::${column.type}`; } else { diff --git a/src/query-builder/SelectQueryBuilder.ts b/src/query-builder/SelectQueryBuilder.ts index 3cfb272f9b..483b2dced8 100644 --- a/src/query-builder/SelectQueryBuilder.ts +++ b/src/query-builder/SelectQueryBuilder.ts @@ -36,7 +36,6 @@ import {SelectQueryBuilderOption} from "./SelectQueryBuilderOption"; import {ObjectUtils} from "../util/ObjectUtils"; import {DriverUtils} from "../driver/DriverUtils"; import {AuroraDataApiDriver} from "../driver/aurora-data-api/AuroraDataApiDriver"; -import {AuroraDataApiPostgresDriver} from "../driver/aurora-data-api-pg/AuroraDataApiPostgresDriver"; /** * Allows to build complex sql 
queries in a fashion way and execute those queries. @@ -1442,7 +1441,7 @@ export class SelectQueryBuilder extends QueryBuilder implements const {driver} = this.connection; let select = "SELECT "; - if ((driver instanceof PostgresDriver || driver instanceof AuroraDataApiPostgresDriver) && selectDistinctOn.length > 0) { + if (driver instanceof PostgresDriver && selectDistinctOn.length > 0) { const selectDistinctOnMap = selectDistinctOn.map( (on) => this.replacePropertyNames(on) ).join(", "); @@ -1651,7 +1650,7 @@ export class SelectQueryBuilder extends QueryBuilder implements if (driver instanceof MysqlDriver || driver instanceof AuroraDataApiDriver) { return " LOCK IN SHARE MODE"; - } else if (driver instanceof PostgresDriver || this.connection.driver instanceof AuroraDataApiPostgresDriver ) { + } else if (driver instanceof PostgresDriver) { return " FOR SHARE"; } else if (driver instanceof OracleDriver) { @@ -1664,7 +1663,7 @@ export class SelectQueryBuilder extends QueryBuilder implements throw new LockNotSupportedOnGivenDriverError(); } case "pessimistic_write": - if (driver instanceof MysqlDriver || driver instanceof AuroraDataApiDriver || driver instanceof PostgresDriver || driver instanceof AuroraDataApiPostgresDriver || driver instanceof OracleDriver) { + if (driver instanceof MysqlDriver || driver instanceof AuroraDataApiDriver || driver instanceof PostgresDriver || driver instanceof OracleDriver) { return " FOR UPDATE"; } else if (driver instanceof SqlServerDriver) { @@ -1728,7 +1727,7 @@ export class SelectQueryBuilder extends QueryBuilder implements selectionPath = `${asText}(${selectionPath})`; } - if (this.connection.driver instanceof PostgresDriver || this.connection.driver instanceof AuroraDataApiPostgresDriver ) + if (this.connection.driver instanceof PostgresDriver) // cast to JSON to trigger parsing in the driver selectionPath = `ST_AsGeoJSON(${selectionPath})::json`; diff --git a/src/query-builder/UpdateQueryBuilder.ts b/src/query-builder/UpdateQueryBuilder.ts index f7fba2f2b3..1bf6c68b6d 100644 --- a/src/query-builder/UpdateQueryBuilder.ts +++ b/src/query-builder/UpdateQueryBuilder.ts @@ -24,7 +24,6 @@ import {UpdateValuesMissingError} from "../error/UpdateValuesMissingError"; import {EntityColumnNotFound} from "../error/EntityColumnNotFound"; import {QueryDeepPartialEntity} from "./QueryPartialEntity"; import {AuroraDataApiDriver} from "../driver/aurora-data-api/AuroraDataApiDriver"; -import {AuroraDataApiPostgresDriver} from "../driver/aurora-data-api-pg/AuroraDataApiPostgresDriver"; /** * Allows to build complex sql queries in a fashion way and execute those queries. 
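The lock-mode branches touched above map query-builder lock names onto SQL suffixes ("pessimistic_read" becomes FOR SHARE on Postgres, "pessimistic_write" becomes FOR UPDATE on Postgres, MySQL and Oracle). An illustrative call, with an assumed Photo entity; pessimistic locks must run inside a transaction:

    await connection.manager.transaction(async manager => {
        await manager
            .createQueryBuilder(Photo, "photo")
            .setLock("pessimistic_write")
            .where("photo.id = :id", { id: 1 })
            .getOne();
    });
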
@@ -101,7 +100,7 @@ export class UpdateQueryBuilder extends QueryBuilder implements parameters, ); - if (this.connection.driver instanceof PostgresDriver || this.connection.driver instanceof AuroraDataApiPostgresDriver) { + if (this.connection.driver instanceof PostgresDriver) { updateResult.raw = result[0]; updateResult.affected = result[1]; } @@ -447,7 +446,7 @@ export class UpdateQueryBuilder extends QueryBuilder implements } else { expression = `${geomFromText}(${this.connection.driver.createParameter(paramName, parametersCount)})`; } - } else if ((this.connection.driver instanceof PostgresDriver || this.connection.driver instanceof AuroraDataApiPostgresDriver) && this.connection.driver.spatialTypes.indexOf(column.type) !== -1) { + } else if (this.connection.driver instanceof PostgresDriver && this.connection.driver.spatialTypes.indexOf(column.type) !== -1) { if (column.srid != null) { expression = `ST_SetSRID(ST_GeomFromGeoJSON(${this.connection.driver.createParameter(paramName, parametersCount)}), ${column.srid})::${column.type}`; } else { @@ -517,7 +516,7 @@ export class UpdateQueryBuilder extends QueryBuilder implements const returningExpression = this.createReturningExpression(); // generate and return sql update query - if (returningExpression && (this.connection.driver instanceof PostgresDriver || this.connection.driver instanceof AuroraDataApiPostgresDriver || this.connection.driver instanceof OracleDriver || this.connection.driver instanceof CockroachDriver)) { + if (returningExpression && (this.connection.driver instanceof PostgresDriver || this.connection.driver instanceof OracleDriver || this.connection.driver instanceof CockroachDriver)) { return `UPDATE ${this.getTableName(this.getMainTableName())} SET ${updateColumnAndValues.join(", ")}${whereExpression} RETURNING ${returningExpression}`; } else if (returningExpression && this.connection.driver instanceof SqlServerDriver) { diff --git a/src/schema-builder/RdbmsSchemaBuilder.ts b/src/schema-builder/RdbmsSchemaBuilder.ts index 3dfbaad0d5..2c625c9a6e 100644 --- a/src/schema-builder/RdbmsSchemaBuilder.ts +++ b/src/schema-builder/RdbmsSchemaBuilder.ts @@ -24,7 +24,6 @@ import {TableExclusion} from "./table/TableExclusion"; import {View} from "./view/View"; import {AuroraDataApiDriver} from "../driver/aurora-data-api/AuroraDataApiDriver"; import { ForeignKeyMetadata } from "../metadata/ForeignKeyMetadata"; -import {AuroraDataApiPostgresDriver} from "../driver/aurora-data-api-pg/AuroraDataApiPostgresDriver"; /** * Creates complete tables schemas in the database based on the entity metadatas. 
@@ -340,7 +339,7 @@ export class RdbmsSchemaBuilder implements SchemaBuilder { protected async dropOldExclusions(): Promise { // Only PostgreSQL supports exclusion constraints - if (!(this.connection.driver instanceof PostgresDriver || this.connection.driver instanceof AuroraDataApiPostgresDriver)) + if (!(this.connection.driver instanceof PostgresDriver)) return; await PromiseUtils.runInSequence(this.entityToSyncMetadatas, async metadata => { @@ -619,7 +618,7 @@ export class RdbmsSchemaBuilder implements SchemaBuilder { */ protected async createNewExclusions(): Promise { // Only PostgreSQL supports exclusion constraints - if (!(this.connection.driver instanceof PostgresDriver || this.connection.driver instanceof AuroraDataApiPostgresDriver)) + if (!(this.connection.driver instanceof PostgresDriver)) return; await PromiseUtils.runInSequence(this.entityToSyncMetadatas, async metadata => { diff --git a/test/functional/entity-schema/exclusions/exclusions-basic.ts b/test/functional/entity-schema/exclusions/exclusions-basic.ts index d199466a48..1e20cda483 100644 --- a/test/functional/entity-schema/exclusions/exclusions-basic.ts +++ b/test/functional/entity-schema/exclusions/exclusions-basic.ts @@ -3,7 +3,6 @@ import {closeTestingConnections, createTestingConnections, reloadTestingDatabase import {Connection} from "../../../../src/connection/Connection"; import {MeetingSchema} from "./entity/Meeting"; import {PostgresDriver} from "../../../../src/driver/postgres/PostgresDriver"; -import { AuroraDataApiPostgresDriver } from "../../../../src/driver/aurora-data-api-pg/AuroraDataApiPostgresDriver"; describe("entity-schema > exclusions", () => { @@ -16,7 +15,7 @@ describe("entity-schema > exclusions", () => { it("should create an exclusion constraint", () => Promise.all(connections.map(async connection => { // Only PostgreSQL supports exclusion constraints. 
- if (!(connection.driver instanceof PostgresDriver || connection.driver instanceof AuroraDataApiPostgresDriver)) + if (!(connection.driver instanceof PostgresDriver)) return; const queryRunner = connection.createQueryRunner(); diff --git a/test/functional/multi-schema-and-database/multi-schema-and-database-basic-functionality/multi-schema-and-database-basic-functionality.ts b/test/functional/multi-schema-and-database/multi-schema-and-database-basic-functionality/multi-schema-and-database-basic-functionality.ts index edde116821..c6fbdd915e 100644 --- a/test/functional/multi-schema-and-database/multi-schema-and-database-basic-functionality/multi-schema-and-database-basic-functionality.ts +++ b/test/functional/multi-schema-and-database/multi-schema-and-database-basic-functionality/multi-schema-and-database-basic-functionality.ts @@ -11,7 +11,6 @@ import {Person} from "./entity/Person"; import {Question} from "./entity/Question"; import {Answer} from "./entity/Answer"; import {MysqlDriver} from "../../../../src/driver/mysql/MysqlDriver"; -import { AuroraDataApiPostgresDriver } from "../../../../src/driver/aurora-data-api-pg/AuroraDataApiPostgresDriver"; describe("multi-schema-and-database > basic-functionality", () => { @@ -42,7 +41,7 @@ describe("multi-schema-and-database > basic-functionality", () => { .where("post.id = :id", {id: 1}) .getSql(); - if (connection.driver instanceof PostgresDriver || connection.driver instanceof AuroraDataApiPostgresDriver) + if (connection.driver instanceof PostgresDriver) sql.should.be.equal(`SELECT "post"."id" AS "post_id", "post"."name" AS "post_name" FROM "custom"."post" "post" WHERE "post"."id" = $1`); if (connection.driver instanceof SqlServerDriver) @@ -65,7 +64,7 @@ describe("multi-schema-and-database > basic-functionality", () => { .where("user.id = :id", {id: 1}) .getSql(); - if (connection.driver instanceof PostgresDriver || connection.driver instanceof AuroraDataApiPostgresDriver) + if (connection.driver instanceof PostgresDriver) sql.should.be.equal(`SELECT "user"."id" AS "user_id", "user"."name" AS "user_name" FROM "userSchema"."user" "user" WHERE "user"."id" = $1`); if (connection.driver instanceof SqlServerDriver) @@ -103,7 +102,7 @@ describe("multi-schema-and-database > basic-functionality", () => { .where("category.id = :id", {id: 1}) .getSql(); - if (connection.driver instanceof PostgresDriver || connection.driver instanceof AuroraDataApiPostgresDriver) + if (connection.driver instanceof PostgresDriver) sql.should.be.equal(`SELECT "category"."id" AS "category_id", "category"."name" AS "category_name",` + ` "category"."postId" AS "category_postId", "post"."id" AS "post_id", "post"."name" AS "post_name"` + ` FROM "guest"."category" "category" INNER JOIN "custom"."post" "post" ON "post"."id"="category"."postId" WHERE "category"."id" = $1`); @@ -141,7 +140,7 @@ describe("multi-schema-and-database > basic-functionality", () => { (await query.getRawOne())!.should.be.not.empty; - if (connection.driver instanceof PostgresDriver || connection.driver instanceof AuroraDataApiPostgresDriver) + if (connection.driver instanceof PostgresDriver) query.getSql().should.be.equal(`SELECT * FROM "guest"."category" "category", "userSchema"."user" "user",` + ` "custom"."post" "post" WHERE "category"."id" = $1 AND "post"."id" = "category"."postId"`); diff --git a/test/functional/query-builder/locking/query-builder-locking.ts b/test/functional/query-builder/locking/query-builder-locking.ts index f9fbeb6c14..eefdddfd05 100644 --- 
a/test/functional/query-builder/locking/query-builder-locking.ts +++ b/test/functional/query-builder/locking/query-builder-locking.ts @@ -18,7 +18,6 @@ import {SqlServerDriver} from "../../../../src/driver/sqlserver/SqlServerDriver" import {AbstractSqliteDriver} from "../../../../src/driver/sqlite-abstract/AbstractSqliteDriver"; import {OracleDriver} from "../../../../src/driver/oracle/OracleDriver"; import {LockNotSupportedOnGivenDriverError} from "../../../../src/error/LockNotSupportedOnGivenDriverError"; -import { AuroraDataApiPostgresDriver } from "../../../../src/driver/aurora-data-api-pg/AuroraDataApiPostgresDriver"; describe("query builder > locking", () => { @@ -133,7 +132,7 @@ describe("query builder > locking", () => { .where("post.id = :id", { id: 1 }) .getSql(); - if (connection.driver instanceof MysqlDriver || connection.driver instanceof PostgresDriver || connection.driver instanceof AuroraDataApiPostgresDriver || connection.driver instanceof OracleDriver) { + if (connection.driver instanceof MysqlDriver || connection.driver instanceof PostgresDriver || connection.driver instanceof OracleDriver) { expect(sql.indexOf("FOR UPDATE") !== -1).to.be.true; } else if (connection.driver instanceof SqlServerDriver) { diff --git a/test/functional/query-builder/order-by/query-builder-order-by.ts b/test/functional/query-builder/order-by/query-builder-order-by.ts index e6aaac1091..4db3c69a99 100644 --- a/test/functional/query-builder/order-by/query-builder-order-by.ts +++ b/test/functional/query-builder/order-by/query-builder-order-by.ts @@ -5,7 +5,6 @@ import {expect} from "chai"; import {Post} from "./entity/Post"; import {PostgresDriver} from "../../../../src/driver/postgres/PostgresDriver"; import {MysqlDriver} from "../../../../src/driver/mysql/MysqlDriver"; -import { AuroraDataApiPostgresDriver } from "../../../../src/driver/aurora-data-api-pg/AuroraDataApiPostgresDriver"; describe("query builder > order-by", () => { @@ -52,7 +51,7 @@ describe("query builder > order-by", () => { }))); it("should be always in right order(custom order)", () => Promise.all(connections.map(async connection => { - if (!(connection.driver instanceof PostgresDriver || connection.driver instanceof AuroraDataApiPostgresDriver)) // NULLS FIRST / LAST only supported by postgres + if (!(connection.driver instanceof PostgresDriver)) // NULLS FIRST / LAST only supported by postgres return; const post1 = new Post(); diff --git a/test/functional/query-runner/rename-column.ts b/test/functional/query-runner/rename-column.ts index c59d36edb5..b033a46fb0 100644 --- a/test/functional/query-runner/rename-column.ts +++ b/test/functional/query-runner/rename-column.ts @@ -9,7 +9,6 @@ import {SqlServerDriver} from "../../../src/driver/sqlserver/SqlServerDriver"; import {PostgresDriver} from "../../../src/driver/postgres/PostgresDriver"; import {AbstractSqliteDriver} from "../../../src/driver/sqlite-abstract/AbstractSqliteDriver"; import {MysqlDriver} from "../../../src/driver/mysql/MysqlDriver"; -import { AuroraDataApiPostgresDriver } from "../../../src/driver/aurora-data-api-pg/AuroraDataApiPostgresDriver"; describe("query runner > rename column", () => { @@ -124,7 +123,7 @@ describe("query runner > rename column", () => { await queryRunner.createDatabase("testDB", true); await queryRunner.createSchema("testDB.testSchema", true); - } else if (connection.driver instanceof PostgresDriver || connection.driver instanceof AuroraDataApiPostgresDriver) { + } else if (connection.driver instanceof PostgresDriver) { questionTableName 
= "testSchema.question"; categoryTableName = "testSchema.category"; await queryRunner.createSchema("testSchema", true); diff --git a/test/functional/query-runner/rename-table.ts b/test/functional/query-runner/rename-table.ts index 738d731544..15fbd52647 100644 --- a/test/functional/query-runner/rename-table.ts +++ b/test/functional/query-runner/rename-table.ts @@ -8,7 +8,6 @@ import {Table} from "../../../src/schema-builder/table/Table"; import {AbstractSqliteDriver} from "../../../src/driver/sqlite-abstract/AbstractSqliteDriver"; import {PostgresDriver} from "../../../src/driver/postgres/PostgresDriver"; import {MysqlDriver} from "../../../src/driver/mysql/MysqlDriver"; -import { AuroraDataApiPostgresDriver } from "../../../src/driver/aurora-data-api-pg/AuroraDataApiPostgresDriver"; describe("query runner > rename table", () => { @@ -106,7 +105,7 @@ describe("query runner > rename table", () => { await queryRunner.createDatabase("testDB", true); await queryRunner.createSchema("testDB.testSchema", true); - } else if (connection.driver instanceof PostgresDriver || connection.driver instanceof SapDriver || connection.driver instanceof AuroraDataApiPostgresDriver) { + } else if (connection.driver instanceof PostgresDriver || connection.driver instanceof SapDriver) { questionTableName = "testSchema.question"; renamedQuestionTableName = "testSchema.renamedQuestion"; categoryTableName = "testSchema.category"; diff --git a/test/functional/repository/find-options-locking/find-options-locking.ts b/test/functional/repository/find-options-locking/find-options-locking.ts index b60bbedb5b..876c67ff2e 100644 --- a/test/functional/repository/find-options-locking/find-options-locking.ts +++ b/test/functional/repository/find-options-locking/find-options-locking.ts @@ -18,7 +18,6 @@ import {SqlServerDriver} from "../../../../src/driver/sqlserver/SqlServerDriver" import {AbstractSqliteDriver} from "../../../../src/driver/sqlite-abstract/AbstractSqliteDriver"; import {OracleDriver} from "../../../../src/driver/oracle/OracleDriver"; import {LockNotSupportedOnGivenDriverError} from "../../../../src/error/LockNotSupportedOnGivenDriverError"; -import { AuroraDataApiPostgresDriver } from "../../../../src/driver/aurora-data-api-pg/AuroraDataApiPostgresDriver"; describe("repository > find options > locking", () => { @@ -86,7 +85,7 @@ describe("repository > find options > locking", () => { if (connection.driver instanceof MysqlDriver) { expect(executedSql[0].indexOf("LOCK IN SHARE MODE") !== -1).to.be.true; - } else if (connection.driver instanceof PostgresDriver || connection.driver instanceof AuroraDataApiPostgresDriver) { + } else if (connection.driver instanceof PostgresDriver) { expect(executedSql[0].indexOf("FOR SHARE") !== -1).to.be.true; } else if (connection.driver instanceof OracleDriver) { @@ -116,7 +115,7 @@ describe("repository > find options > locking", () => { .findOne(1, {lock: {mode: "pessimistic_write"}}); }); - if (connection.driver instanceof MysqlDriver || connection.driver instanceof PostgresDriver || connection.driver instanceof AuroraDataApiPostgresDriver || connection.driver instanceof OracleDriver) { + if (connection.driver instanceof MysqlDriver || connection.driver instanceof PostgresDriver || connection.driver instanceof OracleDriver) { expect(executedSql[0].indexOf("FOR UPDATE") !== -1).to.be.true; } else if (connection.driver instanceof SqlServerDriver) {