From 7f07cc6344fc9e6227d192fb6f9e5768291d40cd Mon Sep 17 00:00:00 2001 From: RomaZe Date: Wed, 29 Sep 2021 13:03:20 +0300 Subject: [PATCH 01/53] ADBDEV-2091: Add partitioning support for Sybase --- .../pxf/plugins/jdbc/utils/DbProduct.java | 300 +++++++++--------- 1 file changed, 156 insertions(+), 144 deletions(-) diff --git a/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/utils/DbProduct.java b/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/utils/DbProduct.java index be52b32f03..6bfc354d8f 100644 --- a/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/utils/DbProduct.java +++ b/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/utils/DbProduct.java @@ -1,144 +1,156 @@ -package org.greenplum.pxf.plugins.jdbc.utils; - -/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * A tool class to change PXF-JDBC plugin behaviour for certain external databases - */ -public enum DbProduct { - MICROSOFT { - @Override - public String wrapDate(Object val) { - return "'" + val + "'"; - } - - @Override - public String buildSessionQuery(String key, String value) { - return String.format("SET %s %s", key, value); - } - }, - - MYSQL { - @Override - public String wrapDate(Object val) { - return "DATE('" + val + "')"; - } - }, - - ORACLE { - @Override - public String wrapDate(Object val) { - return "to_date('" + val + "', 'YYYY-MM-DD')"; - } - - @Override - public String wrapTimestamp(Object val) { - return "to_timestamp('" + val + "', 'YYYY-MM-DD HH24:MI:SS.FF')"; - } - - @Override - public String buildSessionQuery(String key, String value) { - return String.format("ALTER SESSION SET %s = %s", key, value); - } - }, - - POSTGRES { - @Override - public String wrapDate(Object val) { - return "date'" + val + "'"; - } - }, - - S3_SELECT { - @Override - public String wrapDate(Object val) { - return "TO_TIMESTAMP('" + val + "')"; - } - - @Override - public String wrapTimestamp(Object val) { - return "TO_TIMESTAMP('" + val + "')"; - } - }; - - /** - * Wraps a given date value the way required by target database - * - * @param val {@link java.sql.Date} object to wrap - * @return a string with a properly wrapped date object - */ - public abstract String wrapDate(Object val); - - /** - * Wraps a given timestamp value the way required by target database - * - * @param val {@link java.sql.Timestamp} object to wrap - * @return a string with a properly wrapped timestamp object - */ - public String wrapTimestamp(Object val) { - return "'" + val + "'"; - } - - /** - * Build a query to set session-level variables for target database - * - * @param key variable name (key) - * @param value variable value - * @return a string with template SET query - */ - public String buildSessionQuery(String 
key, String value) { - return String.format("SET %s = %s", key, value); - } - - /** - * Get DbProduct for database by database name - * - * @param dbName database name - * @return a DbProduct of the required class - */ - public static DbProduct getDbProduct(String dbName) { - if (LOG.isDebugEnabled()) { - LOG.debug("Database product name is '" + dbName + "'"); - } - - dbName = dbName.toUpperCase(); - DbProduct result; - if (dbName.contains("MICROSOFT")) - result = DbProduct.MICROSOFT; - else if (dbName.contains("MYSQL")) - result = DbProduct.MYSQL; - else if (dbName.contains("ORACLE")) - result = DbProduct.ORACLE; - else if (dbName.contains("S3 SELECT")) - result = DbProduct.S3_SELECT; - else - result = DbProduct.POSTGRES; - - if (LOG.isDebugEnabled()) { - LOG.debug("DbProduct '" + result + "' is used"); - } - return result; - } - - private static final Logger LOG = LoggerFactory.getLogger(DbProduct.class); -} +package org.greenplum.pxf.plugins.jdbc.utils; + +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * A tool class to change PXF-JDBC plugin behaviour for certain external databases + */ +public enum DbProduct { + MICROSOFT { + @Override + public String wrapDate(Object val) { + return "'" + val + "'"; + } + + @Override + public String buildSessionQuery(String key, String value) { + return String.format("SET %s %s", key, value); + } + }, + + MYSQL { + @Override + public String wrapDate(Object val) { + return "DATE('" + val + "')"; + } + }, + + ORACLE { + @Override + public String wrapDate(Object val) { + return "to_date('" + val + "', 'YYYY-MM-DD')"; + } + + @Override + public String wrapTimestamp(Object val) { + return "to_timestamp('" + val + "', 'YYYY-MM-DD HH24:MI:SS.FF')"; + } + + @Override + public String buildSessionQuery(String key, String value) { + return String.format("ALTER SESSION SET %s = %s", key, value); + } + }, + + POSTGRES { + @Override + public String wrapDate(Object val) { + return "date'" + val + "'"; + } + }, + + S3_SELECT { + @Override + public String wrapDate(Object val) { + return "TO_TIMESTAMP('" + val + "')"; + } + + @Override + public String wrapTimestamp(Object val) { + return "TO_TIMESTAMP('" + val + "')"; + } + }, + + SYBASE { + @Override + public String wrapDate(Object val) { return "'" + val + "'"; } + + @Override + public String buildSessionQuery(String key, String value) { + return String.format("SET %s %s", key, value); + } + }; + + /** + * Wraps a given date value the way required by target database + * + * @param val {@link java.sql.Date} object to wrap + * @return a string with a properly wrapped date object + */ + public abstract String wrapDate(Object val); + + /** + * Wraps a given timestamp value the way required by target database + * + * @param val {@link java.sql.Timestamp} object to wrap + * @return a string with a properly wrapped timestamp object + */ + public String wrapTimestamp(Object val) { + return "'" + val + "'"; + } + + /** + * Build 
a query to set session-level variables for target database + * + * @param key variable name (key) + * @param value variable value + * @return a string with template SET query + */ + public String buildSessionQuery(String key, String value) { + return String.format("SET %s = %s", key, value); + } + + /** + * Get DbProduct for database by database name + * + * @param dbName database name + * @return a DbProduct of the required class + */ + public static DbProduct getDbProduct(String dbName) { + if (LOG.isDebugEnabled()) { + LOG.debug("Database product name is '" + dbName + "'"); + } + + dbName = dbName.toUpperCase(); + DbProduct result; + if (dbName.contains("MICROSOFT")) + result = DbProduct.MICROSOFT; + else if (dbName.contains("MYSQL")) + result = DbProduct.MYSQL; + else if (dbName.contains("ORACLE")) + result = DbProduct.ORACLE; + else if (dbName.contains("S3 SELECT")) + result = DbProduct.S3_SELECT; + else if (dbName.contains("ADAPTIVE SERVER ENTERPRISE")) + result = DbProduct.SYBASE; + else + result = DbProduct.POSTGRES; + + if (LOG.isDebugEnabled()) { + LOG.debug("DbProduct '" + result + "' is used"); + } + return result; + } + + private static final Logger LOG = LoggerFactory.getLogger(DbProduct.class); +} From 23ee6eb6942d0bda3d5a08fb7c655fdf16d70a7a Mon Sep 17 00:00:00 2001 From: RomaZe Date: Thu, 30 Sep 2021 09:59:14 +0300 Subject: [PATCH 02/53] ADBDEV-2091: Revert the line break back --- .../pxf/plugins/jdbc/utils/DbProduct.java | 312 +++++++++--------- 1 file changed, 156 insertions(+), 156 deletions(-) diff --git a/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/utils/DbProduct.java b/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/utils/DbProduct.java index 6bfc354d8f..7ce8907aed 100644 --- a/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/utils/DbProduct.java +++ b/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/utils/DbProduct.java @@ -1,156 +1,156 @@ -package org.greenplum.pxf.plugins.jdbc.utils; - 
-/* - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -/** - * A tool class to change PXF-JDBC plugin behaviour for certain external databases - */ -public enum DbProduct { - MICROSOFT { - @Override - public String wrapDate(Object val) { - return "'" + val + "'"; - } - - @Override - public String buildSessionQuery(String key, String value) { - return String.format("SET %s %s", key, value); - } - }, - - MYSQL { - @Override - public String wrapDate(Object val) { - return "DATE('" + val + "')"; - } - }, - - ORACLE { - @Override - public String wrapDate(Object val) { - return "to_date('" + val + "', 'YYYY-MM-DD')"; - } - - @Override - public String wrapTimestamp(Object val) { - return "to_timestamp('" + val + "', 'YYYY-MM-DD HH24:MI:SS.FF')"; - } - - @Override - public String buildSessionQuery(String key, String value) { - return String.format("ALTER SESSION SET %s = %s", key, value); - } - }, - - POSTGRES { - @Override - public String wrapDate(Object val) { - return "date'" + val + "'"; - } - }, - - S3_SELECT { - @Override - public String wrapDate(Object val) { - return "TO_TIMESTAMP('" + val + "')"; - } - - @Override - public String wrapTimestamp(Object 
val) { - return "TO_TIMESTAMP('" + val + "')"; - } - }, - - SYBASE { - @Override - public String wrapDate(Object val) { return "'" + val + "'"; } - - @Override - public String buildSessionQuery(String key, String value) { - return String.format("SET %s %s", key, value); - } - }; - - /** - * Wraps a given date value the way required by target database - * - * @param val {@link java.sql.Date} object to wrap - * @return a string with a properly wrapped date object - */ - public abstract String wrapDate(Object val); - - /** - * Wraps a given timestamp value the way required by target database - * - * @param val {@link java.sql.Timestamp} object to wrap - * @return a string with a properly wrapped timestamp object - */ - public String wrapTimestamp(Object val) { - return "'" + val + "'"; - } - - /** - * Build a query to set session-level variables for target database - * - * @param key variable name (key) - * @param value variable value - * @return a string with template SET query - */ - public String buildSessionQuery(String key, String value) { - return String.format("SET %s = %s", key, value); - } - - /** - * Get DbProduct for database by database name - * - * @param dbName database name - * @return a DbProduct of the required class - */ - public static DbProduct getDbProduct(String dbName) { - if (LOG.isDebugEnabled()) { - LOG.debug("Database product name is '" + dbName + "'"); - } - - dbName = dbName.toUpperCase(); - DbProduct result; - if (dbName.contains("MICROSOFT")) - result = DbProduct.MICROSOFT; - else if (dbName.contains("MYSQL")) - result = DbProduct.MYSQL; - else if (dbName.contains("ORACLE")) - result = DbProduct.ORACLE; - else if (dbName.contains("S3 SELECT")) - result = DbProduct.S3_SELECT; - else if (dbName.contains("ADAPTIVE SERVER ENTERPRISE")) - result = DbProduct.SYBASE; - else - result = DbProduct.POSTGRES; - - if (LOG.isDebugEnabled()) { - LOG.debug("DbProduct '" + result + "' is used"); - } - return result; - } - - private static final Logger 
LOG = LoggerFactory.getLogger(DbProduct.class); -} +package org.greenplum.pxf.plugins.jdbc.utils; + +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * A tool class to change PXF-JDBC plugin behaviour for certain external databases + */ +public enum DbProduct { + MICROSOFT { + @Override + public String wrapDate(Object val) { + return "'" + val + "'"; + } + + @Override + public String buildSessionQuery(String key, String value) { + return String.format("SET %s %s", key, value); + } + }, + + MYSQL { + @Override + public String wrapDate(Object val) { + return "DATE('" + val + "')"; + } + }, + + ORACLE { + @Override + public String wrapDate(Object val) { + return "to_date('" + val + "', 'YYYY-MM-DD')"; + } + + @Override + public String wrapTimestamp(Object val) { + return "to_timestamp('" + val + "', 'YYYY-MM-DD HH24:MI:SS.FF')"; + } + + @Override + public String buildSessionQuery(String key, String value) { + return String.format("ALTER SESSION SET %s = %s", key, value); + } + }, + + POSTGRES { + @Override + public String wrapDate(Object val) { + return "date'" + val + "'"; + } + }, + + S3_SELECT { + @Override + public String wrapDate(Object 
val) { + return "TO_TIMESTAMP('" + val + "')"; + } + + @Override + public String wrapTimestamp(Object val) { + return "TO_TIMESTAMP('" + val + "')"; + } + }, + + SYBASE { + @Override + public String wrapDate(Object val) { return "'" + val + "'"; } + + @Override + public String buildSessionQuery(String key, String value) { + return String.format("SET %s %s", key, value); + } + }; + + /** + * Wraps a given date value the way required by target database + * + * @param val {@link java.sql.Date} object to wrap + * @return a string with a properly wrapped date object + */ + public abstract String wrapDate(Object val); + + /** + * Wraps a given timestamp value the way required by target database + * + * @param val {@link java.sql.Timestamp} object to wrap + * @return a string with a properly wrapped timestamp object + */ + public String wrapTimestamp(Object val) { + return "'" + val + "'"; + } + + /** + * Build a query to set session-level variables for target database + * + * @param key variable name (key) + * @param value variable value + * @return a string with template SET query + */ + public String buildSessionQuery(String key, String value) { + return String.format("SET %s = %s", key, value); + } + + /** + * Get DbProduct for database by database name + * + * @param dbName database name + * @return a DbProduct of the required class + */ + public static DbProduct getDbProduct(String dbName) { + if (LOG.isDebugEnabled()) { + LOG.debug("Database product name is '" + dbName + "'"); + } + + dbName = dbName.toUpperCase(); + DbProduct result; + if (dbName.contains("MICROSOFT")) + result = DbProduct.MICROSOFT; + else if (dbName.contains("MYSQL")) + result = DbProduct.MYSQL; + else if (dbName.contains("ORACLE")) + result = DbProduct.ORACLE; + else if (dbName.contains("S3 SELECT")) + result = DbProduct.S3_SELECT; + else if (dbName.contains("ADAPTIVE SERVER ENTERPRISE")) + result = DbProduct.SYBASE; + else + result = DbProduct.POSTGRES; + + if (LOG.isDebugEnabled()) { + 
LOG.debug("DbProduct '" + result + "' is used"); + } + return result; + } + + private static final Logger LOG = LoggerFactory.getLogger(DbProduct.class); +} From a703dda5f946d2c10e1cd8227cdd9794c810058a Mon Sep 17 00:00:00 2001 From: RomaZe Date: Tue, 9 Nov 2021 20:34:36 +0300 Subject: [PATCH 03/53] AIS-118: Add support for custom hive-jdbc driver --- server/build.gradle | 6 ++++-- server/pxf-jdbc/build.gradle | 1 + 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/server/build.gradle b/server/build.gradle index 3729d14eb1..9aa71a0ce8 100644 --- a/server/build.gradle +++ b/server/build.gradle @@ -137,11 +137,13 @@ configure(javaProjects) { entry("hive-metastore") entry("hive-serde") entry("hive-common") + entry("hive-service") + entry("hive-service-rpc") } // 1.2.2 breaks on CDH-5.x - dependencySet(group:"org.apache.hive", version:"1.1.0") { + // We use custom hive-jdbc driver from Arenadata + dependencySet(group:"io.arenadata.hive", version:"2.3.7-arenadata-pxf-1") { entry("hive-jdbc") - entry("hive-service") } dependencySet(group:"org.apache.hive.shims", version:"${hiveVersion}") { entry("hive-shims-common") diff --git a/server/pxf-jdbc/build.gradle b/server/pxf-jdbc/build.gradle index ce44148f2d..50f6a0da05 100644 --- a/server/pxf-jdbc/build.gradle +++ b/server/pxf-jdbc/build.gradle @@ -33,6 +33,7 @@ dependencies { implementation("org.apache.hive:hive-jdbc") { transitive = false } implementation("org.apache.hive:hive-service") { transitive = false } + implementation("org.apache.hive:hive-service-rpc") { transitive = false } implementation("org.apache.thrift:libthrift") { transitive = false } implementation("org.apache.hive:hive-common") { transitive = false } implementation("org.apache.hive.shims:hive-shims-0.23") { transitive = false } From aaac2023b5ff062446e9f0a3ae0cf26bfcb307de Mon Sep 17 00:00:00 2001 From: RomaZe Date: Wed, 10 Nov 2021 23:40:24 +0300 Subject: [PATCH 04/53] AIS-118: Change Avro version to 1.9.2 Our Kafka PXF connector depends on 
avro-1.9.2. So we need to adjust version for compatibility with the connector. --- server/build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/build.gradle b/server/build.gradle index 9aa71a0ce8..02a16d0a0c 100644 --- a/server/build.gradle +++ b/server/build.gradle @@ -175,7 +175,7 @@ configure(javaProjects) { } // Avro dependencies - dependencySet(group:"org.apache.avro", version:"1.7.7") { + dependencySet(group:"org.apache.avro", version:"1.9.2") { entry("avro") entry("avro-mapred") } From f5900b5a7fdd44590b9bd3bee35e75edf6a9adfd Mon Sep 17 00:00:00 2001 From: RomaZe Date: Wed, 10 Nov 2021 23:42:47 +0300 Subject: [PATCH 05/53] AIS-118: Change Hive version to 2.3.8 The Hive version 2.3.7 is not compatible with Avro 1.9.2. --- server/gradle.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/gradle.properties b/server/gradle.properties index b5877dae5d..dd93f97eec 100644 --- a/server/gradle.properties +++ b/server/gradle.properties @@ -19,7 +19,7 @@ version=0.0.0-SNAPSHOT apiVersion=0 license=ASL 2.0 hadoopVersion=2.10.1 -hiveVersion=2.3.7 +hiveVersion=2.3.8 hiveStorageApiVersion=2.7.2 hbaseVersion=1.3.2 junitVersion=4.11 From 03dc0a592b4c569a2131bb3fae1e5c2a574068e8 Mon Sep 17 00:00:00 2001 From: RomaZe Date: Thu, 11 Nov 2021 18:30:19 +0300 Subject: [PATCH 06/53] AIS-118: Bump hive-jdbc version to 2.3.8 --- server/build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/build.gradle b/server/build.gradle index 02a16d0a0c..42c3d44895 100644 --- a/server/build.gradle +++ b/server/build.gradle @@ -142,7 +142,7 @@ configure(javaProjects) { } // 1.2.2 breaks on CDH-5.x // We use custom hive-jdbc driver from Arenadata - dependencySet(group:"io.arenadata.hive", version:"2.3.7-arenadata-pxf-1") { + dependencySet(group:"io.arenadata.hive", version:"2.3.8-arenadata-pxf-1") { entry("hive-jdbc") } dependencySet(group:"org.apache.hive.shims", version:"${hiveVersion}") { From 
2a26f6f8a610452b8344589d3cca15b5ed3fc480 Mon Sep 17 00:00:00 2001 From: RomaZe Date: Thu, 11 Nov 2021 19:31:40 +0300 Subject: [PATCH 07/53] AIS-118: Change build.gradle for hive-jdbc --- server/pxf-jdbc/build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/pxf-jdbc/build.gradle b/server/pxf-jdbc/build.gradle index 50f6a0da05..2cc9f97206 100644 --- a/server/pxf-jdbc/build.gradle +++ b/server/pxf-jdbc/build.gradle @@ -31,7 +31,7 @@ dependencies { * Transitive Dependencies for JDBC Hive Access *******************************/ - implementation("org.apache.hive:hive-jdbc") { transitive = false } + implementation("io.arenadata.hive:hive-jdbc") { transitive = false } implementation("org.apache.hive:hive-service") { transitive = false } implementation("org.apache.hive:hive-service-rpc") { transitive = false } implementation("org.apache.thrift:libthrift") { transitive = false } From 344b325dc278999ce638e11deb5ff43d8a7249f5 Mon Sep 17 00:00:00 2001 From: RomaZe Date: Thu, 11 Nov 2021 20:35:31 +0300 Subject: [PATCH 08/53] AIS-118: Fix deprecated method in AvroResolverTest --- .../org/greenplum/pxf/plugins/hdfs/AvroResolverTest.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/server/pxf-hdfs/src/test/java/org/greenplum/pxf/plugins/hdfs/AvroResolverTest.java b/server/pxf-hdfs/src/test/java/org/greenplum/pxf/plugins/hdfs/AvroResolverTest.java index 7c59a43ffd..35fb2a6400 100644 --- a/server/pxf-hdfs/src/test/java/org/greenplum/pxf/plugins/hdfs/AvroResolverTest.java +++ b/server/pxf-hdfs/src/test/java/org/greenplum/pxf/plugins/hdfs/AvroResolverTest.java @@ -445,7 +445,7 @@ private Schema getAvroSchemaForComplexTypes() { // add a RECORD with a float, int, and string inside fields.add(new Schema.Field( Schema.Type.RECORD.getName(), - createRecord(new Schema.Type[]{Schema.Type.FLOAT, Schema.Type.INT, Schema.Type.STRING}), + createRecord(schema, new Schema.Type[]{Schema.Type.FLOAT, Schema.Type.INT, Schema.Type.STRING}), "", 
null) ); @@ -488,12 +488,12 @@ private Schema createEnum(String[] symbols) { return Schema.createEnum("enum", "", null, values); } - private Schema createRecord(Schema.Type[] types) { + private Schema createRecord(Schema schema, Schema.Type[] types) { List fields = new ArrayList<>(); for (Schema.Type type : types) { fields.add(new Schema.Field(type.getName(), Schema.create(type), "", null)); } - return Schema.createRecord(fields); + return Schema.createRecord(schema.getName(), schema.getDoc(), schema.getNamespace(), false, fields); } // we can only support Unions that have 2 elements, and one has to be NULL From 36abf261e377c028bfb52b10b7d9437c6c2fe370 Mon Sep 17 00:00:00 2001 From: RomaZe Date: Fri, 10 Dec 2021 12:54:24 +0300 Subject: [PATCH 09/53] ADBDEV-2349: Fixed pxf-hdfs tests on power --- server/build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/build.gradle b/server/build.gradle index 42c3d44895..7b084b8d83 100644 --- a/server/build.gradle +++ b/server/build.gradle @@ -105,7 +105,7 @@ configure(javaProjects) { dependency("org.threeten:threeten-extra:1.5.0") dependency("org.tukaani:xz:1.8") dependency("org.wildfly.openssl:wildfly-openssl:1.0.7.Final") - dependency("org.xerial.snappy:snappy-java:1.0.5") + dependency("org.xerial.snappy:snappy-java:1.1.7.5") // Hadoop dependencies dependencySet(group:"org.apache.hadoop", version:"${hadoopVersion}") { From 691c0487bd4a3190e36c9418b5d20a0933a5bbad Mon Sep 17 00:00:00 2001 From: RomaZe Date: Thu, 23 Dec 2021 21:41:21 +0300 Subject: [PATCH 10/53] AIS-146: Bump version for hive-jdbc The previous version of the hive-jdbc depends on the vulnerability version of log4j library. 
--- server/build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/build.gradle b/server/build.gradle index 7b084b8d83..68cc9e2ade 100644 --- a/server/build.gradle +++ b/server/build.gradle @@ -142,7 +142,7 @@ configure(javaProjects) { } // 1.2.2 breaks on CDH-5.x // We use custom hive-jdbc driver from Arenadata - dependencySet(group:"io.arenadata.hive", version:"2.3.8-arenadata-pxf-1") { + dependencySet(group:"io.arenadata.hive", version:"2.3.8-arenadata-pxf-3") { entry("hive-jdbc") } dependencySet(group:"org.apache.hive.shims", version:"${hiveVersion}") { From afc408876327f02793f289d95fecbce6e0cbccce Mon Sep 17 00:00:00 2001 From: Roman Zolotov Date: Thu, 17 Mar 2022 00:16:06 +0300 Subject: [PATCH 11/53] AIS-149: Add CONVERT_ORACLE_DATE parameter --- .../pxf/plugins/jdbc/JdbcAccessor.java | 5 +++ .../pxf/plugins/jdbc/JdbcBasePlugin.java | 9 ++++ .../plugins/jdbc/JdbcPredicateBuilder.java | 17 ++++++- .../pxf/plugins/jdbc/SQLQueryBuilder.java | 8 +++- .../pxf/plugins/jdbc/utils/DbProduct.java | 44 +++++++++++++++++++ .../pxf/plugins/jdbc/utils/DbProductTest.java | 11 +++++ 6 files changed, 92 insertions(+), 2 deletions(-) diff --git a/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/JdbcAccessor.java b/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/JdbcAccessor.java index ffbe7ece48..78415885b0 100644 --- a/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/JdbcAccessor.java +++ b/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/JdbcAccessor.java @@ -107,6 +107,11 @@ public boolean openForRead() throws SQLException, SQLTimeoutException { } else if (quoteColumns) { sqlQueryBuilder.forceSetQuoteString(); } + + if (wrapDateWithTime) { + sqlQueryBuilder.setWrapDateWithTime(true); + } + // Read variables String queryRead = sqlQueryBuilder.buildSelectQuery(); LOG.trace("Select query: {}", queryRead); diff --git a/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/JdbcBasePlugin.java 
b/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/JdbcBasePlugin.java index 3197b8bedd..30a5fae450 100644 --- a/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/JdbcBasePlugin.java +++ b/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/JdbcBasePlugin.java @@ -134,6 +134,9 @@ public static TransactionIsolation typeOf(String str) { // Query timeout. protected Integer queryTimeout; + // Convert Postgres timestamp to Oracle date with time + protected boolean wrapDateWithTime = false; + // Quote columns setting set by user (three values are possible) protected Boolean quoteColumns = null; @@ -250,6 +253,12 @@ public void afterPropertiesSet() { } } + // Optional parameter. The default value is false + String wrapDateWithTimeRaw = context.getOption("CONVERT_ORACLE_DATE"); + if (wrapDateWithTimeRaw != null) { + wrapDateWithTime = Boolean.parseBoolean(wrapDateWithTimeRaw); + } + // Optional parameter. The default value is null String quoteColumnsRaw = context.getOption("QUOTE_COLUMNS"); if (quoteColumnsRaw != null) { diff --git a/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/JdbcPredicateBuilder.java b/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/JdbcPredicateBuilder.java index c5c9f4cc0a..64be4ceb42 100644 --- a/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/JdbcPredicateBuilder.java +++ b/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/JdbcPredicateBuilder.java @@ -39,6 +39,7 @@ public class JdbcPredicateBuilder extends ColumnPredicateBuilder { private final DbProduct dbProduct; + private boolean wrapDateWithTime = false; public JdbcPredicateBuilder(DbProduct dbProduct, List tupleDescription) { @@ -52,6 +53,15 @@ public JdbcPredicateBuilder(DbProduct dbProduct, this.dbProduct = dbProduct; } + public JdbcPredicateBuilder(DbProduct dbProduct, + String quoteString, + List tupleDescription, + boolean wrapDateWithTime) { + super(quoteString, tupleDescription); + this.dbProduct = 
dbProduct; + this.wrapDateWithTime = wrapDateWithTime; + } + @Override public String toString() { StringBuilder sb = getStringBuilder(); @@ -79,7 +89,12 @@ protected String serializeValue(DataType type, String value) { return dbProduct.wrapDate(value); case TIMESTAMP: // Timestamp field has different format in different databases - return dbProduct.wrapTimestamp(value); + // If wrapDateWithTime = true we have to convert timestamp to Oracle `date with time` + if (wrapDateWithTime) { + return dbProduct.wrapDateWithTime(value); + } else { + return dbProduct.wrapTimestamp(value); + } default: throw new UnsupportedOperationException(String.format( "Unsupported column type for filtering '%s' ", type.getOID())); diff --git a/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/SQLQueryBuilder.java b/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/SQLQueryBuilder.java index 1960e23563..93e23042ac 100644 --- a/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/SQLQueryBuilder.java +++ b/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/SQLQueryBuilder.java @@ -77,6 +77,7 @@ public class SQLQueryBuilder { private final List columns; private final String source; private String quoteString; + private boolean wrapDateWithTime = false; private boolean subQueryUsed = false; /** @@ -122,6 +123,10 @@ public SQLQueryBuilder(RequestContext context, DatabaseMetaData metaData, String quoteString = ""; } + public void setWrapDateWithTime(boolean wrapDateWithTime) { + this.wrapDateWithTime = wrapDateWithTime; + } + /** * Build SELECT query (with "WHERE" and partition constraints). 
* @@ -268,7 +273,8 @@ protected JdbcPredicateBuilder getPredicateBuilder() { return new JdbcPredicateBuilder( dbProduct, quoteString, - context.getTupleDescription()); + context.getTupleDescription(), + wrapDateWithTime); } /** diff --git a/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/utils/DbProduct.java b/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/utils/DbProduct.java index 7ce8907aed..52d35c1e21 100644 --- a/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/utils/DbProduct.java +++ b/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/utils/DbProduct.java @@ -32,6 +32,11 @@ public String wrapDate(Object val) { return "'" + val + "'"; } + @Override + public String wrapDateWithTime(Object val) { + return wrapTimestamp(val); + } + @Override public String buildSessionQuery(String key, String value) { return String.format("SET %s %s", key, value); @@ -43,6 +48,11 @@ public String buildSessionQuery(String key, String value) { public String wrapDate(Object val) { return "DATE('" + val + "')"; } + + @Override + public String wrapDateWithTime(Object val) { + return wrapTimestamp(val); + } }, ORACLE { @@ -51,6 +61,16 @@ public String wrapDate(Object val) { return "to_date('" + val + "', 'YYYY-MM-DD')"; } + @Override + public String wrapDateWithTime(Object val) { + String valStr = String.valueOf(val); + int index = valStr.lastIndexOf('.'); + if (index != -1) { + valStr = valStr.substring(0, index); + } + return "to_date('" + valStr + "', 'YYYY-MM-DD HH24:MI:SS')"; + } + @Override public String wrapTimestamp(Object val) { return "to_timestamp('" + val + "', 'YYYY-MM-DD HH24:MI:SS.FF')"; @@ -67,6 +87,11 @@ public String buildSessionQuery(String key, String value) { public String wrapDate(Object val) { return "date'" + val + "'"; } + + @Override + public String wrapDateWithTime(Object val) { + return wrapTimestamp(val); + } }, S3_SELECT { @@ -75,6 +100,11 @@ public String wrapDate(Object val) { return "TO_TIMESTAMP('" + 
val + "')"; } + @Override + public String wrapDateWithTime(Object val) { + return wrapTimestamp(val); + } + @Override public String wrapTimestamp(Object val) { return "TO_TIMESTAMP('" + val + "')"; @@ -85,6 +115,11 @@ public String wrapTimestamp(Object val) { @Override public String wrapDate(Object val) { return "'" + val + "'"; } + @Override + public String wrapDateWithTime(Object val) { + return wrapTimestamp(val); + } + @Override public String buildSessionQuery(String key, String value) { return String.format("SET %s %s", key, value); @@ -99,6 +134,15 @@ public String buildSessionQuery(String key, String value) { */ public abstract String wrapDate(Object val); + /** + * Wraps a given date value to the date with time. + * It might be used in some special cases. + * + * @param val {@link java.sql.Date} object to wrap + * @return a string with a properly wrapped date object + */ + public abstract String wrapDateWithTime(Object val); + /** * Wraps a given timestamp value the way required by target database * diff --git a/server/pxf-jdbc/src/test/java/org/greenplum/pxf/plugins/jdbc/utils/DbProductTest.java b/server/pxf-jdbc/src/test/java/org/greenplum/pxf/plugins/jdbc/utils/DbProductTest.java index 517cfac3df..cf859cfecf 100644 --- a/server/pxf-jdbc/src/test/java/org/greenplum/pxf/plugins/jdbc/utils/DbProductTest.java +++ b/server/pxf-jdbc/src/test/java/org/greenplum/pxf/plugins/jdbc/utils/DbProductTest.java @@ -96,6 +96,17 @@ public void testOracleDates() { } } + @Test + public void testOracleDatesWithTime() { + final String[] expected = {"to_date('2001-01-01 00:00:00', 'YYYY-MM-DD HH24:MI:SS')"}; + + DbProduct dbProduct = DbProduct.getDbProduct(DB_NAME_ORACLE); + + for (int i = 0; i < TIMESTAMPS.length; i++) { + assertEquals(expected[i], dbProduct.wrapDateWithTime(TIMESTAMPS[i])); + } + } + @Test public void testOracleTimestamps() { final String[] expected = {"to_timestamp('2001-01-01 00:00:00.0', 'YYYY-MM-DD HH24:MI:SS.FF')"}; From 
fd098b838fbfa8a2792326657bc53bbab6ccf520 Mon Sep 17 00:00:00 2001 From: Roman Zolotov Date: Thu, 17 Mar 2022 12:46:08 +0300 Subject: [PATCH 12/53] AIS-149: Update README.md --- server/pxf-jdbc/README.md | 29 +++++++++++++++++++++++++++++ 1 file changed, 29 insertions(+) diff --git a/server/pxf-jdbc/README.md b/server/pxf-jdbc/README.md index e6ab2e2d0e..e5e96b976e 100644 --- a/server/pxf-jdbc/README.md +++ b/server/pxf-jdbc/README.md @@ -124,6 +124,35 @@ Whether PXF should quote column names when constructing SQL query to the externa When this setting is not set, PXF automatically checks whether some column name should be quoted, and if so, it quotes all column names in the query. +#### Convert to Oracle date type +*Can be set only in `LOCATION` clause of external table DDL. +It is used by only PXF Oracle JDBC driver for pushdown.* + +The parameter is used for some specific cases when you need to convert Postgres `timestamp` type to `date` type in Oracle for pushdown filter. + +* **Option**: `CONVERT_ORACLE_DATE` +* **Value**: + * not set — default value is `false`. Postgres `timestamp` type will be converted to Oracle `timestamp` type (default behaviour) + * `true` (case-insensitive) — convert Postgres `timestamp` type to Oracle `date` type + * any other value or `false` — Postgres `timestamp` type will be converted to Oracle `timestamp` type (default behaviour) + +If a field is `timestamp` type in the external GP table and `CONVERT_ORACLE_DATE=true` the fields that are used in the `where` filter will be cast to `date` type in Oracle. +The milliseconds will be truncated. 
Example of the query where c3 field has `timestamp` type in the GP and `date` type in the Oracle: +``` +query in gp: SELECT c1, c2, c3 FROM ext_oracle_datetime_fix WHERE c3 >= '2022-01-01 14:00:00.123456' and c3 < '2022-01-02 03:00:00.232323'; +received query in oracle: SELECT c1, c2, c3 FROM system.tst_pxf_datetime WHERE (c3 >= to_date('2022-01-01 14:00:00', 'YYYY-MM-DD HH24:MI:SS') AND c3 < to_date('2022-01-02 03:00:00', 'YYYY-MM-DD HH24:MI:SS')) +``` + +If the parameter `CONVERT_ORACLE_DATE=false` or it is not declared in the `LOCATION`, the c3 field will be converted to `timestamp` type in Oracle (default behaviour): + +``` +query in gp: SELECT c1, c2, c3 FROM ext_oracle_datetime_fix where c3 >= '2022-01-01 12:01:00' and c3 < '2022-01-02 02:01:00'; +received query in oracle: SELECT c1, c2, c3 FROM system.tst_pxf_datetime WHERE (c3 >= to_timestamp('2022-01-01 12:01:00', 'YYYY-MM-DD HH24:MI:SS.FF') AND c3 < to_timestamp('2022-01-02 02:01:00', 'YYYY-MM-DD HH24:MI:SS.FF')) +``` + +**Notes:** +The parameter `CONVERT_ORACLE_DATE` has impact only on the fields that are used in the `where` filter and does not apply to the other fields with `timestamp` type in the table. 
+ #### Partition by *Can be set only in `LOCATION` clause of external table DDL* From 53f3dfde68ec2217dd28e0338d32a21afbd07ac5 Mon Sep 17 00:00:00 2001 From: Roman Zolotov Date: Tue, 22 Mar 2022 09:43:35 +0300 Subject: [PATCH 13/53] Update AvroResolverTest --- .../java/org/greenplum/pxf/plugins/hdfs/AvroResolverTest.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/server/pxf-hdfs/src/test/java/org/greenplum/pxf/plugins/hdfs/AvroResolverTest.java b/server/pxf-hdfs/src/test/java/org/greenplum/pxf/plugins/hdfs/AvroResolverTest.java index 78be592d73..6e9c327755 100644 --- a/server/pxf-hdfs/src/test/java/org/greenplum/pxf/plugins/hdfs/AvroResolverTest.java +++ b/server/pxf-hdfs/src/test/java/org/greenplum/pxf/plugins/hdfs/AvroResolverTest.java @@ -593,7 +593,7 @@ private Schema getAvroSchemaForComplexTypes() { // add a RECORD with a float, int, and string inside fields.add(new Schema.Field( Schema.Type.RECORD.getName(), - createRecord(schema, new Schema.Type[]{Schema.Type.FLOAT, Schema.Type.INT, Schema.Type.STRING}), + createRecord(new Schema.Type[]{Schema.Type.FLOAT, Schema.Type.INT, Schema.Type.STRING}), "", null) ); @@ -738,7 +738,7 @@ private Schema createEnum(String[] symbols) { return Schema.createEnum("enum", "", null, values); } - private Schema createRecord(Schema schema, Schema.Type[] types) { + private Schema createRecord(Schema.Type[] types) { List fields = new ArrayList<>(); for (Schema.Type type : types) { fields.add(new Schema.Field(type.getName(), Schema.create(type), "", null)); From 902499c109a092bbfb3ecdb358dec4966a67303b Mon Sep 17 00:00:00 2001 From: Roman Zolotov Date: Fri, 19 Aug 2022 11:10:43 +0300 Subject: [PATCH 14/53] Remove unnecessary dependency for log4j See description of the commit 8f9c137e --- server/build.gradle | 12 ------------ 1 file changed, 12 deletions(-) diff --git a/server/build.gradle b/server/build.gradle index 2c01b31fd0..8fa9ec8aee 100644 --- a/server/build.gradle +++ b/server/build.gradle @@ -94,18 
+94,6 @@ configure(javaProjects) { dependency("org.apache.htrace:htrace-core:3.1.0-incubating") dependency("org.apache.htrace:htrace-core4:4.0.1-incubating") - // --- bump log4j2 to 2.17.1 for CVE-2021-44228, CVE-2021-45046, and CVE-2021-45105 fixes, - // more details: https://logging.apache.org/log4j/2.x/security.html - // revert once org.springframework.boot:spring-boot-starter-log4j2 is upgraded to bundle log4j2:2.17.1+ - dependencySet(group:"org.apache.logging.log4j", version:"2.17.1") { - entry("log4j-jul") - entry("log4j-api") - entry("log4j-core") - entry("log4j-spring-boot") - } - dependency("org.apache.logging.log4j:log4j-slf4j-impl:2.17.1") - // --- end of CVE patch - dependency("org.apache.zookeeper:zookeeper:3.4.6") dependency("org.codehaus.woodstox:stax2-api:3.1.4") dependency("org.datanucleus:datanucleus-api-jdo:4.2.4") From 49fb1161b3d3f6f1817e265b7417a4a9a1492089 Mon Sep 17 00:00:00 2001 From: dimoffon Date: Wed, 21 Sep 2022 15:41:06 +0300 Subject: [PATCH 15/53] ADBDEV-2991 added logging of query filter --- .../pxf/plugins/jdbc/SQLQueryBuilder.java | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/SQLQueryBuilder.java b/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/SQLQueryBuilder.java index 93e23042ac..453b9077ff 100644 --- a/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/SQLQueryBuilder.java +++ b/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/SQLQueryBuilder.java @@ -144,7 +144,9 @@ public String buildSelectQuery() { // Insert partition constraints buildFragmenterSql(context, dbProduct, quoteString, sb); - return sb.toString(); + String query = sb.toString(); + LOG.debug("buildSelectQuery: {}", query); + return query; } /** @@ -293,18 +295,24 @@ protected TreeVisitor getPruner() { * @param query SQL query to insert constraints to. 
The query may may contain other WHERE statements */ private void buildWhereSQL(StringBuilder query) { - if (!context.hasFilter()) return; + if (!context.hasFilter()) { + LOG.debug("FILTER empty"); + return; + } JdbcPredicateBuilder jdbcPredicateBuilder = getPredicateBuilder(); try { // Parse the filter string into a expression tree Node Node root = new FilterParser().parse(context.getFilterString()); + LOG.debug("FILTER source: {}", context.getFilterString()); // Prune the parsed tree with the provided pruner and then // traverse the tree with the JDBC predicate builder to produce a predicate TRAVERSER.traverse(root, getPruner(), jdbcPredicateBuilder); // No exceptions were thrown, change the provided query - query.append(jdbcPredicateBuilder.toString()); + String where = jdbcPredicateBuilder.toString(); + LOG.debug("FILTER target: {}", where); + query.append(where); } catch (Exception e) { LOG.debug("WHERE clause is omitted: " + e.toString()); // Silence the exception and do not insert constraints From 87158f07100932db4328b601d21a07084b9ce3e7 Mon Sep 17 00:00:00 2001 From: Roman Zolotov Date: Mon, 3 Oct 2022 18:05:09 +0300 Subject: [PATCH 16/53] ADBDEV-3096: Allow set Oracle parallel instructions --- .../pxf/plugins/jdbc/utils/DbProduct.java | 3 +- .../jdbc/utils/oracle/OracleJdbcUtils.java | 9 ++ .../oracle/OracleParallelSessionParam.java | 55 +++++++++++ .../OracleParallelSessionParamFactory.java | 94 +++++++++++++++++++ .../oracle/OracleSessionQueryFactory.java | 30 ++++++ 5 files changed, 190 insertions(+), 1 deletion(-) create mode 100644 server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/utils/oracle/OracleJdbcUtils.java create mode 100644 server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/utils/oracle/OracleParallelSessionParam.java create mode 100644 server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/utils/oracle/OracleParallelSessionParamFactory.java create mode 100644 
server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/utils/oracle/OracleSessionQueryFactory.java diff --git a/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/utils/DbProduct.java b/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/utils/DbProduct.java index 52d35c1e21..7ab1d55914 100644 --- a/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/utils/DbProduct.java +++ b/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/utils/DbProduct.java @@ -19,6 +19,7 @@ * under the License. */ +import org.greenplum.pxf.plugins.jdbc.utils.oracle.OracleJdbcUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -78,7 +79,7 @@ public String wrapTimestamp(Object val) { @Override public String buildSessionQuery(String key, String value) { - return String.format("ALTER SESSION SET %s = %s", key, value); + return OracleJdbcUtils.buildSessionQuery(key, value); } }, diff --git a/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/utils/oracle/OracleJdbcUtils.java b/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/utils/oracle/OracleJdbcUtils.java new file mode 100644 index 0000000000..0e3b2fecb0 --- /dev/null +++ b/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/utils/oracle/OracleJdbcUtils.java @@ -0,0 +1,9 @@ +package org.greenplum.pxf.plugins.jdbc.utils.oracle; + +public class OracleJdbcUtils { + private static final OracleSessionQueryFactory sessionQueryFactory = new OracleSessionQueryFactory(); + + public static String buildSessionQuery(String property, String value) { + return sessionQueryFactory.create(property, value); + } +} diff --git a/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/utils/oracle/OracleParallelSessionParam.java b/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/utils/oracle/OracleParallelSessionParam.java new file mode 100644 index 0000000000..b07d64654d --- /dev/null +++ 
b/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/utils/oracle/OracleParallelSessionParam.java @@ -0,0 +1,55 @@ +package org.greenplum.pxf.plugins.jdbc.utils.oracle; + +public class OracleParallelSessionParam { + private Clause clause; + private StatementType statementType; + private String degreeOfParallelism; + + public void setClause(Clause clause) { + this.clause = clause; + } + + public void setStatementType(StatementType statementType) { + this.statementType = statementType; + } + + public void setDegreeOfParallelism(String degreeOfParallelism) { + this.degreeOfParallelism = degreeOfParallelism; + } + + public Clause getClause() { + return clause; + } + + public StatementType getStatementType() { + return statementType; + } + + public String getDegreeOfParallelism() { + return degreeOfParallelism; + } + + public enum Clause { + ENABLE("ENABLE"), + DISABLE("DISABLE"), + FORCE("FORCE"); + + public final String value; + + Clause(String value) { + this.value = value; + } + } + + public enum StatementType { + DML("DML"), + DDL("DDL"), + QUERY("QUERY"); + + public final String value; + + StatementType(String value) { + this.value = value; + } + } +} diff --git a/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/utils/oracle/OracleParallelSessionParamFactory.java b/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/utils/oracle/OracleParallelSessionParamFactory.java new file mode 100644 index 0000000000..414fc21868 --- /dev/null +++ b/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/utils/oracle/OracleParallelSessionParamFactory.java @@ -0,0 +1,94 @@ +package org.greenplum.pxf.plugins.jdbc.utils.oracle; + +import org.apache.commons.lang.StringUtils; +import org.apache.logging.log4j.util.Strings; + +import java.util.Arrays; +import java.util.HashMap; + +public class OracleParallelSessionParamFactory { + private static final String ORACLE_JDBC_SESSION_PARALLEL_PROPERTY_DELIMITER = "\\."; + + public 
OracleParallelSessionParam create(String property, String value) { + validateValue(property, value); + + HashMap map = getParallelSessionParam(value); + String clause = map.get("clause").toUpperCase(); + String statementType = map.get("statement_type").toUpperCase(); + String degreeOfParallelism = map.get("degree_of_parallelism"); + + validateParams(clause, statementType, degreeOfParallelism, property); + + OracleParallelSessionParam param = new OracleParallelSessionParam(); + param.setClause(OracleParallelSessionParam.Clause.valueOf(clause)); + param.setStatementType(OracleParallelSessionParam.StatementType.valueOf(statementType)); + param.setDegreeOfParallelism(degreeOfParallelism); + return param; + } + + private void validateValue(String property, String value) { + if (StringUtils.isBlank(value)) { + throw new IllegalArgumentException(String.format( + "Parameter %s is empty in jdbc-site.xml", property) + ); + } + if (value.split(ORACLE_JDBC_SESSION_PARALLEL_PROPERTY_DELIMITER).length < 2 + || value.split(ORACLE_JDBC_SESSION_PARALLEL_PROPERTY_DELIMITER).length > 3) { + throw new IllegalArgumentException(String.format( + "Parameter %s in jdbc-site.xml has to contain at least 2 but not more then 3 values delimited by %s", + property, ORACLE_JDBC_SESSION_PARALLEL_PROPERTY_DELIMITER) + ); + } + } + + private void validateParams(String clause, String statementType, String degreeOfParallelism, String property) { + if (!isClauseValid(clause)) { + throw new IllegalArgumentException(String.format( + "The 'clause' value %s in the parameter %s is not valid", clause, property) + ); + } + if (!isStatementTypeValid(statementType)) { + throw new IllegalArgumentException(String.format( + "The 'statement type' value %s in the parameter %s is not valid", statementType, property) + ); + } + if (!isDegreeOfParallelismValid(degreeOfParallelism)) { + throw new IllegalArgumentException(String.format( + "The 'degree of parallelism' value %s in the parameter %s is not valid", 
degreeOfParallelism, property) + ); + } + } + + private HashMap getParallelSessionParam(String value) { + HashMap params = new HashMap<>(); + String[] values = value.split(ORACLE_JDBC_SESSION_PARALLEL_PROPERTY_DELIMITER); + params.put("clause", values[0]); + params.put("statement_type", values[1]); + if (values.length == 3 && Strings.isNotBlank(values[2])) { + params.put("degree_of_parallelism", values[2]); + } else { + params.put("degree_of_parallelism", ""); + } + return params; + } + + private boolean isClauseValid(String value) { + return Arrays.stream(OracleParallelSessionParam.Clause.values()).anyMatch(e -> e.value.equalsIgnoreCase(value)); + } + + private boolean isStatementTypeValid(String value) { + return Arrays.stream(OracleParallelSessionParam.StatementType.values()).anyMatch(e -> e.value.equalsIgnoreCase(value)); + } + + private boolean isDegreeOfParallelismValid(String value) { + if (Strings.isNotEmpty(value)) { + try { + Integer.parseInt(value); + return true; + } catch (NumberFormatException e) { + return false; + } + } + return true; + } +} diff --git a/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/utils/oracle/OracleSessionQueryFactory.java b/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/utils/oracle/OracleSessionQueryFactory.java new file mode 100644 index 0000000000..1ed41d793a --- /dev/null +++ b/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/utils/oracle/OracleSessionQueryFactory.java @@ -0,0 +1,30 @@ +package org.greenplum.pxf.plugins.jdbc.utils.oracle; + +import org.apache.logging.log4j.util.Strings; + +public class OracleSessionQueryFactory { + private static final String ORACLE_JDBC_SESSION_PARALLEL_PROPERTY_PREFIX = "alter_session_parallel"; + private final OracleParallelSessionParamFactory oracleSessionParamFactory = new OracleParallelSessionParamFactory(); + + public String create(String property, String value) { + if (property.contains(ORACLE_JDBC_SESSION_PARALLEL_PROPERTY_PREFIX)) { + return 
getParallelSessionCommand(property, value); + } + return String.format("ALTER SESSION SET %s = %s", property, value); + } + + private String getParallelSessionCommand(String property, String value) { + OracleParallelSessionParam param = oracleSessionParamFactory.create(property, value); + return createParallelSessionCommand(param); + } + + private String createParallelSessionCommand(OracleParallelSessionParam param) { + if (Strings.isNotEmpty(param.getDegreeOfParallelism()) + && param.getClause() == OracleParallelSessionParam.Clause.FORCE) { + return String.format("ALTER SESSION %s PARALLEL %s PARALLEL %s", + param.getClause(), param.getStatementType(), param.getDegreeOfParallelism()); + } else { + return String.format("ALTER SESSION %s PARALLEL %s", param.getClause(), param.getStatementType()); + } + } +} From 7579b58e2ae0039e6ff2698224869a24b4ce9d63 Mon Sep 17 00:00:00 2001 From: Roman Zolotov Date: Tue, 4 Oct 2022 18:03:25 +0300 Subject: [PATCH 17/53] ADBDEV-3096: Allow set Oracle parallel instructions --- .../oracle/OracleParallelSessionParam.java | 48 +++++++++---------- 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/utils/oracle/OracleParallelSessionParam.java b/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/utils/oracle/OracleParallelSessionParam.java index b07d64654d..c32376a705 100644 --- a/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/utils/oracle/OracleParallelSessionParam.java +++ b/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/utils/oracle/OracleParallelSessionParam.java @@ -5,30 +5,6 @@ public class OracleParallelSessionParam { private StatementType statementType; private String degreeOfParallelism; - public void setClause(Clause clause) { - this.clause = clause; - } - - public void setStatementType(StatementType statementType) { - this.statementType = statementType; - } - - public void setDegreeOfParallelism(String 
degreeOfParallelism) { - this.degreeOfParallelism = degreeOfParallelism; - } - - public Clause getClause() { - return clause; - } - - public StatementType getStatementType() { - return statementType; - } - - public String getDegreeOfParallelism() { - return degreeOfParallelism; - } - public enum Clause { ENABLE("ENABLE"), DISABLE("DISABLE"), @@ -52,4 +28,28 @@ public enum StatementType { this.value = value; } } + + public void setClause(Clause clause) { + this.clause = clause; + } + + public void setStatementType(StatementType statementType) { + this.statementType = statementType; + } + + public void setDegreeOfParallelism(String degreeOfParallelism) { + this.degreeOfParallelism = degreeOfParallelism; + } + + public Clause getClause() { + return clause; + } + + public StatementType getStatementType() { + return statementType; + } + + public String getDegreeOfParallelism() { + return degreeOfParallelism; + } } From 2cd30ffb3b84fa452235a902649c4eff0aa24514 Mon Sep 17 00:00:00 2001 From: Roman Zolotov Date: Tue, 4 Oct 2022 19:22:12 +0300 Subject: [PATCH 18/53] ADBDEV-3096: Add unit tests for parallel session params --- server/pxf-jdbc/build.gradle | 1 + ...OracleParallelSessionParamFactoryTest.java | 103 ++++++++++++++++ .../oracle/OracleSessionQueryFactoryTest.java | 111 ++++++++++++++++++ 3 files changed, 215 insertions(+) create mode 100644 server/pxf-jdbc/src/test/java/org/greenplum/pxf/plugins/jdbc/utils/oracle/OracleParallelSessionParamFactoryTest.java create mode 100644 server/pxf-jdbc/src/test/java/org/greenplum/pxf/plugins/jdbc/utils/oracle/OracleSessionQueryFactoryTest.java diff --git a/server/pxf-jdbc/build.gradle b/server/pxf-jdbc/build.gradle index 2cc9f97206..0652609128 100644 --- a/server/pxf-jdbc/build.gradle +++ b/server/pxf-jdbc/build.gradle @@ -46,6 +46,7 @@ dependencies { testImplementation("org.apache.parquet:parquet-pig") testImplementation('org.springframework.boot:spring-boot-starter-test') + testImplementation('org.mockito:mockito-inline') } 
test { diff --git a/server/pxf-jdbc/src/test/java/org/greenplum/pxf/plugins/jdbc/utils/oracle/OracleParallelSessionParamFactoryTest.java b/server/pxf-jdbc/src/test/java/org/greenplum/pxf/plugins/jdbc/utils/oracle/OracleParallelSessionParamFactoryTest.java new file mode 100644 index 0000000000..2143991fec --- /dev/null +++ b/server/pxf-jdbc/src/test/java/org/greenplum/pxf/plugins/jdbc/utils/oracle/OracleParallelSessionParamFactoryTest.java @@ -0,0 +1,103 @@ +package org.greenplum.pxf.plugins.jdbc.utils.oracle; + +import org.junit.jupiter.api.Test; + +import static org.junit.jupiter.api.Assertions.*; + +class OracleParallelSessionParamFactoryTest { + + private final OracleParallelSessionParamFactory oracleParallelSessionParamFactory = new OracleParallelSessionParamFactory(); + private final String property = "jdbc.session.property.alter_session_parallel.1"; + + @Test + void createWithClauseAndStatementAndDegreeOfParallelismSuccess() { + String value = "force.query.5"; + OracleParallelSessionParam param = oracleParallelSessionParamFactory.create(property, value); + assertEquals(param.getClause(), OracleParallelSessionParam.Clause.FORCE); + assertEquals(param.getStatementType(), OracleParallelSessionParam.StatementType.QUERY); + assertEquals(param.getDegreeOfParallelism(), "5"); + } + + @Test + void createWithClauseAndStatementSuccess() { + String value = "enable.ddl"; + OracleParallelSessionParam param = oracleParallelSessionParamFactory.create(property, value); + assertEquals(param.getClause(), OracleParallelSessionParam.Clause.ENABLE); + assertEquals(param.getStatementType(), OracleParallelSessionParam.StatementType.DDL); + assertEquals(param.getDegreeOfParallelism(), ""); + } + + @Test + void createWithClauseAndStatementAndBlankDegreeOfParallelismSuccess() { + String value = "disable.dml. 
"; + String property = "jdbc.session.property.alter_session_parallel.1"; + OracleParallelSessionParam param = oracleParallelSessionParamFactory.create(property, value); + assertEquals(param.getClause(), OracleParallelSessionParam.Clause.DISABLE); + assertEquals(param.getStatementType(), OracleParallelSessionParam.StatementType.DML); + assertEquals(param.getDegreeOfParallelism(), ""); + } + + @Test + void createWithEmptyValue() { + String value = "enable.dml."; + String property = "jdbc.session.property.alter_session_parallel.1"; + OracleParallelSessionParam param = oracleParallelSessionParamFactory.create(property, value); + assertEquals(param.getClause(), OracleParallelSessionParam.Clause.ENABLE); + assertEquals(param.getStatementType(), OracleParallelSessionParam.StatementType.DML); + assertEquals(param.getDegreeOfParallelism(), ""); + } + + @Test + void createWithWrongClause() { + String value = "fake_force.query.5"; + Exception exception = assertThrows(IllegalArgumentException.class, () -> oracleParallelSessionParamFactory.create(property, value)); + String expectedMessage = "The 'clause' value FAKE_FORCE in the parameter jdbc.session.property.alter_session_parallel.1 is not valid"; + String actualMessage = exception.getMessage(); + assertEquals(actualMessage, expectedMessage); + } + + @Test + void createWithWrongStatement() { + String value = "enable.fake_statement"; + Exception exception = assertThrows(IllegalArgumentException.class, () -> oracleParallelSessionParamFactory.create(property, value)); + String expectedMessage = "The 'statement type' value FAKE_STATEMENT in the parameter jdbc.session.property.alter_session_parallel.1 is not valid"; + String actualMessage = exception.getMessage(); + assertEquals(expectedMessage, actualMessage); + } + + @Test + void createWithWrongDegreeOfParallelism() { + String value = "force.dml.fake_number"; + Exception exception = assertThrows(IllegalArgumentException.class, () -> 
oracleParallelSessionParamFactory.create(property, value)); + String expectedMessage = "The 'degree of parallelism' value fake_number in the parameter jdbc.session.property.alter_session_parallel.1 is not valid"; + String actualMessage = exception.getMessage(); + assertEquals(expectedMessage, actualMessage); + } + + @Test + void createWithWrongValueMoreThen3() { + String value = "force.dml.number.70"; + Exception exception = assertThrows(IllegalArgumentException.class, () -> oracleParallelSessionParamFactory.create(property, value)); + String expectedMessage = "Parameter jdbc.session.property.alter_session_parallel.1 in jdbc-site.xml has to contain at least 2 but not more then 3 values delimited by \\."; + String actualMessage = exception.getMessage(); + assertEquals(expectedMessage, actualMessage); + } + + @Test + void createWithWrongValueLessThen2() { + String value = "force"; + Exception exception = assertThrows(IllegalArgumentException.class, () -> oracleParallelSessionParamFactory.create(property, value)); + String expectedMessage = "Parameter jdbc.session.property.alter_session_parallel.1 in jdbc-site.xml has to contain at least 2 but not more then 3 values delimited by \\."; + String actualMessage = exception.getMessage(); + assertEquals(expectedMessage, actualMessage); + } + + @Test + void createWithBlankValue() { + String value = " "; + Exception exception = assertThrows(IllegalArgumentException.class, () -> oracleParallelSessionParamFactory.create(property, value)); + String expectedMessage = "Parameter jdbc.session.property.alter_session_parallel.1 is empty in jdbc-site.xml"; + String actualMessage = exception.getMessage(); + assertEquals(expectedMessage, actualMessage); + } +} \ No newline at end of file diff --git a/server/pxf-jdbc/src/test/java/org/greenplum/pxf/plugins/jdbc/utils/oracle/OracleSessionQueryFactoryTest.java b/server/pxf-jdbc/src/test/java/org/greenplum/pxf/plugins/jdbc/utils/oracle/OracleSessionQueryFactoryTest.java new file mode 100644 
index 0000000000..cda2451d40 --- /dev/null +++ b/server/pxf-jdbc/src/test/java/org/greenplum/pxf/plugins/jdbc/utils/oracle/OracleSessionQueryFactoryTest.java @@ -0,0 +1,111 @@ +package org.greenplum.pxf.plugins.jdbc.utils.oracle; + +import org.junit.jupiter.api.Test; +import org.mockito.MockedConstruction; + +import static org.junit.jupiter.api.Assertions.*; +import static org.mockito.Mockito.mockConstruction; +import static org.mockito.Mockito.when; + +class OracleSessionQueryFactoryTest { + + @Test + @SuppressWarnings("try") + void createParallelSessionQueryWithForceAndDegreeOfParallelism() { + String property = "jdbc.session.property.alter_session_parallel.1"; + String value = "force.query.4"; + String expectedResult = "ALTER SESSION FORCE PARALLEL QUERY PARALLEL 4"; + + OracleParallelSessionParam param = new OracleParallelSessionParam(); + param.setClause(OracleParallelSessionParam.Clause.FORCE); + param.setStatementType(OracleParallelSessionParam.StatementType.QUERY); + param.setDegreeOfParallelism("4"); + + try (MockedConstruction mocked = mockConstruction(OracleParallelSessionParamFactory.class, + (mock, context) -> when(mock.create(property, value)).thenReturn(param))) { + OracleSessionQueryFactory oracleSessionQueryFactory = new OracleSessionQueryFactory(); + String result = oracleSessionQueryFactory.create(property, value); + assertEquals(expectedResult, result); + } + } + + @Test + @SuppressWarnings("try") + void createParallelSessionQueryWithForce() { + String property = "jdbc.session.property.alter_session_parallel.1"; + String value = "force.dml"; + String expectedResult = "ALTER SESSION FORCE PARALLEL DML"; + + OracleParallelSessionParam param = new OracleParallelSessionParam(); + param.setClause(OracleParallelSessionParam.Clause.FORCE); + param.setStatementType(OracleParallelSessionParam.StatementType.DML); + param.setDegreeOfParallelism(""); + + try (MockedConstruction mocked = mockConstruction(OracleParallelSessionParamFactory.class, + (mock, 
context) -> when(mock.create(property, value)).thenReturn(param))) { + OracleSessionQueryFactory oracleSessionQueryFactory = new OracleSessionQueryFactory(); + String result = oracleSessionQueryFactory.create(property, value); + assertEquals(expectedResult, result); + } + } + + @Test + @SuppressWarnings("try") + void createParallelSessionQueryWithEnable() { + String property = "jdbc.session.property.alter_session_parallel.1"; + String value = "enable.dml.2"; + String expectedResult = "ALTER SESSION ENABLE PARALLEL DDL"; + + OracleParallelSessionParam param = new OracleParallelSessionParam(); + param.setClause(OracleParallelSessionParam.Clause.ENABLE); + param.setStatementType(OracleParallelSessionParam.StatementType.DDL); + param.setDegreeOfParallelism("2"); + + try (MockedConstruction mocked = mockConstruction(OracleParallelSessionParamFactory.class, + (mock, context) -> when(mock.create(property, value)).thenReturn(param))) { + OracleSessionQueryFactory oracleSessionQueryFactory = new OracleSessionQueryFactory(); + String result = oracleSessionQueryFactory.create(property, value); + assertEquals(expectedResult, result); + } + } + + @Test + @SuppressWarnings("try") + void createParallelSessionQueryWithDisable() { + String property = "jdbc.session.property.alter_session_parallel.1"; + String value = "disable.dml"; + String expectedResult = "ALTER SESSION DISABLE PARALLEL DML"; + + OracleParallelSessionParam param = new OracleParallelSessionParam(); + param.setClause(OracleParallelSessionParam.Clause.DISABLE); + param.setStatementType(OracleParallelSessionParam.StatementType.DML); + param.setDegreeOfParallelism(""); + + try (MockedConstruction mocked = mockConstruction(OracleParallelSessionParamFactory.class, + (mock, context) -> when(mock.create(property, value)).thenReturn(param))) { + OracleSessionQueryFactory oracleSessionQueryFactory = new OracleSessionQueryFactory(); + String result = oracleSessionQueryFactory.create(property, value); + 
assertEquals(expectedResult, result); + } + } + + @Test + @SuppressWarnings("try") + void createNotParallelSessionQuery() { + String property = "STATISTICS_LEVEL"; + String value = "TYPICAL"; + String expectedResult = "ALTER SESSION SET STATISTICS_LEVEL = TYPICAL"; + + OracleParallelSessionParam param = new OracleParallelSessionParam(); + param.setClause(OracleParallelSessionParam.Clause.ENABLE); + param.setStatementType(OracleParallelSessionParam.StatementType.DDL); + param.setDegreeOfParallelism("2"); + + try (MockedConstruction mocked = mockConstruction(OracleParallelSessionParamFactory.class, + (mock, context) -> when(mock.create(property, value)).thenReturn(param))) { + OracleSessionQueryFactory oracleSessionQueryFactory = new OracleSessionQueryFactory(); + String result = oracleSessionQueryFactory.create(property, value); + assertEquals(expectedResult, result); + } + } +} \ No newline at end of file From 30bf9baeb11d3619d2923e19758b800dbb01b961 Mon Sep 17 00:00:00 2001 From: Roman Zolotov Date: Thu, 6 Oct 2022 17:13:22 +0300 Subject: [PATCH 19/53] ADBDEV-3096: Remove redundant values for ENUM --- .../oracle/OracleParallelSessionParam.java | 24 +++++-------------- .../OracleParallelSessionParamFactory.java | 4 ++-- 2 files changed, 8 insertions(+), 20 deletions(-) diff --git a/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/utils/oracle/OracleParallelSessionParam.java b/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/utils/oracle/OracleParallelSessionParam.java index c32376a705..85e54b0c2b 100644 --- a/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/utils/oracle/OracleParallelSessionParam.java +++ b/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/utils/oracle/OracleParallelSessionParam.java @@ -6,27 +6,15 @@ public class OracleParallelSessionParam { private String degreeOfParallelism; public enum Clause { - ENABLE("ENABLE"), - DISABLE("DISABLE"), - FORCE("FORCE"); - - public final String value; - - 
Clause(String value) { - this.value = value; - } + ENABLE, + DISABLE, + FORCE } public enum StatementType { - DML("DML"), - DDL("DDL"), - QUERY("QUERY"); - - public final String value; - - StatementType(String value) { - this.value = value; - } + DML, + DDL, + QUERY } public void setClause(Clause clause) { diff --git a/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/utils/oracle/OracleParallelSessionParamFactory.java b/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/utils/oracle/OracleParallelSessionParamFactory.java index 414fc21868..8255782813 100644 --- a/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/utils/oracle/OracleParallelSessionParamFactory.java +++ b/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/utils/oracle/OracleParallelSessionParamFactory.java @@ -73,11 +73,11 @@ private HashMap getParallelSessionParam(String value) { } private boolean isClauseValid(String value) { - return Arrays.stream(OracleParallelSessionParam.Clause.values()).anyMatch(e -> e.value.equalsIgnoreCase(value)); + return Arrays.stream(OracleParallelSessionParam.Clause.values()).anyMatch(e -> e.name().equalsIgnoreCase(value)); } private boolean isStatementTypeValid(String value) { - return Arrays.stream(OracleParallelSessionParam.StatementType.values()).anyMatch(e -> e.value.equalsIgnoreCase(value)); + return Arrays.stream(OracleParallelSessionParam.StatementType.values()).anyMatch(e -> e.name().equalsIgnoreCase(value)); } private boolean isDegreeOfParallelismValid(String value) { From aacdccddb2a8194c891963b0a6f4942585167ec4 Mon Sep 17 00:00:00 2001 From: Roman Zolotov Date: Thu, 6 Oct 2022 20:18:57 +0300 Subject: [PATCH 20/53] ADBDEV-3096: Refactor OracleParallelSessionParamFactory --- .../OracleParallelSessionParamFactory.java | 73 ++++++++----------- ...OracleParallelSessionParamFactoryTest.java | 12 +-- 2 files changed, 38 insertions(+), 47 deletions(-) diff --git 
a/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/utils/oracle/OracleParallelSessionParamFactory.java b/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/utils/oracle/OracleParallelSessionParamFactory.java index 8255782813..0468aff50c 100644 --- a/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/utils/oracle/OracleParallelSessionParamFactory.java +++ b/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/utils/oracle/OracleParallelSessionParamFactory.java @@ -3,7 +3,6 @@ import org.apache.commons.lang.StringUtils; import org.apache.logging.log4j.util.Strings; -import java.util.Arrays; import java.util.HashMap; public class OracleParallelSessionParamFactory { @@ -17,48 +16,28 @@ public OracleParallelSessionParam create(String property, String value) { String statementType = map.get("statement_type").toUpperCase(); String degreeOfParallelism = map.get("degree_of_parallelism"); - validateParams(clause, statementType, degreeOfParallelism, property); - OracleParallelSessionParam param = new OracleParallelSessionParam(); - param.setClause(OracleParallelSessionParam.Clause.valueOf(clause)); - param.setStatementType(OracleParallelSessionParam.StatementType.valueOf(statementType)); - param.setDegreeOfParallelism(degreeOfParallelism); + param.setClause(getClause(clause, property)); + param.setStatementType(getStatementType(statementType, property)); + param.setDegreeOfParallelism(getDegreeOfParallelism(degreeOfParallelism, property)); return param; } private void validateValue(String property, String value) { if (StringUtils.isBlank(value)) { throw new IllegalArgumentException(String.format( - "Parameter %s is empty in jdbc-site.xml", property) + "The parameter '%s' is empty in jdbc-site.xml", property) ); } if (value.split(ORACLE_JDBC_SESSION_PARALLEL_PROPERTY_DELIMITER).length < 2 || value.split(ORACLE_JDBC_SESSION_PARALLEL_PROPERTY_DELIMITER).length > 3) { throw new IllegalArgumentException(String.format( - "Parameter %s in 
jdbc-site.xml has to contain at least 2 but not more then 3 values delimited by %s", + "The parameter '%s' in jdbc-site.xml has to contain at least 2 but not more then 3 values delimited by %s", property, ORACLE_JDBC_SESSION_PARALLEL_PROPERTY_DELIMITER) ); } } - private void validateParams(String clause, String statementType, String degreeOfParallelism, String property) { - if (!isClauseValid(clause)) { - throw new IllegalArgumentException(String.format( - "The 'clause' value %s in the parameter %s is not valid", clause, property) - ); - } - if (!isStatementTypeValid(statementType)) { - throw new IllegalArgumentException(String.format( - "The 'statement type' value %s in the parameter %s is not valid", statementType, property) - ); - } - if (!isDegreeOfParallelismValid(degreeOfParallelism)) { - throw new IllegalArgumentException(String.format( - "The 'degree of parallelism' value %s in the parameter %s is not valid", degreeOfParallelism, property) - ); - } - } - private HashMap getParallelSessionParam(String value) { HashMap params = new HashMap<>(); String[] values = value.split(ORACLE_JDBC_SESSION_PARALLEL_PROPERTY_DELIMITER); @@ -66,29 +45,41 @@ private HashMap getParallelSessionParam(String value) { params.put("statement_type", values[1]); if (values.length == 3 && Strings.isNotBlank(values[2])) { params.put("degree_of_parallelism", values[2]); - } else { - params.put("degree_of_parallelism", ""); } return params; } - private boolean isClauseValid(String value) { - return Arrays.stream(OracleParallelSessionParam.Clause.values()).anyMatch(e -> e.name().equalsIgnoreCase(value)); + private OracleParallelSessionParam.Clause getClause(String clause, String property) { + try { + return OracleParallelSessionParam.Clause.valueOf(clause); + } catch (IllegalArgumentException e) { + throw new IllegalArgumentException(String.format( + "The 'clause' value '%s' in the parameter '%s' is not valid", clause, property) + ); + } } - private boolean isStatementTypeValid(String 
value) { - return Arrays.stream(OracleParallelSessionParam.StatementType.values()).anyMatch(e -> e.name().equalsIgnoreCase(value)); + private OracleParallelSessionParam.StatementType getStatementType(String statementType, String property) { + try { + return OracleParallelSessionParam.StatementType.valueOf(statementType); + } catch (IllegalArgumentException e) { + throw new IllegalArgumentException(String.format( + "The 'statement type' value '%s' in the parameter '%s' is not valid", statementType, property) + ); + } } - private boolean isDegreeOfParallelismValid(String value) { - if (Strings.isNotEmpty(value)) { - try { - Integer.parseInt(value); - return true; - } catch (NumberFormatException e) { - return false; - } + private String getDegreeOfParallelism(String degreeOfParallelism, String property) { + if (degreeOfParallelism == null) { + return Strings.EMPTY; + } + try { + Integer.parseInt(degreeOfParallelism); + return degreeOfParallelism; + } catch (NumberFormatException nfe) { + throw new IllegalArgumentException(String.format( + "The 'degree of parallelism' value '%s' in the parameter '%s' is not valid", degreeOfParallelism, property) + ); } - return true; } } diff --git a/server/pxf-jdbc/src/test/java/org/greenplum/pxf/plugins/jdbc/utils/oracle/OracleParallelSessionParamFactoryTest.java b/server/pxf-jdbc/src/test/java/org/greenplum/pxf/plugins/jdbc/utils/oracle/OracleParallelSessionParamFactoryTest.java index 2143991fec..0eb7484b55 100644 --- a/server/pxf-jdbc/src/test/java/org/greenplum/pxf/plugins/jdbc/utils/oracle/OracleParallelSessionParamFactoryTest.java +++ b/server/pxf-jdbc/src/test/java/org/greenplum/pxf/plugins/jdbc/utils/oracle/OracleParallelSessionParamFactoryTest.java @@ -51,7 +51,7 @@ void createWithEmptyValue() { void createWithWrongClause() { String value = "fake_force.query.5"; Exception exception = assertThrows(IllegalArgumentException.class, () -> oracleParallelSessionParamFactory.create(property, value)); - String expectedMessage = "The 
'clause' value FAKE_FORCE in the parameter jdbc.session.property.alter_session_parallel.1 is not valid"; + String expectedMessage = "The 'clause' value 'FAKE_FORCE' in the parameter 'jdbc.session.property.alter_session_parallel.1' is not valid"; String actualMessage = exception.getMessage(); assertEquals(actualMessage, expectedMessage); } @@ -60,7 +60,7 @@ void createWithWrongClause() { void createWithWrongStatement() { String value = "enable.fake_statement"; Exception exception = assertThrows(IllegalArgumentException.class, () -> oracleParallelSessionParamFactory.create(property, value)); - String expectedMessage = "The 'statement type' value FAKE_STATEMENT in the parameter jdbc.session.property.alter_session_parallel.1 is not valid"; + String expectedMessage = "The 'statement type' value 'FAKE_STATEMENT' in the parameter 'jdbc.session.property.alter_session_parallel.1' is not valid"; String actualMessage = exception.getMessage(); assertEquals(expectedMessage, actualMessage); } @@ -69,7 +69,7 @@ void createWithWrongStatement() { void createWithWrongDegreeOfParallelism() { String value = "force.dml.fake_number"; Exception exception = assertThrows(IllegalArgumentException.class, () -> oracleParallelSessionParamFactory.create(property, value)); - String expectedMessage = "The 'degree of parallelism' value fake_number in the parameter jdbc.session.property.alter_session_parallel.1 is not valid"; + String expectedMessage = "The 'degree of parallelism' value 'fake_number' in the parameter 'jdbc.session.property.alter_session_parallel.1' is not valid"; String actualMessage = exception.getMessage(); assertEquals(expectedMessage, actualMessage); } @@ -78,7 +78,7 @@ void createWithWrongDegreeOfParallelism() { void createWithWrongValueMoreThen3() { String value = "force.dml.number.70"; Exception exception = assertThrows(IllegalArgumentException.class, () -> oracleParallelSessionParamFactory.create(property, value)); - String expectedMessage = "Parameter 
jdbc.session.property.alter_session_parallel.1 in jdbc-site.xml has to contain at least 2 but not more then 3 values delimited by \\."; + String expectedMessage = "The parameter 'jdbc.session.property.alter_session_parallel.1' in jdbc-site.xml has to contain at least 2 but not more then 3 values delimited by \\."; String actualMessage = exception.getMessage(); assertEquals(expectedMessage, actualMessage); } @@ -87,7 +87,7 @@ void createWithWrongValueMoreThen3() { void createWithWrongValueLessThen2() { String value = "force"; Exception exception = assertThrows(IllegalArgumentException.class, () -> oracleParallelSessionParamFactory.create(property, value)); - String expectedMessage = "Parameter jdbc.session.property.alter_session_parallel.1 in jdbc-site.xml has to contain at least 2 but not more then 3 values delimited by \\."; + String expectedMessage = "The parameter 'jdbc.session.property.alter_session_parallel.1' in jdbc-site.xml has to contain at least 2 but not more then 3 values delimited by \\."; String actualMessage = exception.getMessage(); assertEquals(expectedMessage, actualMessage); } @@ -96,7 +96,7 @@ void createWithWrongValueLessThen2() { void createWithBlankValue() { String value = " "; Exception exception = assertThrows(IllegalArgumentException.class, () -> oracleParallelSessionParamFactory.create(property, value)); - String expectedMessage = "Parameter jdbc.session.property.alter_session_parallel.1 is empty in jdbc-site.xml"; + String expectedMessage = "The parameter 'jdbc.session.property.alter_session_parallel.1' is empty in jdbc-site.xml"; String actualMessage = exception.getMessage(); assertEquals(expectedMessage, actualMessage); } From 718f8456c713e12e2c1ffaf5e9d5f049cd73cbfe Mon Sep 17 00:00:00 2001 From: Georgy Shelkovy Date: Thu, 22 Sep 2022 13:51:21 +0500 Subject: [PATCH 21/53] ADBDEV-3060: Incorrect work bool data type in query to pxf foreign table --- external-table/src/pxffilters.c | 6 +++++ external-table/src/pxfheaders.c | 27 
+++++++------------ fdw/pxf_deparse.c | 1 + .../expected/FDW_FilterPushDownTest.out | 23 ++++++++++++++++ regression/expected/FilterPushDownTest.out | 23 ++++++++++++++++ regression/sql/FDW_FilterPushDownTest.sql | 6 +++++ regression/sql/FilterPushDownTest.sql | 6 +++++ 7 files changed, 75 insertions(+), 17 deletions(-) diff --git a/external-table/src/pxffilters.c b/external-table/src/pxffilters.c index 869b181aca..9d55df7d45 100644 --- a/external-table/src/pxffilters.c +++ b/external-table/src/pxffilters.c @@ -1427,6 +1427,12 @@ extractPxfAttributes(List *quals, bool *qualsAreSupported) append_attr_from_var((Var *) expr->arg, attributes); break; } + case T_Var: + { + attributes = + append_attr_from_var((Var*) node, attributes); + break; + } default: { /* diff --git a/external-table/src/pxfheaders.c b/external-table/src/pxfheaders.c index f66053b659..60e2b9a08f 100644 --- a/external-table/src/pxfheaders.c +++ b/external-table/src/pxfheaders.c @@ -374,7 +374,6 @@ add_projection_desc_httpheader(CHURL_HEADERS headers, int i; int dropped_count; int number; - int numTargetList; #if PG_VERSION_NUM < 90400 int numSimpleVars; #endif @@ -385,7 +384,7 @@ add_projection_desc_httpheader(CHURL_HEADERS headers, TupleDesc tupdesc; initStringInfo(&formatter); - numTargetList = 0; + number = 0; #if PG_VERSION_NUM >= 90400 /* @@ -421,7 +420,7 @@ add_projection_desc_httpheader(CHURL_HEADERS headers, { add_projection_index_header(headers, formatter, attno - 1, long_number); - numTargetList++; + number++; } } @@ -434,22 +433,8 @@ add_projection_desc_httpheader(CHURL_HEADERS headers, } #endif - number = numTargetList + -#if PG_VERSION_NUM >= 90400 - projInfo->pi_numSimpleVars + -#else - numSimpleVars + -#endif - list_length(qualsAttributes); - if (number == 0) - return; - attrs_used = NULL; - /* Convert the number of projection columns to a string */ - pg_ltoa(number, long_number); - churl_headers_append(headers, "X-GP-ATTRS-PROJ", long_number); - #if PG_VERSION_NUM >= 90400 for (i = 0; 
i < projInfo->pi_numSimpleVars; i++) #else @@ -492,9 +477,17 @@ add_projection_desc_httpheader(CHURL_HEADERS headers, /* Shift the column index by the running dropped_count */ add_projection_index_header(headers, formatter, i - 1 - dropped_count, long_number); + number++; } } + if (number != 0) + { + /* Convert the number of projection columns to a string */ + pg_ltoa(number, long_number); + churl_headers_append(headers, "X-GP-ATTRS-PROJ", long_number); + } + list_free(qualsAttributes); pfree(formatter.data); bms_free(attrs_used); diff --git a/fdw/pxf_deparse.c b/fdw/pxf_deparse.c index 602463198b..572fc16f98 100644 --- a/fdw/pxf_deparse.c +++ b/fdw/pxf_deparse.c @@ -73,5 +73,6 @@ classifyConditions(PlannerInfo *root, /* for now, just assume that all WHERE clauses are OK on remote */ *remote_conds = lappend(*remote_conds, ri); + *local_conds = lappend(*local_conds, ri); } } diff --git a/regression/expected/FDW_FilterPushDownTest.out b/regression/expected/FDW_FilterPushDownTest.out index 7db99f01f7..e33f61301b 100644 --- a/regression/expected/FDW_FilterPushDownTest.out +++ b/regression/expected/FDW_FilterPushDownTest.out @@ -3,6 +3,29 @@ ----------------------------------------------------- -- Check that the filter is being pushed down. 
We create an external table -- that returns the filter being sent from the C-side +CREATE SERVER loopback FOREIGN DATA WRAPPER jdbc_pxf_fdw OPTIONS (jdbc_driver 'org.postgresql.Driver', db_url 'jdbc:postgresql://localhost:5432/gpadmin'); +CREATE USER MAPPING FOR CURRENT_USER SERVER loopback; +CREATE FOREIGN TABLE foreign_tb_test (id int, v_text text, v_bool bool) SERVER loopback OPTIONS ( resource 'public.tb_test_t' ); +explain analyze select v_text from foreign_tb_test where v_text = '5' and v_bool = false ; + QUERY PLAN +------------------------------------------------------------------------------------------------------------------------------------ + Gather Motion 3:1 (slice1; segments: 3) (cost=50000.00..50000.00 rows=1000 width=32) (actual time=25.552..25.554 rows=1 loops=1) + -> Foreign Scan on foreign_tb_test (cost=50000.00..50000.00 rows=334 width=32) (actual time=0.369..0.371 rows=1 loops=1) + Filter: ((NOT v_bool) AND (v_text = '5'::text)) + Planning time: 3.438 ms + (slice0) Executor memory: 59K bytes. + (slice1) Executor memory: 111K bytes avg x 3 workers, 138K bytes max (seg1). + Memory used: 128000kB + Optimizer: Postgres query optimizer + Execution time: 25.723 ms +(9 rows) + +select v_text from foreign_tb_test where v_text = '5' and v_bool = false ; + v_text +-------- + 5 +(1 row) + CREATE FOREIGN DATA WRAPPER pxf_filter_push_down_fdw HANDLER pxf_fdw_handler VALIDATOR pxf_fdw_validator diff --git a/regression/expected/FilterPushDownTest.out b/regression/expected/FilterPushDownTest.out index 6ce195b4c9..e2e2d9f872 100644 --- a/regression/expected/FilterPushDownTest.out +++ b/regression/expected/FilterPushDownTest.out @@ -3,6 +3,29 @@ ----------------------------------------------------- -- Check that the filter is being pushed down. 
We create an external table -- that returns the filter being sent from the C-side +create table tb_test_t(id serial primary key, v_text text, v_bool bool); +insert into tb_test_t(v_text, v_bool) select v::text, false from generate_series(1, 20) v; +CREATE EXTERNAL TABLE tb_test_t_pxf(id int, v_text text, v_bool bool) LOCATION('pxf://public.tb_test_t?PROFILE=JDBC&JDBC_DRIVER=org.postgresql.Driver&DB_URL=jdbc:postgresql://localhost:5432/gpadmin&USER=gpadmin') FORMAT 'CUSTOM' (FORMATTER='pxfwritable_import'); +explain analyze select v_text from tb_test_t_pxf where v_text = '5' and v_bool = false ; + QUERY PLAN +------------------------------------------------------------------------------------------------------------------------------- + Gather Motion 3:1 (slice1; segments: 3) (cost=0.00..469.21 rows=237038 width=8) (actual time=12.759..12.760 rows=1 loops=1) + -> External Scan on tb_test_t_pxf (cost=0.00..462.14 rows=79013 width=8) (actual time=12.051..12.079 rows=1 loops=1) + Filter: ((v_text = '5'::text) AND (NOT v_bool)) + Planning time: 6.030 ms + (slice0) Executor memory: 132K bytes. + (slice1) Executor memory: 130K bytes avg x 3 workers, 130K bytes max (seg0). 
+ Memory used: 128000kB + Optimizer: Pivotal Optimizer (GPORCA) + Execution time: 13.066 ms +(9 rows) + +select v_text from tb_test_t_pxf where v_text = '5' and v_bool = false ; + v_text +-------- + 5 +(1 row) + DROP EXTERNAL TABLE IF EXISTS test_filter CASCADE; CREATE EXTERNAL TABLE test_filter (t0 text, a1 integer, b2 boolean, filterValue text) LOCATION (E'pxf://dummy_path?FRAGMENTER=org.greenplum.pxf.diagnostic.FilterVerifyFragmenter&ACCESSOR=org.greenplum.pxf.diagnostic.UserDataVerifyAccessor&RESOLVER=org.greenplum.pxf.plugins.hdfs.StringPassResolver') diff --git a/regression/sql/FDW_FilterPushDownTest.sql b/regression/sql/FDW_FilterPushDownTest.sql index cf2e869a55..c1fc0a4c8c 100644 --- a/regression/sql/FDW_FilterPushDownTest.sql +++ b/regression/sql/FDW_FilterPushDownTest.sql @@ -6,6 +6,12 @@ -- Check that the filter is being pushed down. We create an external table -- that returns the filter being sent from the C-side +CREATE SERVER loopback FOREIGN DATA WRAPPER jdbc_pxf_fdw OPTIONS (jdbc_driver 'org.postgresql.Driver', db_url 'jdbc:postgresql://localhost:5432/gpadmin'); +CREATE USER MAPPING FOR CURRENT_USER SERVER loopback; +CREATE FOREIGN TABLE foreign_tb_test (id int, v_text text, v_bool bool) SERVER loopback OPTIONS ( resource 'public.tb_test_t' ); +explain analyze select v_text from foreign_tb_test where v_text = '5' and v_bool = false ; +select v_text from foreign_tb_test where v_text = '5' and v_bool = false ; + CREATE FOREIGN DATA WRAPPER pxf_filter_push_down_fdw HANDLER pxf_fdw_handler VALIDATOR pxf_fdw_validator diff --git a/regression/sql/FilterPushDownTest.sql b/regression/sql/FilterPushDownTest.sql index dd7b8bc959..7dc8aef321 100644 --- a/regression/sql/FilterPushDownTest.sql +++ b/regression/sql/FilterPushDownTest.sql @@ -6,6 +6,12 @@ -- Check that the filter is being pushed down. 
We create an external table -- that returns the filter being sent from the C-side +create table tb_test_t(id serial primary key, v_text text, v_bool bool); +insert into tb_test_t(v_text, v_bool) select v::text, false from generate_series(1, 20) v; +CREATE EXTERNAL TABLE tb_test_t_pxf(id int, v_text text, v_bool bool) LOCATION('pxf://public.tb_test_t?PROFILE=JDBC&JDBC_DRIVER=org.postgresql.Driver&DB_URL=jdbc:postgresql://localhost:5432/gpadmin&USER=gpadmin') FORMAT 'CUSTOM' (FORMATTER='pxfwritable_import'); +explain analyze select v_text from tb_test_t_pxf where v_text = '5' and v_bool = false ; +select v_text from tb_test_t_pxf where v_text = '5' and v_bool = false ; + DROP EXTERNAL TABLE IF EXISTS test_filter CASCADE; CREATE EXTERNAL TABLE test_filter (t0 text, a1 integer, b2 boolean, filterValue text) From 31da9e6b0b886b0744f3ce2cab893c4444c607e4 Mon Sep 17 00:00:00 2001 From: Alexander Denissov Date: Tue, 27 Sep 2022 12:20:39 -0700 Subject: [PATCH 22/53] Updated projection and filter tests, de-dup targetList attrs --- .../expected/query01.ans | 74 +++++++++++++++++-- .../checkColumnProjection/sql/query01.sql | 22 ++++-- .../checkFilterPushDown/expected/query01.ans | 48 ++++++++++++ .../checkFilterPushDown/sql/query01.sql | 12 +++ external-table/src/pxfheaders.c | 8 +- .../expected/FDW_FilterPushDownTest.out | 51 +++++++------ regression/expected/FilterPushDownTest.out | 51 +++++++------ regression/sql/FDW_FilterPushDownTest.sql | 15 ++-- regression/sql/FilterPushDownTest.sql | 14 ++-- 9 files changed, 220 insertions(+), 75 deletions(-) diff --git a/automation/tincrepo/main/pxf/features/columnprojection/checkColumnProjection/expected/query01.ans b/automation/tincrepo/main/pxf/features/columnprojection/checkColumnProjection/expected/query01.ans index ed29cee275..9b0f70cb1f 100755 --- a/automation/tincrepo/main/pxf/features/columnprojection/checkColumnProjection/expected/query01.ans +++ 
b/automation/tincrepo/main/pxf/features/columnprojection/checkColumnProjection/expected/query01.ans @@ -56,9 +56,40 @@ SELECT colprojvalue FROM test_column_projection ORDER BY t0; t0|colprojvalue (10 rows) --- Column Projection is not supported for boolean? --- SELECT t0, colprojvalue FROM test_column_projection WHERE b2 ORDER BY t0; --- +SELECT t0, colprojvalue FROM test_column_projection WHERE b2 ORDER BY t0; + t0 | colprojvalue +----+-------------------- + A | t0|b2|colprojvalue + C | t0|b2|colprojvalue + E | t0|b2|colprojvalue + G | t0|b2|colprojvalue + I | t0|b2|colprojvalue +(5 rows) + +SELECT t0, a1, colprojvalue FROM test_column_projection WHERE a1 < 5 AND b2 = false ORDER BY t0; + t0 | a1 | colprojvalue +----+----+----------------------- + B | 1 | t0|a1|b2|colprojvalue + D | 3 | t0|a1|b2|colprojvalue +(2 rows) + +SELECT sqrt(a1), colprojvalue FROM test_column_projection WHERE a1 < 5 AND b2 = false ORDER BY t0; + sqrt | colprojvalue +------------------+----------------------- + 1 | t0|a1|b2|colprojvalue + 1.73205080756888 | t0|a1|b2|colprojvalue +(2 rows) + +SELECT sqrt(a1), colprojvalue FROM test_column_projection WHERE b2 = false ORDER BY t0; + sqrt | colprojvalue +------------------+----------------------- + 1 | t0|a1|b2|colprojvalue + 1.73205080756888 | t0|a1|b2|colprojvalue + 2.23606797749979 | t0|a1|b2|colprojvalue + 2.64575131106459 | t0|a1|b2|colprojvalue + 3 | t0|a1|b2|colprojvalue +(5 rows) + SELECT t0, colprojvalue FROM test_column_projection WHERE a1 < 5 ORDER BY t0; t0 | colprojvalue ----+-------------------- @@ -196,9 +227,40 @@ SELECT colprojvalue FROM test_column_projection ORDER BY t0; t0|colprojvalue (10 rows) --- Column Projection is not supported for boolean? 
--- SELECT t0, colprojvalue FROM test_column_projection WHERE b2 ORDER BY t0; --- +SELECT t0, colprojvalue FROM test_column_projection WHERE b2 ORDER BY t0; + t0 | colprojvalue +----+-------------------- + A | t0|b2|colprojvalue + C | t0|b2|colprojvalue + E | t0|b2|colprojvalue + G | t0|b2|colprojvalue + I | t0|b2|colprojvalue +(5 rows) + +SELECT t0, a1, colprojvalue FROM test_column_projection WHERE a1 < 5 AND b2 = false ORDER BY t0; + t0 | a1 | colprojvalue +----+----+----------------------- + B | 1 | t0|a1|b2|colprojvalue + D | 3 | t0|a1|b2|colprojvalue +(2 rows) + +SELECT sqrt(a1), colprojvalue FROM test_column_projection WHERE a1 < 5 AND b2 = false ORDER BY t0; + sqrt | colprojvalue +------------------+----------------------- + 1 | t0|a1|b2|colprojvalue + 1.73205080756888 | t0|a1|b2|colprojvalue +(2 rows) + +SELECT sqrt(a1), colprojvalue FROM test_column_projection WHERE b2 = false ORDER BY t0; + sqrt | colprojvalue +------------------+----------------------- + 1 | t0|a1|b2|colprojvalue + 1.73205080756888 | t0|a1|b2|colprojvalue + 2.23606797749979 | t0|a1|b2|colprojvalue + 2.64575131106459 | t0|a1|b2|colprojvalue + 3 | t0|a1|b2|colprojvalue +(5 rows) + SELECT t0, colprojvalue FROM test_column_projection WHERE a1 < 5 ORDER BY t0; t0 | colprojvalue ----+-------------------- diff --git a/automation/tincrepo/main/pxf/features/columnprojection/checkColumnProjection/sql/query01.sql b/automation/tincrepo/main/pxf/features/columnprojection/checkColumnProjection/sql/query01.sql index cc7490ac3d..9d9ea79dc1 100755 --- a/automation/tincrepo/main/pxf/features/columnprojection/checkColumnProjection/sql/query01.sql +++ b/automation/tincrepo/main/pxf/features/columnprojection/checkColumnProjection/sql/query01.sql @@ -13,9 +13,14 @@ SELECT t0, colprojvalue FROM test_column_projection ORDER BY t0; SELECT colprojvalue FROM test_column_projection ORDER BY t0; --- Column Projection is not supported for boolean? 
--- SELECT t0, colprojvalue FROM test_column_projection WHERE b2 ORDER BY t0; --- +SELECT t0, colprojvalue FROM test_column_projection WHERE b2 ORDER BY t0; + +SELECT t0, a1, colprojvalue FROM test_column_projection WHERE a1 < 5 AND b2 = false ORDER BY t0; + +SELECT sqrt(a1), colprojvalue FROM test_column_projection WHERE a1 < 5 AND b2 = false ORDER BY t0; + +SELECT sqrt(a1), colprojvalue FROM test_column_projection WHERE b2 = false ORDER BY t0; + SELECT t0, colprojvalue FROM test_column_projection WHERE a1 < 5 ORDER BY t0; SELECT t0, colprojvalue FROM test_column_projection WHERE a1 <= 5 ORDER BY t0; @@ -41,9 +46,14 @@ SELECT t0, colprojvalue FROM test_column_projection ORDER BY t0; SELECT colprojvalue FROM test_column_projection ORDER BY t0; --- Column Projection is not supported for boolean? --- SELECT t0, colprojvalue FROM test_column_projection WHERE b2 ORDER BY t0; --- +SELECT t0, colprojvalue FROM test_column_projection WHERE b2 ORDER BY t0; + +SELECT t0, a1, colprojvalue FROM test_column_projection WHERE a1 < 5 AND b2 = false ORDER BY t0; + +SELECT sqrt(a1), colprojvalue FROM test_column_projection WHERE a1 < 5 AND b2 = false ORDER BY t0; + +SELECT sqrt(a1), colprojvalue FROM test_column_projection WHERE b2 = false ORDER BY t0; + SELECT t0, colprojvalue FROM test_column_projection WHERE a1 < 5 ORDER BY t0; SELECT t0, colprojvalue FROM test_column_projection WHERE a1 <= 5 ORDER BY t0; diff --git a/automation/tincrepo/main/pxf/features/filterpushdown/checkFilterPushDown/expected/query01.ans b/automation/tincrepo/main/pxf/features/filterpushdown/checkFilterPushDown/expected/query01.ans index 65cd72a634..95f0bf4700 100755 --- a/automation/tincrepo/main/pxf/features/filterpushdown/checkFilterPushDown/expected/query01.ans +++ b/automation/tincrepo/main/pxf/features/filterpushdown/checkFilterPushDown/expected/query01.ans @@ -41,6 +41,30 @@ SELECT * FROM test_filter WHERE b2 = false ORDER BY t0, a1; J | 9 | f | a2c16s4dtrueo0l2 (5 rows) +SELECT t0, a1, filtervalue 
FROM test_filter WHERE a1 < 5 AND b2 = false ORDER BY t0, a1; + t0 | a1 | filtervalue +----+----+------------------------------- + B | 1 | a2c16s4dtrueo0l2a1c23s1d5o1l0 + D | 3 | a2c16s4dtrueo0l2a1c23s1d5o1l0 +(2 rows) + +SELECT sqrt(a1), filtervalue FROM test_filter WHERE a1 < 5 AND b2 = false ORDER BY t0, a1; + sqrt | filtervalue +------------------+------------------------------- + 1 | a2c16s4dtrueo0l2a1c23s1d5o1l0 + 1.73205080756888 | a2c16s4dtrueo0l2a1c23s1d5o1l0 +(2 rows) + +SELECT sqrt(a1), filtervalue FROM test_filter WHERE b2 = false ORDER BY t0; + sqrt | filtervalue +------------------+------------------ + 1 | a2c16s4dtrueo0l2 + 1.73205080756888 | a2c16s4dtrueo0l2 + 2.23606797749979 | a2c16s4dtrueo0l2 + 2.64575131106459 | a2c16s4dtrueo0l2 + 3 | a2c16s4dtrueo0l2 +(5 rows) + SELECT * FROM test_filter WHERE b2 = false AND (a1 = 1 OR a1 = 10) ORDER BY t0, a1; t0 | a1 | b2 | filtervalue ----+----+----+--------------------------------------------- @@ -97,6 +121,30 @@ SELECT * FROM test_filter WHERE b2 = false ORDER BY t0, a1; J | 9 | f | a2c16s4dtrueo0l2 (5 rows) +SELECT t0, a1, filtervalue FROM test_filter WHERE a1 < 5 AND b2 = false ORDER BY t0, a1; + t0 | a1 | filtervalue +----+----+------------------------------- + B | 1 | a1c23s1d5o1a2c16s4dtrueo0l2l0 + D | 3 | a1c23s1d5o1a2c16s4dtrueo0l2l0 +(2 rows) + +SELECT sqrt(a1), filtervalue FROM test_filter WHERE a1 < 5 AND b2 = false ORDER BY t0, a1; + sqrt | filtervalue +------------------+------------------------------- + 1 | a1c23s1d5o1a2c16s4dtrueo0l2l0 + 1.73205080756888 | a1c23s1d5o1a2c16s4dtrueo0l2l0 +(2 rows) + +SELECT sqrt(a1), filtervalue FROM test_filter WHERE b2 = false ORDER BY t0; + sqrt | filtervalue +------------------+------------------ + 1 | a2c16s4dtrueo0l2 + 1.73205080756888 | a2c16s4dtrueo0l2 + 2.23606797749979 | a2c16s4dtrueo0l2 + 2.64575131106459 | a2c16s4dtrueo0l2 + 3 | a2c16s4dtrueo0l2 +(5 rows) + SELECT * FROM test_filter WHERE b2 = false AND (a1 = 1 OR a1 = 10) ORDER BY t0, a1; t0 | a1 | 
b2 | filtervalue ----+----+----+--------------------------------------------- diff --git a/automation/tincrepo/main/pxf/features/filterpushdown/checkFilterPushDown/sql/query01.sql b/automation/tincrepo/main/pxf/features/filterpushdown/checkFilterPushDown/sql/query01.sql index f5d01dbe8c..a5c75cd31f 100755 --- a/automation/tincrepo/main/pxf/features/filterpushdown/checkFilterPushDown/sql/query01.sql +++ b/automation/tincrepo/main/pxf/features/filterpushdown/checkFilterPushDown/sql/query01.sql @@ -13,6 +13,12 @@ SELECT * FROM test_filter WHERE t0 = 'B' OR (a1 >= 0 AND a1 <= 2) ORDER BY t0, SELECT * FROM test_filter WHERE b2 = false ORDER BY t0, a1; +SELECT t0, a1, filtervalue FROM test_filter WHERE a1 < 5 AND b2 = false ORDER BY t0, a1; + +SELECT sqrt(a1), filtervalue FROM test_filter WHERE a1 < 5 AND b2 = false ORDER BY t0, a1; + +SELECT sqrt(a1), filtervalue FROM test_filter WHERE b2 = false ORDER BY t0; + SELECT * FROM test_filter WHERE b2 = false AND (a1 = 1 OR a1 = 10) ORDER BY t0, a1; SELECT * FROM test_filter WHERE b2 = false OR (a1 >= 0 AND a1 <= 2) ORDER BY t0, a1; @@ -29,6 +35,12 @@ SELECT * FROM test_filter WHERE t0 = 'B' OR (a1 >= 0 AND a1 <= 2) ORDER BY t0, SELECT * FROM test_filter WHERE b2 = false ORDER BY t0, a1; +SELECT t0, a1, filtervalue FROM test_filter WHERE a1 < 5 AND b2 = false ORDER BY t0, a1; + +SELECT sqrt(a1), filtervalue FROM test_filter WHERE a1 < 5 AND b2 = false ORDER BY t0, a1; + +SELECT sqrt(a1), filtervalue FROM test_filter WHERE b2 = false ORDER BY t0; + SELECT * FROM test_filter WHERE b2 = false AND (a1 = 1 OR a1 = 10) ORDER BY t0, a1; SELECT * FROM test_filter WHERE b2 = false OR (a1 >= 0 AND a1 <= 2) ORDER BY t0, a1; diff --git a/external-table/src/pxfheaders.c b/external-table/src/pxfheaders.c index 60e2b9a08f..f43fe145fc 100644 --- a/external-table/src/pxfheaders.c +++ b/external-table/src/pxfheaders.c @@ -384,6 +384,7 @@ add_projection_desc_httpheader(CHURL_HEADERS headers, TupleDesc tupdesc; initStringInfo(&formatter); + 
attrs_used = NULL; number = 0; #if PG_VERSION_NUM >= 90400 @@ -418,9 +419,9 @@ add_projection_desc_httpheader(CHURL_HEADERS headers, int attno = lfirst_int(lc1); if (attno > InvalidAttrNumber) { - add_projection_index_header(headers, - formatter, attno - 1, long_number); - number++; + attrs_used = + bms_add_member(attrs_used, + attno - FirstLowInvalidHeapAttributeNumber); } } @@ -433,7 +434,6 @@ add_projection_desc_httpheader(CHURL_HEADERS headers, } #endif - attrs_used = NULL; #if PG_VERSION_NUM >= 90400 for (i = 0; i < projInfo->pi_numSimpleVars; i++) diff --git a/regression/expected/FDW_FilterPushDownTest.out b/regression/expected/FDW_FilterPushDownTest.out index e33f61301b..ba066edce1 100644 --- a/regression/expected/FDW_FilterPushDownTest.out +++ b/regression/expected/FDW_FilterPushDownTest.out @@ -3,29 +3,6 @@ ----------------------------------------------------- -- Check that the filter is being pushed down. We create an external table -- that returns the filter being sent from the C-side -CREATE SERVER loopback FOREIGN DATA WRAPPER jdbc_pxf_fdw OPTIONS (jdbc_driver 'org.postgresql.Driver', db_url 'jdbc:postgresql://localhost:5432/gpadmin'); -CREATE USER MAPPING FOR CURRENT_USER SERVER loopback; -CREATE FOREIGN TABLE foreign_tb_test (id int, v_text text, v_bool bool) SERVER loopback OPTIONS ( resource 'public.tb_test_t' ); -explain analyze select v_text from foreign_tb_test where v_text = '5' and v_bool = false ; - QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------- - Gather Motion 3:1 (slice1; segments: 3) (cost=50000.00..50000.00 rows=1000 width=32) (actual time=25.552..25.554 rows=1 loops=1) - -> Foreign Scan on foreign_tb_test (cost=50000.00..50000.00 rows=334 width=32) (actual time=0.369..0.371 rows=1 loops=1) - Filter: ((NOT v_bool) AND (v_text = '5'::text)) - Planning time: 3.438 ms - (slice0) Executor memory: 59K bytes. 
- (slice1) Executor memory: 111K bytes avg x 3 workers, 138K bytes max (seg1). - Memory used: 128000kB - Optimizer: Postgres query optimizer - Execution time: 25.723 ms -(9 rows) - -select v_text from foreign_tb_test where v_text = '5' and v_bool = false ; - v_text --------- - 5 -(1 row) - CREATE FOREIGN DATA WRAPPER pxf_filter_push_down_fdw HANDLER pxf_fdw_handler VALIDATOR pxf_fdw_validator @@ -75,6 +52,13 @@ SELECT * FROM test_filter WHERE b2 = false ORDER BY t0, a1; J | 9 | f | a2c16s4dtrueo0l2 (5 rows) +SELECT t0, a1, filtervalue FROM test_filter WHERE a1 < 5 AND b2 = false ORDER BY t0, a1; + t0 | a1 | filtervalue +----+----+------------------------------- + B | 1 | a1c23s1d5o1a2c16s4dtrueo0l2l0 + D | 3 | a1c23s1d5o1a2c16s4dtrueo0l2l0 +(2 rows) + SELECT * FROM test_filter WHERE b2 = false AND (a1 = 1 OR a1 = 10) ORDER BY t0, a1; t0 | a1 | b2 | filtervalue ----+----+----+--------------------------------------------- @@ -130,6 +114,13 @@ SELECT * FROM test_filter WHERE b2 = false ORDER BY t0, a1; J | 9 | f | a2c16s4dtrueo0l2 (5 rows) +SELECT t0, a1, filtervalue FROM test_filter WHERE a1 < 5 AND b2 = false ORDER BY t0, a1; + t0 | a1 | filtervalue +----+----+------------------------------- + B | 1 | a1c23s1d5o1a2c16s4dtrueo0l2l0 + D | 3 | a1c23s1d5o1a2c16s4dtrueo0l2l0 +(2 rows) + SELECT * FROM test_filter WHERE b2 = false AND (a1 = 1 OR a1 = 10) ORDER BY t0, a1; t0 | a1 | b2 | filtervalue ----+----+----+--------------------------------------------- @@ -207,6 +198,13 @@ SELECT * FROM test_filter WHERE b2 = false ORDER BY t0, a1; J | 9 | f | a2c16s4dtrueo0l2 (5 rows) +SELECT t0, a1, filtervalue FROM test_filter WHERE a1 < 5 AND b2 = false ORDER BY t0, a1; + t0 | a1 | filtervalue +----+----+------------------------------- + B | 1 | a1c23s1d5o1a2c16s4dtrueo0l2l0 + D | 3 | a1c23s1d5o1a2c16s4dtrueo0l2l0 +(2 rows) + SELECT * FROM test_filter WHERE b2 = false AND (a1 = 1 OR a1 = 10) ORDER BY t0, a1; t0 | a1 | b2 | filtervalue 
----+----+----+--------------------------------------------- @@ -262,6 +260,13 @@ SELECT * FROM test_filter WHERE b2 = false ORDER BY t0, a1; J | 9 | f | a2c16s4dtrueo0l2 (5 rows) +SELECT t0, a1, filtervalue FROM test_filter WHERE a1 < 5 AND b2 = false ORDER BY t0, a1; + t0 | a1 | filtervalue +----+----+------------------------------- + B | 1 | a1c23s1d5o1a2c16s4dtrueo0l2l0 + D | 3 | a1c23s1d5o1a2c16s4dtrueo0l2l0 +(2 rows) + SELECT * FROM test_filter WHERE b2 = false AND (a1 = 1 OR a1 = 10) ORDER BY t0, a1; t0 | a1 | b2 | filtervalue ----+----+----+--------------------------------------------- diff --git a/regression/expected/FilterPushDownTest.out b/regression/expected/FilterPushDownTest.out index e2e2d9f872..b07b92c55f 100644 --- a/regression/expected/FilterPushDownTest.out +++ b/regression/expected/FilterPushDownTest.out @@ -3,29 +3,6 @@ ----------------------------------------------------- -- Check that the filter is being pushed down. We create an external table -- that returns the filter being sent from the C-side -create table tb_test_t(id serial primary key, v_text text, v_bool bool); -insert into tb_test_t(v_text, v_bool) select v::text, false from generate_series(1, 20) v; -CREATE EXTERNAL TABLE tb_test_t_pxf(id int, v_text text, v_bool bool) LOCATION('pxf://public.tb_test_t?PROFILE=JDBC&JDBC_DRIVER=org.postgresql.Driver&DB_URL=jdbc:postgresql://localhost:5432/gpadmin&USER=gpadmin') FORMAT 'CUSTOM' (FORMATTER='pxfwritable_import'); -explain analyze select v_text from tb_test_t_pxf where v_text = '5' and v_bool = false ; - QUERY PLAN -------------------------------------------------------------------------------------------------------------------------------- - Gather Motion 3:1 (slice1; segments: 3) (cost=0.00..469.21 rows=237038 width=8) (actual time=12.759..12.760 rows=1 loops=1) - -> External Scan on tb_test_t_pxf (cost=0.00..462.14 rows=79013 width=8) (actual time=12.051..12.079 rows=1 loops=1) - Filter: ((v_text = '5'::text) AND (NOT v_bool)) - 
Planning time: 6.030 ms - (slice0) Executor memory: 132K bytes. - (slice1) Executor memory: 130K bytes avg x 3 workers, 130K bytes max (seg0). - Memory used: 128000kB - Optimizer: Pivotal Optimizer (GPORCA) - Execution time: 13.066 ms -(9 rows) - -select v_text from tb_test_t_pxf where v_text = '5' and v_bool = false ; - v_text --------- - 5 -(1 row) - DROP EXTERNAL TABLE IF EXISTS test_filter CASCADE; CREATE EXTERNAL TABLE test_filter (t0 text, a1 integer, b2 boolean, filterValue text) LOCATION (E'pxf://dummy_path?FRAGMENTER=org.greenplum.pxf.diagnostic.FilterVerifyFragmenter&ACCESSOR=org.greenplum.pxf.diagnostic.UserDataVerifyAccessor&RESOLVER=org.greenplum.pxf.plugins.hdfs.StringPassResolver') @@ -68,6 +45,13 @@ SELECT * FROM test_filter WHERE b2 = false ORDER BY t0, a1; J | 9 | f | a2c16s4dtrueo0l2 (5 rows) +SELECT t0, a1, filtervalue FROM test_filter WHERE a1 < 5 AND b2 = false ORDER BY t0, a1; + t0 | a1 | filtervalue +----+----+------------------------------- + B | 1 | a2c16s4dtrueo0l2a1c23s1d5o1l0 + D | 3 | a2c16s4dtrueo0l2a1c23s1d5o1l0 +(2 rows) + SELECT * FROM test_filter WHERE b2 = false AND (a1 = 1 OR a1 = 10) ORDER BY t0, a1; t0 | a1 | b2 | filtervalue ----+----+----+--------------------------------------------- @@ -123,6 +107,13 @@ SELECT * FROM test_filter WHERE b2 = false ORDER BY t0, a1; J | 9 | f | a2c16s4dtrueo0l2 (5 rows) +SELECT t0, a1, filtervalue FROM test_filter WHERE a1 < 5 AND b2 = false ORDER BY t0, a1; + t0 | a1 | filtervalue +----+----+------------------------------- + B | 1 | a1c23s1d5o1a2c16s4dtrueo0l2l0 + D | 3 | a1c23s1d5o1a2c16s4dtrueo0l2l0 +(2 rows) + SELECT * FROM test_filter WHERE b2 = false AND (a1 = 1 OR a1 = 10) ORDER BY t0, a1; t0 | a1 | b2 | filtervalue ----+----+----+--------------------------------------------- @@ -197,6 +188,13 @@ SELECT * FROM test_filter WHERE b2 = false ORDER BY t0, a1; J | 9 | f | a2c16s4dtrueo0l2 (5 rows) +SELECT t0, a1, filtervalue FROM test_filter WHERE a1 < 5 AND b2 = false ORDER BY t0, a1; + t0 | 
a1 | filtervalue +----+----+------------------------------- + B | 1 | a2c16s4dtrueo0l2a1c23s1d5o1l0 + D | 3 | a2c16s4dtrueo0l2a1c23s1d5o1l0 +(2 rows) + SELECT * FROM test_filter WHERE b2 = false AND (a1 = 1 OR a1 = 10) ORDER BY t0, a1; t0 | a1 | b2 | filtervalue ----+----+----+--------------------------------------------- @@ -252,6 +250,13 @@ SELECT * FROM test_filter WHERE b2 = false ORDER BY t0, a1; J | 9 | f | a2c16s4dtrueo0l2 (5 rows) +SELECT t0, a1, filtervalue FROM test_filter WHERE a1 < 5 AND b2 = false ORDER BY t0, a1; + t0 | a1 | filtervalue +----+----+------------------------------- + B | 1 | a2c16s4dtrueo0l2a1c23s1d5o1l0 + D | 3 | a2c16s4dtrueo0l2a1c23s1d5o1l0 +(2 rows) + SELECT * FROM test_filter WHERE b2 = false AND (a1 = 1 OR a1 = 10) ORDER BY t0, a1; t0 | a1 | b2 | filtervalue ----+----+----+--------------------------------------------- diff --git a/regression/sql/FDW_FilterPushDownTest.sql b/regression/sql/FDW_FilterPushDownTest.sql index c1fc0a4c8c..5a4c667c50 100644 --- a/regression/sql/FDW_FilterPushDownTest.sql +++ b/regression/sql/FDW_FilterPushDownTest.sql @@ -5,13 +5,6 @@ -- Check that the filter is being pushed down. 
We create an external table -- that returns the filter being sent from the C-side - -CREATE SERVER loopback FOREIGN DATA WRAPPER jdbc_pxf_fdw OPTIONS (jdbc_driver 'org.postgresql.Driver', db_url 'jdbc:postgresql://localhost:5432/gpadmin'); -CREATE USER MAPPING FOR CURRENT_USER SERVER loopback; -CREATE FOREIGN TABLE foreign_tb_test (id int, v_text text, v_bool bool) SERVER loopback OPTIONS ( resource 'public.tb_test_t' ); -explain analyze select v_text from foreign_tb_test where v_text = '5' and v_bool = false ; -select v_text from foreign_tb_test where v_text = '5' and v_bool = false ; - CREATE FOREIGN DATA WRAPPER pxf_filter_push_down_fdw HANDLER pxf_fdw_handler VALIDATOR pxf_fdw_validator @@ -40,6 +33,8 @@ SELECT * FROM test_filter WHERE t0 = 'B' OR (a1 >= 0 AND a1 <= 2) ORDER BY t0, SELECT * FROM test_filter WHERE b2 = false ORDER BY t0, a1; +SELECT t0, a1, filtervalue FROM test_filter WHERE a1 < 5 AND b2 = false ORDER BY t0, a1; + SELECT * FROM test_filter WHERE b2 = false AND (a1 = 1 OR a1 = 10) ORDER BY t0, a1; SELECT * FROM test_filter WHERE b2 = false OR (a1 >= 0 AND a1 <= 2) ORDER BY t0, a1; @@ -56,6 +51,8 @@ SELECT * FROM test_filter WHERE t0 = 'B' OR (a1 >= 0 AND a1 <= 2) ORDER BY t0, SELECT * FROM test_filter WHERE b2 = false ORDER BY t0, a1; +SELECT t0, a1, filtervalue FROM test_filter WHERE a1 < 5 AND b2 = false ORDER BY t0, a1; + SELECT * FROM test_filter WHERE b2 = false AND (a1 = 1 OR a1 = 10) ORDER BY t0, a1; SELECT * FROM test_filter WHERE b2 = false OR (a1 >= 0 AND a1 <= 2) ORDER BY t0, a1; @@ -98,6 +95,8 @@ SELECT * FROM test_filter WHERE t0 = 'B' OR (a1 >= 0 AND a1 <= 2) ORDER BY t0, SELECT * FROM test_filter WHERE b2 = false ORDER BY t0, a1; +SELECT t0, a1, filtervalue FROM test_filter WHERE a1 < 5 AND b2 = false ORDER BY t0, a1; + SELECT * FROM test_filter WHERE b2 = false AND (a1 = 1 OR a1 = 10) ORDER BY t0, a1; SELECT * FROM test_filter WHERE b2 = false OR (a1 >= 0 AND a1 <= 2) ORDER BY t0, a1; @@ -114,6 +113,8 @@ SELECT * FROM test_filter 
WHERE t0 = 'B' OR (a1 >= 0 AND a1 <= 2) ORDER BY t0, SELECT * FROM test_filter WHERE b2 = false ORDER BY t0, a1; +SELECT t0, a1, filtervalue FROM test_filter WHERE a1 < 5 AND b2 = false ORDER BY t0, a1; + SELECT * FROM test_filter WHERE b2 = false AND (a1 = 1 OR a1 = 10) ORDER BY t0, a1; SELECT * FROM test_filter WHERE b2 = false OR (a1 >= 0 AND a1 <= 2) ORDER BY t0, a1; diff --git a/regression/sql/FilterPushDownTest.sql b/regression/sql/FilterPushDownTest.sql index 7dc8aef321..b70a093de3 100644 --- a/regression/sql/FilterPushDownTest.sql +++ b/regression/sql/FilterPushDownTest.sql @@ -6,12 +6,6 @@ -- Check that the filter is being pushed down. We create an external table -- that returns the filter being sent from the C-side -create table tb_test_t(id serial primary key, v_text text, v_bool bool); -insert into tb_test_t(v_text, v_bool) select v::text, false from generate_series(1, 20) v; -CREATE EXTERNAL TABLE tb_test_t_pxf(id int, v_text text, v_bool bool) LOCATION('pxf://public.tb_test_t?PROFILE=JDBC&JDBC_DRIVER=org.postgresql.Driver&DB_URL=jdbc:postgresql://localhost:5432/gpadmin&USER=gpadmin') FORMAT 'CUSTOM' (FORMATTER='pxfwritable_import'); -explain analyze select v_text from tb_test_t_pxf where v_text = '5' and v_bool = false ; -select v_text from tb_test_t_pxf where v_text = '5' and v_bool = false ; - DROP EXTERNAL TABLE IF EXISTS test_filter CASCADE; CREATE EXTERNAL TABLE test_filter (t0 text, a1 integer, b2 boolean, filterValue text) @@ -31,6 +25,8 @@ SELECT * FROM test_filter WHERE t0 = 'B' OR (a1 >= 0 AND a1 <= 2) ORDER BY t0, SELECT * FROM test_filter WHERE b2 = false ORDER BY t0, a1; +SELECT t0, a1, filtervalue FROM test_filter WHERE a1 < 5 AND b2 = false ORDER BY t0, a1; + SELECT * FROM test_filter WHERE b2 = false AND (a1 = 1 OR a1 = 10) ORDER BY t0, a1; SELECT * FROM test_filter WHERE b2 = false OR (a1 >= 0 AND a1 <= 2) ORDER BY t0, a1; @@ -47,6 +43,8 @@ SELECT * FROM test_filter WHERE t0 = 'B' OR (a1 >= 0 AND a1 <= 2) ORDER BY t0, SELECT * FROM 
test_filter WHERE b2 = false ORDER BY t0, a1; +SELECT t0, a1, filtervalue FROM test_filter WHERE a1 < 5 AND b2 = false ORDER BY t0, a1; + SELECT * FROM test_filter WHERE b2 = false AND (a1 = 1 OR a1 = 10) ORDER BY t0, a1; SELECT * FROM test_filter WHERE b2 = false OR (a1 >= 0 AND a1 <= 2) ORDER BY t0, a1; @@ -84,6 +82,8 @@ SELECT * FROM test_filter WHERE t0 = 'B' OR (a1 >= 0 AND a1 <= 2) ORDER BY t0, SELECT * FROM test_filter WHERE b2 = false ORDER BY t0, a1; +SELECT t0, a1, filtervalue FROM test_filter WHERE a1 < 5 AND b2 = false ORDER BY t0, a1; + SELECT * FROM test_filter WHERE b2 = false AND (a1 = 1 OR a1 = 10) ORDER BY t0, a1; SELECT * FROM test_filter WHERE b2 = false OR (a1 >= 0 AND a1 <= 2) ORDER BY t0, a1; @@ -100,6 +100,8 @@ SELECT * FROM test_filter WHERE t0 = 'B' OR (a1 >= 0 AND a1 <= 2) ORDER BY t0, SELECT * FROM test_filter WHERE b2 = false ORDER BY t0, a1; +SELECT t0, a1, filtervalue FROM test_filter WHERE a1 < 5 AND b2 = false ORDER BY t0, a1; + SELECT * FROM test_filter WHERE b2 = false AND (a1 = 1 OR a1 = 10) ORDER BY t0, a1; SELECT * FROM test_filter WHERE b2 = false OR (a1 >= 0 AND a1 <= 2) ORDER BY t0, a1; From 555455182f82ffce8df0f0901f99fa836ee95079 Mon Sep 17 00:00:00 2001 From: Roman Zolotov Date: Mon, 10 Oct 2022 15:52:11 +0300 Subject: [PATCH 23/53] ADBDEV-3096: Move ORACLE_JDBC_SESSION_PARALLEL_PROPERTY_DELIMITER --- .../OracleParallelSessionParamFactory.java | 20 ++++++------- .../oracle/OracleSessionQueryFactory.java | 4 ++- ...OracleParallelSessionParamFactoryTest.java | 29 ++++++++++++------- .../oracle/OracleSessionQueryFactoryTest.java | 19 ++++++------ 4 files changed, 39 insertions(+), 33 deletions(-) diff --git a/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/utils/oracle/OracleParallelSessionParamFactory.java b/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/utils/oracle/OracleParallelSessionParamFactory.java index 0468aff50c..8c04bdc608 100644 --- 
a/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/utils/oracle/OracleParallelSessionParamFactory.java +++ b/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/utils/oracle/OracleParallelSessionParamFactory.java @@ -6,12 +6,10 @@ import java.util.HashMap; public class OracleParallelSessionParamFactory { - private static final String ORACLE_JDBC_SESSION_PARALLEL_PROPERTY_DELIMITER = "\\."; + public OracleParallelSessionParam create(String property, String value, String delimiter) { + validateValue(property, value, delimiter); - public OracleParallelSessionParam create(String property, String value) { - validateValue(property, value); - - HashMap map = getParallelSessionParam(value); + HashMap map = getParallelSessionParam(value, delimiter); String clause = map.get("clause").toUpperCase(); String statementType = map.get("statement_type").toUpperCase(); String degreeOfParallelism = map.get("degree_of_parallelism"); @@ -23,24 +21,24 @@ public OracleParallelSessionParam create(String property, String value) { return param; } - private void validateValue(String property, String value) { + private void validateValue(String property, String value, String delimiter) { if (StringUtils.isBlank(value)) { throw new IllegalArgumentException(String.format( "The parameter '%s' is empty in jdbc-site.xml", property) ); } - if (value.split(ORACLE_JDBC_SESSION_PARALLEL_PROPERTY_DELIMITER).length < 2 - || value.split(ORACLE_JDBC_SESSION_PARALLEL_PROPERTY_DELIMITER).length > 3) { + if (value.split(delimiter).length < 2 + || value.split(delimiter).length > 3) { throw new IllegalArgumentException(String.format( "The parameter '%s' in jdbc-site.xml has to contain at least 2 but not more then 3 values delimited by %s", - property, ORACLE_JDBC_SESSION_PARALLEL_PROPERTY_DELIMITER) + property, delimiter) ); } } - private HashMap getParallelSessionParam(String value) { + private HashMap getParallelSessionParam(String value, String delimiter) { HashMap params = new 
HashMap<>(); - String[] values = value.split(ORACLE_JDBC_SESSION_PARALLEL_PROPERTY_DELIMITER); + String[] values = value.split(delimiter); params.put("clause", values[0]); params.put("statement_type", values[1]); if (values.length == 3 && Strings.isNotBlank(values[2])) { diff --git a/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/utils/oracle/OracleSessionQueryFactory.java b/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/utils/oracle/OracleSessionQueryFactory.java index 1ed41d793a..52ac7d14be 100644 --- a/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/utils/oracle/OracleSessionQueryFactory.java +++ b/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/utils/oracle/OracleSessionQueryFactory.java @@ -4,6 +4,7 @@ public class OracleSessionQueryFactory { private static final String ORACLE_JDBC_SESSION_PARALLEL_PROPERTY_PREFIX = "alter_session_parallel"; + private static final String ORACLE_JDBC_SESSION_PARALLEL_PROPERTY_DELIMITER = "\\."; private final OracleParallelSessionParamFactory oracleSessionParamFactory = new OracleParallelSessionParamFactory(); public String create(String property, String value) { @@ -14,7 +15,8 @@ public String create(String property, String value) { } private String getParallelSessionCommand(String property, String value) { - OracleParallelSessionParam param = oracleSessionParamFactory.create(property, value); + OracleParallelSessionParam param = oracleSessionParamFactory.create(property, + value, ORACLE_JDBC_SESSION_PARALLEL_PROPERTY_DELIMITER); return createParallelSessionCommand(param); } diff --git a/server/pxf-jdbc/src/test/java/org/greenplum/pxf/plugins/jdbc/utils/oracle/OracleParallelSessionParamFactoryTest.java b/server/pxf-jdbc/src/test/java/org/greenplum/pxf/plugins/jdbc/utils/oracle/OracleParallelSessionParamFactoryTest.java index 0eb7484b55..31faf25479 100644 --- a/server/pxf-jdbc/src/test/java/org/greenplum/pxf/plugins/jdbc/utils/oracle/OracleParallelSessionParamFactoryTest.java 
+++ b/server/pxf-jdbc/src/test/java/org/greenplum/pxf/plugins/jdbc/utils/oracle/OracleParallelSessionParamFactoryTest.java @@ -8,11 +8,12 @@ class OracleParallelSessionParamFactoryTest { private final OracleParallelSessionParamFactory oracleParallelSessionParamFactory = new OracleParallelSessionParamFactory(); private final String property = "jdbc.session.property.alter_session_parallel.1"; + private final String delimiter = "\\."; @Test void createWithClauseAndStatementAndDegreeOfParallelismSuccess() { String value = "force.query.5"; - OracleParallelSessionParam param = oracleParallelSessionParamFactory.create(property, value); + OracleParallelSessionParam param = oracleParallelSessionParamFactory.create(property, value, delimiter); assertEquals(param.getClause(), OracleParallelSessionParam.Clause.FORCE); assertEquals(param.getStatementType(), OracleParallelSessionParam.StatementType.QUERY); assertEquals(param.getDegreeOfParallelism(), "5"); @@ -21,7 +22,7 @@ void createWithClauseAndStatementAndDegreeOfParallelismSuccess() { @Test void createWithClauseAndStatementSuccess() { String value = "enable.ddl"; - OracleParallelSessionParam param = oracleParallelSessionParamFactory.create(property, value); + OracleParallelSessionParam param = oracleParallelSessionParamFactory.create(property, value, delimiter); assertEquals(param.getClause(), OracleParallelSessionParam.Clause.ENABLE); assertEquals(param.getStatementType(), OracleParallelSessionParam.StatementType.DDL); assertEquals(param.getDegreeOfParallelism(), ""); @@ -31,7 +32,7 @@ void createWithClauseAndStatementSuccess() { void createWithClauseAndStatementAndBlankDegreeOfParallelismSuccess() { String value = "disable.dml. 
"; String property = "jdbc.session.property.alter_session_parallel.1"; - OracleParallelSessionParam param = oracleParallelSessionParamFactory.create(property, value); + OracleParallelSessionParam param = oracleParallelSessionParamFactory.create(property, value, delimiter); assertEquals(param.getClause(), OracleParallelSessionParam.Clause.DISABLE); assertEquals(param.getStatementType(), OracleParallelSessionParam.StatementType.DML); assertEquals(param.getDegreeOfParallelism(), ""); @@ -41,7 +42,7 @@ void createWithClauseAndStatementAndBlankDegreeOfParallelismSuccess() { void createWithEmptyValue() { String value = "enable.dml."; String property = "jdbc.session.property.alter_session_parallel.1"; - OracleParallelSessionParam param = oracleParallelSessionParamFactory.create(property, value); + OracleParallelSessionParam param = oracleParallelSessionParamFactory.create(property, value, delimiter); assertEquals(param.getClause(), OracleParallelSessionParam.Clause.ENABLE); assertEquals(param.getStatementType(), OracleParallelSessionParam.StatementType.DML); assertEquals(param.getDegreeOfParallelism(), ""); @@ -50,7 +51,8 @@ void createWithEmptyValue() { @Test void createWithWrongClause() { String value = "fake_force.query.5"; - Exception exception = assertThrows(IllegalArgumentException.class, () -> oracleParallelSessionParamFactory.create(property, value)); + Exception exception = assertThrows(IllegalArgumentException.class, + () -> oracleParallelSessionParamFactory.create(property, value, delimiter)); String expectedMessage = "The 'clause' value 'FAKE_FORCE' in the parameter 'jdbc.session.property.alter_session_parallel.1' is not valid"; String actualMessage = exception.getMessage(); assertEquals(actualMessage, expectedMessage); @@ -59,7 +61,8 @@ void createWithWrongClause() { @Test void createWithWrongStatement() { String value = "enable.fake_statement"; - Exception exception = assertThrows(IllegalArgumentException.class, () -> 
oracleParallelSessionParamFactory.create(property, value)); + Exception exception = assertThrows(IllegalArgumentException.class, + () -> oracleParallelSessionParamFactory.create(property, value, delimiter)); String expectedMessage = "The 'statement type' value 'FAKE_STATEMENT' in the parameter 'jdbc.session.property.alter_session_parallel.1' is not valid"; String actualMessage = exception.getMessage(); assertEquals(expectedMessage, actualMessage); @@ -68,7 +71,8 @@ void createWithWrongStatement() { @Test void createWithWrongDegreeOfParallelism() { String value = "force.dml.fake_number"; - Exception exception = assertThrows(IllegalArgumentException.class, () -> oracleParallelSessionParamFactory.create(property, value)); + Exception exception = assertThrows(IllegalArgumentException.class, + () -> oracleParallelSessionParamFactory.create(property, value, delimiter)); String expectedMessage = "The 'degree of parallelism' value 'fake_number' in the parameter 'jdbc.session.property.alter_session_parallel.1' is not valid"; String actualMessage = exception.getMessage(); assertEquals(expectedMessage, actualMessage); @@ -77,7 +81,8 @@ void createWithWrongDegreeOfParallelism() { @Test void createWithWrongValueMoreThen3() { String value = "force.dml.number.70"; - Exception exception = assertThrows(IllegalArgumentException.class, () -> oracleParallelSessionParamFactory.create(property, value)); + Exception exception = assertThrows(IllegalArgumentException.class, + () -> oracleParallelSessionParamFactory.create(property, value, delimiter)); String expectedMessage = "The parameter 'jdbc.session.property.alter_session_parallel.1' in jdbc-site.xml has to contain at least 2 but not more then 3 values delimited by \\."; String actualMessage = exception.getMessage(); assertEquals(expectedMessage, actualMessage); @@ -86,7 +91,8 @@ void createWithWrongValueMoreThen3() { @Test void createWithWrongValueLessThen2() { String value = "force"; - Exception exception = 
assertThrows(IllegalArgumentException.class, () -> oracleParallelSessionParamFactory.create(property, value)); + Exception exception = assertThrows(IllegalArgumentException.class, + () -> oracleParallelSessionParamFactory.create(property, value, delimiter)); String expectedMessage = "The parameter 'jdbc.session.property.alter_session_parallel.1' in jdbc-site.xml has to contain at least 2 but not more then 3 values delimited by \\."; String actualMessage = exception.getMessage(); assertEquals(expectedMessage, actualMessage); @@ -95,9 +101,10 @@ void createWithWrongValueLessThen2() { @Test void createWithBlankValue() { String value = " "; - Exception exception = assertThrows(IllegalArgumentException.class, () -> oracleParallelSessionParamFactory.create(property, value)); + Exception exception = assertThrows(IllegalArgumentException.class, + () -> oracleParallelSessionParamFactory.create(property, value, delimiter)); String expectedMessage = "The parameter 'jdbc.session.property.alter_session_parallel.1' is empty in jdbc-site.xml"; String actualMessage = exception.getMessage(); assertEquals(expectedMessage, actualMessage); } -} \ No newline at end of file +} diff --git a/server/pxf-jdbc/src/test/java/org/greenplum/pxf/plugins/jdbc/utils/oracle/OracleSessionQueryFactoryTest.java b/server/pxf-jdbc/src/test/java/org/greenplum/pxf/plugins/jdbc/utils/oracle/OracleSessionQueryFactoryTest.java index cda2451d40..565c24e0f7 100644 --- a/server/pxf-jdbc/src/test/java/org/greenplum/pxf/plugins/jdbc/utils/oracle/OracleSessionQueryFactoryTest.java +++ b/server/pxf-jdbc/src/test/java/org/greenplum/pxf/plugins/jdbc/utils/oracle/OracleSessionQueryFactoryTest.java @@ -8,12 +8,14 @@ import static org.mockito.Mockito.when; class OracleSessionQueryFactoryTest { + private final String property = "jdbc.session.property.alter_session_parallel.1"; + private final String delimiter = "\\."; @Test @SuppressWarnings("try") void createParallelSessionQueryWithForceAndDegreeOfParallelism() { - 
String property = "jdbc.session.property.alter_session_parallel.1"; String value = "force.query.4"; + String delimiter = "\\."; String expectedResult = "ALTER SESSION FORCE PARALLEL QUERY PARALLEL 4"; OracleParallelSessionParam param = new OracleParallelSessionParam(); @@ -22,7 +24,7 @@ void createParallelSessionQueryWithForceAndDegreeOfParallelism() { param.setDegreeOfParallelism("4"); try (MockedConstruction mocked = mockConstruction(OracleParallelSessionParamFactory.class, - (mock, context) -> when(mock.create(property, value)).thenReturn(param))) { + (mock, context) -> when(mock.create(property, value, delimiter)).thenReturn(param))) { OracleSessionQueryFactory oracleSessionQueryFactory = new OracleSessionQueryFactory(); String result = oracleSessionQueryFactory.create(property, value); assertEquals(expectedResult, result); @@ -32,7 +34,6 @@ void createParallelSessionQueryWithForceAndDegreeOfParallelism() { @Test @SuppressWarnings("try") void createParallelSessionQueryWithForce() { - String property = "jdbc.session.property.alter_session_parallel.1"; String value = "force.dml"; String expectedResult = "ALTER SESSION FORCE PARALLEL DML"; @@ -42,7 +43,7 @@ void createParallelSessionQueryWithForce() { param.setDegreeOfParallelism(""); try (MockedConstruction mocked = mockConstruction(OracleParallelSessionParamFactory.class, - (mock, context) -> when(mock.create(property, value)).thenReturn(param))) { + (mock, context) -> when(mock.create(property, value, delimiter)).thenReturn(param))) { OracleSessionQueryFactory oracleSessionQueryFactory = new OracleSessionQueryFactory(); String result = oracleSessionQueryFactory.create(property, value); assertEquals(expectedResult, result); @@ -52,7 +53,6 @@ void createParallelSessionQueryWithForce() { @Test @SuppressWarnings("try") void createParallelSessionQueryWithEnable() { - String property = "jdbc.session.property.alter_session_parallel.1"; String value = "enable.dml.2"; String expectedResult = "ALTER SESSION ENABLE 
PARALLEL DDL"; @@ -62,7 +62,7 @@ void createParallelSessionQueryWithEnable() { param.setDegreeOfParallelism("2"); try (MockedConstruction mocked = mockConstruction(OracleParallelSessionParamFactory.class, - (mock, context) -> when(mock.create(property, value)).thenReturn(param))) { + (mock, context) -> when(mock.create(property, value, delimiter)).thenReturn(param))) { OracleSessionQueryFactory oracleSessionQueryFactory = new OracleSessionQueryFactory(); String result = oracleSessionQueryFactory.create(property, value); assertEquals(expectedResult, result); @@ -72,7 +72,6 @@ void createParallelSessionQueryWithEnable() { @Test @SuppressWarnings("try") void createParallelSessionQueryWithDisable() { - String property = "jdbc.session.property.alter_session_parallel.1"; String value = "disable.dml"; String expectedResult = "ALTER SESSION DISABLE PARALLEL DML"; @@ -82,7 +81,7 @@ void createParallelSessionQueryWithDisable() { param.setDegreeOfParallelism(""); try (MockedConstruction mocked = mockConstruction(OracleParallelSessionParamFactory.class, - (mock, context) -> when(mock.create(property, value)).thenReturn(param))) { + (mock, context) -> when(mock.create(property, value, delimiter)).thenReturn(param))) { OracleSessionQueryFactory oracleSessionQueryFactory = new OracleSessionQueryFactory(); String result = oracleSessionQueryFactory.create(property, value); assertEquals(expectedResult, result); @@ -102,10 +101,10 @@ void createNotParallelSessionQuery() { param.setDegreeOfParallelism("2"); try (MockedConstruction mocked = mockConstruction(OracleParallelSessionParamFactory.class, - (mock, context) -> when(mock.create(property, value)).thenReturn(param))) { + (mock, context) -> when(mock.create(property, value, delimiter)).thenReturn(param))) { OracleSessionQueryFactory oracleSessionQueryFactory = new OracleSessionQueryFactory(); String result = oracleSessionQueryFactory.create(property, value); assertEquals(expectedResult, result); } } -} \ No newline at end of file 
+} From 45090ee8922759d835dba2eeee10aa010566021a Mon Sep 17 00:00:00 2001 From: Roman Zolotov Date: Mon, 10 Oct 2022 18:24:23 +0300 Subject: [PATCH 24/53] ADBDEV-3096: Change getParallelSessionParam function argument --- .../utils/oracle/OracleParallelSessionParamFactory.java | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/utils/oracle/OracleParallelSessionParamFactory.java b/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/utils/oracle/OracleParallelSessionParamFactory.java index 8c04bdc608..1ab24736f9 100644 --- a/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/utils/oracle/OracleParallelSessionParamFactory.java +++ b/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/utils/oracle/OracleParallelSessionParamFactory.java @@ -9,7 +9,7 @@ public class OracleParallelSessionParamFactory { public OracleParallelSessionParam create(String property, String value, String delimiter) { validateValue(property, value, delimiter); - HashMap map = getParallelSessionParam(value, delimiter); + HashMap map = getParallelSessionParam(value.split(delimiter)); String clause = map.get("clause").toUpperCase(); String statementType = map.get("statement_type").toUpperCase(); String degreeOfParallelism = map.get("degree_of_parallelism"); @@ -27,8 +27,8 @@ private void validateValue(String property, String value, String delimiter) { "The parameter '%s' is empty in jdbc-site.xml", property) ); } - if (value.split(delimiter).length < 2 - || value.split(delimiter).length > 3) { + String[] values = value.split(delimiter); + if (values.length < 2 || values.length > 3) { throw new IllegalArgumentException(String.format( "The parameter '%s' in jdbc-site.xml has to contain at least 2 but not more then 3 values delimited by %s", property, delimiter) @@ -36,9 +36,8 @@ private void validateValue(String property, String value, String delimiter) { } } - private HashMap 
getParallelSessionParam(String value, String delimiter) { + private HashMap getParallelSessionParam(String[] values) { HashMap params = new HashMap<>(); - String[] values = value.split(delimiter); params.put("clause", values[0]); params.put("statement_type", values[1]); if (values.length == 3 && Strings.isNotBlank(values[2])) { From 208e91cdfe96985e5d0a91623b385ab2ad4e4dc7 Mon Sep 17 00:00:00 2001 From: Roman Zolotov Date: Tue, 11 Oct 2022 12:35:50 +0300 Subject: [PATCH 25/53] ADBDEV-3096: Add function to split value --- .../OracleParallelSessionParamFactory.java | 19 +++++++++++-------- ...OracleParallelSessionParamFactoryTest.java | 2 +- 2 files changed, 12 insertions(+), 9 deletions(-) diff --git a/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/utils/oracle/OracleParallelSessionParamFactory.java b/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/utils/oracle/OracleParallelSessionParamFactory.java index 1ab24736f9..38285539f9 100644 --- a/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/utils/oracle/OracleParallelSessionParamFactory.java +++ b/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/utils/oracle/OracleParallelSessionParamFactory.java @@ -7,9 +7,9 @@ public class OracleParallelSessionParamFactory { public OracleParallelSessionParam create(String property, String value, String delimiter) { - validateValue(property, value, delimiter); + String[] values = splitValue(property, value, delimiter); - HashMap map = getParallelSessionParam(value.split(delimiter)); + HashMap map = getParallelSessionParam(values); String clause = map.get("clause").toUpperCase(); String statementType = map.get("statement_type").toUpperCase(); String degreeOfParallelism = map.get("degree_of_parallelism"); @@ -21,12 +21,8 @@ public OracleParallelSessionParam create(String property, String value, String d return param; } - private void validateValue(String property, String value, String delimiter) { - if (StringUtils.isBlank(value)) { - 
throw new IllegalArgumentException(String.format( - "The parameter '%s' is empty in jdbc-site.xml", property) - ); - } + private String[] splitValue(String property, String value, String delimiter) { + validateValue(property, value); String[] values = value.split(delimiter); if (values.length < 2 || values.length > 3) { throw new IllegalArgumentException(String.format( @@ -34,6 +30,13 @@ private void validateValue(String property, String value, String delimiter) { property, delimiter) ); } + return values; + } + + private void validateValue(String property, String value) { + if (StringUtils.isBlank(value)) { + throw new IllegalArgumentException(String.format("The parameter '%s' is blank in jdbc-site.xml", property)); + } } private HashMap getParallelSessionParam(String[] values) { diff --git a/server/pxf-jdbc/src/test/java/org/greenplum/pxf/plugins/jdbc/utils/oracle/OracleParallelSessionParamFactoryTest.java b/server/pxf-jdbc/src/test/java/org/greenplum/pxf/plugins/jdbc/utils/oracle/OracleParallelSessionParamFactoryTest.java index 31faf25479..840d56dd7d 100644 --- a/server/pxf-jdbc/src/test/java/org/greenplum/pxf/plugins/jdbc/utils/oracle/OracleParallelSessionParamFactoryTest.java +++ b/server/pxf-jdbc/src/test/java/org/greenplum/pxf/plugins/jdbc/utils/oracle/OracleParallelSessionParamFactoryTest.java @@ -103,7 +103,7 @@ void createWithBlankValue() { String value = " "; Exception exception = assertThrows(IllegalArgumentException.class, () -> oracleParallelSessionParamFactory.create(property, value, delimiter)); - String expectedMessage = "The parameter 'jdbc.session.property.alter_session_parallel.1' is empty in jdbc-site.xml"; + String expectedMessage = "The parameter 'jdbc.session.property.alter_session_parallel.1' is blank in jdbc-site.xml"; String actualMessage = exception.getMessage(); assertEquals(expectedMessage, actualMessage); } From 2f3d06460b2d25f9f68351a088c66e3ab0d8ef06 Mon Sep 17 00:00:00 2001 From: Roman Zolotov Date: Mon, 17 Oct 2022 10:43:05 +0300 
Subject: [PATCH 26/53] ADBDEV-3104: Add close connection if something wrong in closeForRead() method --- .../pxf/plugins/jdbc/JdbcAccessor.java | 22 +++++++++++-------- .../pxf/plugins/jdbc/JdbcBasePlugin.java | 2 +- .../pxf/plugins/jdbc/JdbcAccessorTest.java | 2 +- 3 files changed, 15 insertions(+), 11 deletions(-) diff --git a/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/JdbcAccessor.java b/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/JdbcAccessor.java index 8ae3c60b34..5c57054be2 100644 --- a/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/JdbcAccessor.java +++ b/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/JdbcAccessor.java @@ -22,7 +22,6 @@ import org.apache.commons.io.FileUtils; import org.apache.commons.lang.StringUtils; import org.greenplum.pxf.api.OneRow; -import org.greenplum.pxf.api.error.PxfRuntimeException; import org.greenplum.pxf.api.model.Accessor; import org.greenplum.pxf.api.model.ConfigurationFactory; import org.greenplum.pxf.api.security.SecureLogin; @@ -36,12 +35,7 @@ import java.io.File; import java.io.IOException; import java.nio.charset.Charset; -import java.sql.Connection; -import java.sql.PreparedStatement; -import java.sql.ResultSet; -import java.sql.SQLException; -import java.sql.SQLTimeoutException; -import java.sql.Statement; +import java.sql.*; import java.util.LinkedList; import java.util.List; import java.util.concurrent.ExecutorService; @@ -101,8 +95,18 @@ public boolean openForRead() throws SQLException, SQLTimeoutException { if (statementRead != null && !statementRead.isClosed()) { return true; } - Connection connection = super.getConnection(); + try { + return openForReadInner(connection); + } catch (Throwable e) { + if (statementRead == null) { + JdbcBasePlugin.closeConnection(connection); + } + throw new RuntimeException(e.getMessage(), e); + } + } + + private boolean openForReadInner(Connection connection) throws SQLException { SQLQueryBuilder sqlQueryBuilder = new 
SQLQueryBuilder(context, connection.getMetaData(), getQueryText()); // Build SELECT query @@ -230,7 +234,7 @@ public boolean openForWrite() throws SQLException, SQLTimeoutException { return true; } - /** + /** * writeNextObject() implementation *

* If batchSize is not 0 or 1, add a tuple to the batch of statementWrite diff --git a/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/JdbcBasePlugin.java b/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/JdbcBasePlugin.java index a144d6e568..eda17b4950 100644 --- a/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/JdbcBasePlugin.java +++ b/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/JdbcBasePlugin.java @@ -488,7 +488,7 @@ private Connection getConnectionInternal() throws Exception { * @param connection connection to close * @throws SQLException throws when a SQLException occurs */ - private static void closeConnection(Connection connection) throws SQLException { + static void closeConnection(Connection connection) throws SQLException { if (connection == null) { LOG.warn("Call to close connection is ignored as connection provided was null"); return; diff --git a/server/pxf-jdbc/src/test/java/org/greenplum/pxf/plugins/jdbc/JdbcAccessorTest.java b/server/pxf-jdbc/src/test/java/org/greenplum/pxf/plugins/jdbc/JdbcAccessorTest.java index db41b3179f..640e0c32c6 100644 --- a/server/pxf-jdbc/src/test/java/org/greenplum/pxf/plugins/jdbc/JdbcAccessorTest.java +++ b/server/pxf-jdbc/src/test/java/org/greenplum/pxf/plugins/jdbc/JdbcAccessorTest.java @@ -82,7 +82,7 @@ public void testReadFromQueryFailsWhenServerDirectoryIsNotSpecified() throws SQL context.setDataSource("query:foo"); accessor.setRequestContext(context); accessor.afterPropertiesSet(); - Exception e = assertThrows(IllegalStateException.class, + Exception e = assertThrows(RuntimeException.class, () -> accessor.openForRead()); assertEquals("No server configuration directory found for server unknown", e.getMessage()); } From eb4c599e9c4b8a033bdfe5ec0c96e48e747d3ab5 Mon Sep 17 00:00:00 2001 From: Roman Zolotov Date: Wed, 9 Nov 2022 14:09:16 +0200 Subject: [PATCH 27/53] ADBDEV-3209: Add possibility to decrypt an encrypted password --- server/build.gradle | 28 
++++++++++++++++- server/pxf-api/build.gradle | 1 + .../PxfJksTextEncryptorConfiguration.java | 30 +++++++++++++++++++ server/pxf-jdbc/build.gradle | 1 + .../pxf/plugins/jdbc/JdbcBasePlugin.java | 27 +++++++++++++---- .../jdbc/utils/JdbcDecryptService.java | 19 ++++++++++++ .../service/spring/PxfJksTextEncryptor.java | 16 ++++++++++ 7 files changed, 115 insertions(+), 7 deletions(-) create mode 100644 server/pxf-api/src/main/java/org/greenplum/pxf/api/configuration/PxfJksTextEncryptorConfiguration.java create mode 100644 server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/utils/JdbcDecryptService.java create mode 100644 server/pxf-service/src/main/java/org/greenplum/pxf/service/spring/PxfJksTextEncryptor.java diff --git a/server/build.gradle b/server/build.gradle index 8fa9ec8aee..e7589abacb 100644 --- a/server/build.gradle +++ b/server/build.gradle @@ -20,6 +20,18 @@ buildscript { repositories { mavenCentral() + maven { + url "https://rt.adsw.io/artifactory/maven-arenadata-release/" + mavenContent { + releasesOnly() + } + } + maven { + url "https://rt.adsw.io/artifactory/maven-arenadata-snapshot/" + mavenContent { + snapshotsOnly() + } + } } } @@ -40,6 +52,18 @@ allprojects { repositories { mavenCentral() + maven { + url "https://rt.adsw.io/artifactory/maven-arenadata-release/" + mavenContent { + releasesOnly() + } + } + maven { + url "https://rt.adsw.io/artifactory/maven-arenadata-snapshot/" + mavenContent { + snapshotsOnly() + } + } } } @@ -114,6 +138,9 @@ configure(javaProjects) { dependency("org.wildfly.openssl:wildfly-openssl:1.0.7.Final") dependency("org.xerial.snappy:snappy-java:1.1.8.4") + // Arenadata encryption + dependency("io.arenadata.security:encryption:1.0.0-SNAPSHOT") + // Hadoop dependencies dependencySet(group:"org.apache.hadoop", version:"${hadoopVersion}") { entry("hadoop-annotations") @@ -199,7 +226,6 @@ configure(javaProjects) { entry("aws-java-sdk-kms") entry("aws-java-sdk-s3") } - } } diff --git a/server/pxf-api/build.gradle 
b/server/pxf-api/build.gradle index 7809b8d96f..610dccfb43 100644 --- a/server/pxf-api/build.gradle +++ b/server/pxf-api/build.gradle @@ -39,6 +39,7 @@ dependencies { implementation("commons-configuration:commons-configuration") implementation("commons-lang:commons-lang") implementation("org.apache.commons:commons-lang3") + implementation("io.arenadata.security:encryption") implementation("org.apache.hadoop:hadoop-auth") { transitive = false } implementation("org.codehaus.woodstox:stax2-api") { transitive = false } diff --git a/server/pxf-api/src/main/java/org/greenplum/pxf/api/configuration/PxfJksTextEncryptorConfiguration.java b/server/pxf-api/src/main/java/org/greenplum/pxf/api/configuration/PxfJksTextEncryptorConfiguration.java new file mode 100644 index 0000000000..1627312d24 --- /dev/null +++ b/server/pxf-api/src/main/java/org/greenplum/pxf/api/configuration/PxfJksTextEncryptorConfiguration.java @@ -0,0 +1,30 @@ +package org.greenplum.pxf.api.configuration; + +import io.arenadata.security.encryption.client.configuration.JksTextEncryptorConfiguration; +import org.springframework.beans.factory.annotation.Value; + +public class PxfJksTextEncryptorConfiguration extends JksTextEncryptorConfiguration { + @Value("${pxf.ssl.jks-store.path}") + private String path; + + @Value("${pxf.ssl.jks-store.password}") + private String password; + + @Value("${pxf.ssl.salt.key}") + private String key; + + @Override + protected String jksStorePath() { + return path; + } + + @Override + protected char[] jksStorePassword() { + return password.toCharArray(); + } + + @Override + protected String secretKeyAlias() { + return key; + } +} diff --git a/server/pxf-jdbc/build.gradle b/server/pxf-jdbc/build.gradle index 0652609128..268987435e 100644 --- a/server/pxf-jdbc/build.gradle +++ b/server/pxf-jdbc/build.gradle @@ -39,6 +39,7 @@ dependencies { implementation("org.apache.hive.shims:hive-shims-0.23") { transitive = false } implementation("org.apache.hive.shims:hive-shims-common") { 
transitive = false } implementation("org.springframework.boot:spring-boot-starter-log4j2") + implementation("io.arenadata.security:encryption") /******************************* * Test Dependencies diff --git a/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/JdbcBasePlugin.java b/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/JdbcBasePlugin.java index eda17b4950..86e945573e 100644 --- a/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/JdbcBasePlugin.java +++ b/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/JdbcBasePlugin.java @@ -19,8 +19,11 @@ * under the License. */ +import io.arenadata.security.encryption.model.EncryptorType; +import io.arenadata.security.encryption.util.Util; import org.apache.commons.lang.StringUtils; import org.apache.hadoop.conf.Configuration; +import org.greenplum.pxf.api.configuration.PxfJksTextEncryptorConfiguration; import org.greenplum.pxf.api.model.BasePlugin; import org.greenplum.pxf.api.model.RequestContext; import org.greenplum.pxf.api.security.SecureLogin; @@ -30,8 +33,10 @@ import org.greenplum.pxf.plugins.jdbc.utils.ConnectionManager; import org.greenplum.pxf.plugins.jdbc.utils.DbProduct; import org.greenplum.pxf.plugins.jdbc.utils.HiveJdbcUtils; +import org.greenplum.pxf.plugins.jdbc.utils.JdbcDecryptService; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.NoSuchBeanDefinitionException; import java.security.PrivilegedExceptionAction; import java.sql.Connection; @@ -39,10 +44,7 @@ import java.sql.PreparedStatement; import java.sql.SQLException; import java.sql.Statement; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.Properties; +import java.util.*; import java.util.stream.Collectors; import static org.greenplum.pxf.api.security.SecureLogin.CONFIG_KEY_SERVICE_USER_IMPERSONATION; @@ -335,11 +337,24 @@ public void afterPropertiesSet() { ); } - // This must be the last parameter parsed, 
as we output connectionConfiguration earlier - // Optional parameter. By default, corresponding connectionConfiguration property is not set if (jdbcUser != null) { String jdbcPassword = configuration.get(JDBC_PASSWORD_PROPERTY_NAME); if (jdbcPassword != null) { + if (jdbcPassword.startsWith(Util.getEncryptedMessagePrefix(EncryptorType.AES256))) { + try { + PxfJksTextEncryptorConfiguration pxfJksTextEncryptor = SpringContext.getBean(PxfJksTextEncryptorConfiguration.class); + JdbcDecryptService jdbcDecryptService = new JdbcDecryptService(pxfJksTextEncryptor); + jdbcPassword = jdbcDecryptService.decrypt(jdbcPassword); + } catch (NoSuchBeanDefinitionException e) { + throw new IllegalArgumentException( + "Jdbc password is encrypted, but it is not possible to get encryption key. " + + "Check that encryption configuration properties with prefix 'pxf.ssl.*' " + + "are present in the pxf-application.properties file."); + } catch (Exception e) { + throw new RuntimeException( + "Jdbc password is encrypted, but the encryption key is not available." 
+ e.getMessage(), e); + } + } LOG.debug("Connection password: {}", ConnectionManager.maskPassword(jdbcPassword)); connectionConfiguration.setProperty("password", jdbcPassword); } diff --git a/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/utils/JdbcDecryptService.java b/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/utils/JdbcDecryptService.java new file mode 100644 index 0000000000..5f64a6c15a --- /dev/null +++ b/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/utils/JdbcDecryptService.java @@ -0,0 +1,19 @@ +package org.greenplum.pxf.plugins.jdbc.utils; + +import io.arenadata.security.encryption.client.provider.TextEncryptorProvider; +import io.arenadata.security.encryption.client.service.DecryptClient; +import org.greenplum.pxf.api.configuration.PxfJksTextEncryptorConfiguration; + +public class JdbcDecryptService { + private final PxfJksTextEncryptorConfiguration configuration; + + public JdbcDecryptService(PxfJksTextEncryptorConfiguration configuration) { + this.configuration = configuration; + } + + public String decrypt(String encryptedPassword) { + TextEncryptorProvider provider = configuration.textEncryptorProvider(); + DecryptClient decryptClient = configuration.decryptService(provider); + return decryptClient.decrypt(encryptedPassword); + } +} diff --git a/server/pxf-service/src/main/java/org/greenplum/pxf/service/spring/PxfJksTextEncryptor.java b/server/pxf-service/src/main/java/org/greenplum/pxf/service/spring/PxfJksTextEncryptor.java new file mode 100644 index 0000000000..595e3a0e87 --- /dev/null +++ b/server/pxf-service/src/main/java/org/greenplum/pxf/service/spring/PxfJksTextEncryptor.java @@ -0,0 +1,16 @@ +package org.greenplum.pxf.service.spring; + +import org.greenplum.pxf.api.configuration.PxfJksTextEncryptorConfiguration; +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; +import org.springframework.context.annotation.Bean; +import 
org.springframework.context.annotation.Configuration; + +@Configuration +public class PxfJksTextEncryptor { + + @Bean + @ConditionalOnProperty({"pxf.ssl.jks-store.path", "pxf.ssl.jks-store.password", "pxf.ssl.salt.key"}) + PxfJksTextEncryptorConfiguration pxfJksTextEncryptorConfiguration() { + return new PxfJksTextEncryptorConfiguration(); + } +} From 7ba81c8e3476c5f1997e84d1f24627c9eb16939d Mon Sep 17 00:00:00 2001 From: Roman Zolotov Date: Thu, 10 Nov 2022 01:11:56 +0200 Subject: [PATCH 28/53] ADBDEV-3209: Fixed PR notes --- .../PxfJksTextEncryptorConfiguration.java | 21 +++++++---- .../pxf/api/utilities/SpringContext.java | 8 +++++ .../pxf/plugins/jdbc/JdbcAccessor.java | 5 +-- .../pxf/plugins/jdbc/JdbcBasePlugin.java | 35 +++++++------------ .../jdbc/utils/JdbcDecryptService.java | 19 ---------- .../pxf/plugins/jdbc/JdbcAccessorTest.java | 5 ++- .../pxf/plugins/jdbc/JdbcBasePluginTest.java | 11 +++--- .../jdbc/JdbcBasePluginTestInitialize.java | 5 ++- .../service/spring/PxfJksTextEncryptor.java | 16 --------- 9 files changed, 53 insertions(+), 72 deletions(-) delete mode 100644 server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/utils/JdbcDecryptService.java delete mode 100644 server/pxf-service/src/main/java/org/greenplum/pxf/service/spring/PxfJksTextEncryptor.java diff --git a/server/pxf-api/src/main/java/org/greenplum/pxf/api/configuration/PxfJksTextEncryptorConfiguration.java b/server/pxf-api/src/main/java/org/greenplum/pxf/api/configuration/PxfJksTextEncryptorConfiguration.java index 1627312d24..535504e70b 100644 --- a/server/pxf-api/src/main/java/org/greenplum/pxf/api/configuration/PxfJksTextEncryptorConfiguration.java +++ b/server/pxf-api/src/main/java/org/greenplum/pxf/api/configuration/PxfJksTextEncryptorConfiguration.java @@ -2,16 +2,23 @@ import io.arenadata.security.encryption.client.configuration.JksTextEncryptorConfiguration; import org.springframework.beans.factory.annotation.Value; +import 
org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; +import org.springframework.context.annotation.Configuration; +@Configuration +@ConditionalOnProperty({"pxf.ssl.jks-store.path", "pxf.ssl.jks-store.password", "pxf.ssl.salt.key"}) public class PxfJksTextEncryptorConfiguration extends JksTextEncryptorConfiguration { - @Value("${pxf.ssl.jks-store.path}") - private String path; + private final String path; + private final String password; + private final String key; - @Value("${pxf.ssl.jks-store.password}") - private String password; - - @Value("${pxf.ssl.salt.key}") - private String key; + public PxfJksTextEncryptorConfiguration(@Value("${pxf.ssl.jks-store.path}") String path, + @Value("${pxf.ssl.jks-store.password}") String password, + @Value("${pxf.ssl.salt.key}") String key) { + this.path = path; + this.password = password; + this.key = key; + } @Override protected String jksStorePath() { diff --git a/server/pxf-api/src/main/java/org/greenplum/pxf/api/utilities/SpringContext.java b/server/pxf-api/src/main/java/org/greenplum/pxf/api/utilities/SpringContext.java index bd28a417b1..3b45216f7e 100644 --- a/server/pxf-api/src/main/java/org/greenplum/pxf/api/utilities/SpringContext.java +++ b/server/pxf-api/src/main/java/org/greenplum/pxf/api/utilities/SpringContext.java @@ -26,6 +26,14 @@ public static T getBean(Class requiredType) { return context.getBean(requiredType); } + public static T getNullableBean(Class requiredType) { + try { + return context.getBean(requiredType); + } catch (Exception e) { + return null; + } + } + @Override public void setApplicationContext(ApplicationContext context) throws BeansException { diff --git a/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/JdbcAccessor.java b/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/JdbcAccessor.java index 5c57054be2..4a7d562dd6 100644 --- a/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/JdbcAccessor.java +++ 
b/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/JdbcAccessor.java @@ -19,6 +19,7 @@ * under the License. */ +import io.arenadata.security.encryption.client.service.DecryptClient; import org.apache.commons.io.FileUtils; import org.apache.commons.lang.StringUtils; import org.greenplum.pxf.api.OneRow; @@ -78,8 +79,8 @@ public JdbcAccessor() { * @param connectionManager connection manager * @param secureLogin the instance of the secure login */ - JdbcAccessor(ConnectionManager connectionManager, SecureLogin secureLogin) { - super(connectionManager, secureLogin); + JdbcAccessor(ConnectionManager connectionManager, SecureLogin secureLogin, DecryptClient decryptClient) { + super(connectionManager, secureLogin, decryptClient); } /** diff --git a/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/JdbcBasePlugin.java b/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/JdbcBasePlugin.java index 86e945573e..65c0048e58 100644 --- a/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/JdbcBasePlugin.java +++ b/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/JdbcBasePlugin.java @@ -19,11 +19,9 @@ * under the License. 
*/ -import io.arenadata.security.encryption.model.EncryptorType; -import io.arenadata.security.encryption.util.Util; +import io.arenadata.security.encryption.client.service.DecryptClient; import org.apache.commons.lang.StringUtils; import org.apache.hadoop.conf.Configuration; -import org.greenplum.pxf.api.configuration.PxfJksTextEncryptorConfiguration; import org.greenplum.pxf.api.model.BasePlugin; import org.greenplum.pxf.api.model.RequestContext; import org.greenplum.pxf.api.security.SecureLogin; @@ -33,10 +31,8 @@ import org.greenplum.pxf.plugins.jdbc.utils.ConnectionManager; import org.greenplum.pxf.plugins.jdbc.utils.DbProduct; import org.greenplum.pxf.plugins.jdbc.utils.HiveJdbcUtils; -import org.greenplum.pxf.plugins.jdbc.utils.JdbcDecryptService; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.NoSuchBeanDefinitionException; import java.security.PrivilegedExceptionAction; import java.sql.Connection; @@ -168,6 +164,7 @@ public static TransactionIsolation typeOf(String str) { private final ConnectionManager connectionManager; private final SecureLogin secureLogin; + private final DecryptClient decryptClient; static { // Deprecated as of Oct 22, 2019 in version 5.9.2+ @@ -178,10 +175,12 @@ public static TransactionIsolation typeOf(String str) { /** * Creates a new instance with default (singleton) instances of - * ConnectionManager and SecureLogin. + * ConnectionManager, SecureLogin and DecryptClient. 
*/ JdbcBasePlugin() { - this(SpringContext.getBean(ConnectionManager.class), SpringContext.getBean(SecureLogin.class)); + this(SpringContext.getBean(ConnectionManager.class), SpringContext.getBean(SecureLogin.class), + SpringContext.getNullableBean(DecryptClient.class) + ); } /** @@ -189,9 +188,10 @@ public static TransactionIsolation typeOf(String str) { * * @param connectionManager connection manager instance */ - JdbcBasePlugin(ConnectionManager connectionManager, SecureLogin secureLogin) { + JdbcBasePlugin(ConnectionManager connectionManager, SecureLogin secureLogin, DecryptClient decryptClient) { this.connectionManager = connectionManager; this.secureLogin = secureLogin; + this.decryptClient = decryptClient; } @Override @@ -340,20 +340,11 @@ public void afterPropertiesSet() { if (jdbcUser != null) { String jdbcPassword = configuration.get(JDBC_PASSWORD_PROPERTY_NAME); if (jdbcPassword != null) { - if (jdbcPassword.startsWith(Util.getEncryptedMessagePrefix(EncryptorType.AES256))) { - try { - PxfJksTextEncryptorConfiguration pxfJksTextEncryptor = SpringContext.getBean(PxfJksTextEncryptorConfiguration.class); - JdbcDecryptService jdbcDecryptService = new JdbcDecryptService(pxfJksTextEncryptor); - jdbcPassword = jdbcDecryptService.decrypt(jdbcPassword); - } catch (NoSuchBeanDefinitionException e) { - throw new IllegalArgumentException( - "Jdbc password is encrypted, but it is not possible to get encryption key. " + - "Check that encryption configuration properties with prefix 'pxf.ssl.*' " + - "are present in the pxf-application.properties file."); - } catch (Exception e) { - throw new RuntimeException( - "Jdbc password is encrypted, but the encryption key is not available." + e.getMessage(), e); - } + try { + jdbcPassword = decryptClient == null ? jdbcPassword : decryptClient.decrypt(jdbcPassword); + } catch (Exception e) { + throw new RuntimeException( + "Failed to decrypt jdbc password. 
" + e.getMessage(), e); } LOG.debug("Connection password: {}", ConnectionManager.maskPassword(jdbcPassword)); connectionConfiguration.setProperty("password", jdbcPassword); diff --git a/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/utils/JdbcDecryptService.java b/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/utils/JdbcDecryptService.java deleted file mode 100644 index 5f64a6c15a..0000000000 --- a/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/utils/JdbcDecryptService.java +++ /dev/null @@ -1,19 +0,0 @@ -package org.greenplum.pxf.plugins.jdbc.utils; - -import io.arenadata.security.encryption.client.provider.TextEncryptorProvider; -import io.arenadata.security.encryption.client.service.DecryptClient; -import org.greenplum.pxf.api.configuration.PxfJksTextEncryptorConfiguration; - -public class JdbcDecryptService { - private final PxfJksTextEncryptorConfiguration configuration; - - public JdbcDecryptService(PxfJksTextEncryptorConfiguration configuration) { - this.configuration = configuration; - } - - public String decrypt(String encryptedPassword) { - TextEncryptorProvider provider = configuration.textEncryptorProvider(); - DecryptClient decryptClient = configuration.decryptService(provider); - return decryptClient.decrypt(encryptedPassword); - } -} diff --git a/server/pxf-jdbc/src/test/java/org/greenplum/pxf/plugins/jdbc/JdbcAccessorTest.java b/server/pxf-jdbc/src/test/java/org/greenplum/pxf/plugins/jdbc/JdbcAccessorTest.java index 640e0c32c6..0a271d8b8c 100644 --- a/server/pxf-jdbc/src/test/java/org/greenplum/pxf/plugins/jdbc/JdbcAccessorTest.java +++ b/server/pxf-jdbc/src/test/java/org/greenplum/pxf/plugins/jdbc/JdbcAccessorTest.java @@ -1,5 +1,6 @@ package org.greenplum.pxf.plugins.jdbc; +import io.arenadata.security.encryption.client.service.DecryptClient; import org.apache.hadoop.conf.Configuration; import org.greenplum.pxf.api.model.RequestContext; import org.greenplum.pxf.api.security.SecureLogin; @@ -49,11 +50,13 @@ 
public class JdbcAccessorTest { private PreparedStatement mockPreparedStatement; @Mock private ResultSet mockResultSet; + @Mock + private DecryptClient mockDecryptClient; @BeforeEach public void setup() { - accessor = new JdbcAccessor(mockConnectionManager, mockSecureLogin); + accessor = new JdbcAccessor(mockConnectionManager, mockSecureLogin, mockDecryptClient); configuration = new Configuration(); context = new RequestContext(); context.setConfig("default"); diff --git a/server/pxf-jdbc/src/test/java/org/greenplum/pxf/plugins/jdbc/JdbcBasePluginTest.java b/server/pxf-jdbc/src/test/java/org/greenplum/pxf/plugins/jdbc/JdbcBasePluginTest.java index a1ed1c0347..dd405b12bb 100644 --- a/server/pxf-jdbc/src/test/java/org/greenplum/pxf/plugins/jdbc/JdbcBasePluginTest.java +++ b/server/pxf-jdbc/src/test/java/org/greenplum/pxf/plugins/jdbc/JdbcBasePluginTest.java @@ -19,6 +19,7 @@ * under the License. */ +import io.arenadata.security.encryption.client.service.DecryptClient; import org.apache.hadoop.conf.Configuration; import org.greenplum.pxf.api.model.RequestContext; import org.greenplum.pxf.api.security.SecureLogin; @@ -64,6 +65,8 @@ public class JdbcBasePluginTest { private PreparedStatement mockStatement; @Mock private SecureLogin mockSecureLogin; + @Mock + private DecryptClient mockDecryptClient; private final SQLException exception = new SQLException("some error"); private Configuration configuration; @@ -238,7 +241,7 @@ public void testTransactionIsolationNotSetByUser() throws SQLException { when(mockConnectionManager.getConnection(any(), any(), any(), anyBoolean(), any(), any())).thenReturn(mockConnection); when(mockConnection.getMetaData()).thenReturn(mockMetaData); - JdbcBasePlugin plugin = new JdbcBasePlugin(mockConnectionManager, mockSecureLogin); + JdbcBasePlugin plugin = new JdbcBasePlugin(mockConnectionManager, mockSecureLogin, mockDecryptClient); plugin.setRequestContext(context); Connection conn = plugin.getConnection(); @@ -297,7 +300,7 @@ public void 
testTransactionIsolationSetByUserFailedToGetMetadata() throws SQLExc when(mockConnectionManager.getConnection(anyString(), anyString(), any(), anyBoolean(), any(), anyString())).thenReturn(mockConnection); doThrow(new SQLException("")).when(mockConnection).getMetaData(); - JdbcBasePlugin plugin = new JdbcBasePlugin(mockConnectionManager, mockSecureLogin); + JdbcBasePlugin plugin = new JdbcBasePlugin(mockConnectionManager, mockSecureLogin, mockDecryptClient); plugin.setRequestContext(context); assertThrows(SQLException.class, plugin::getConnection); } @@ -323,7 +326,7 @@ public void testGetPreparedStatementDoesNotSetQueryTimeoutIfNotSpecified() throw when(mockConnection.prepareStatement(anyString())).thenReturn(mockStatement); - JdbcBasePlugin plugin = new JdbcBasePlugin(mockConnectionManager, mockSecureLogin); + JdbcBasePlugin plugin = new JdbcBasePlugin(mockConnectionManager, mockSecureLogin, mockDecryptClient); plugin.setRequestContext(context); plugin.getPreparedStatement(mockConnection, "foo"); @@ -485,7 +488,7 @@ public void testGetConnectionConnPropsPoolDisabledPoolProps() throws SQLExceptio } private JdbcBasePlugin getPlugin(ConnectionManager mockConnectionManager, SecureLogin mockSecureLogin, RequestContext context) { - JdbcBasePlugin plugin = new JdbcBasePlugin(mockConnectionManager, mockSecureLogin); + JdbcBasePlugin plugin = new JdbcBasePlugin(mockConnectionManager, mockSecureLogin, mockDecryptClient); plugin.setRequestContext(context); plugin.afterPropertiesSet(); return plugin; diff --git a/server/pxf-jdbc/src/test/java/org/greenplum/pxf/plugins/jdbc/JdbcBasePluginTestInitialize.java b/server/pxf-jdbc/src/test/java/org/greenplum/pxf/plugins/jdbc/JdbcBasePluginTestInitialize.java index 6b0cdac06d..185cc8bbe8 100644 --- a/server/pxf-jdbc/src/test/java/org/greenplum/pxf/plugins/jdbc/JdbcBasePluginTestInitialize.java +++ b/server/pxf-jdbc/src/test/java/org/greenplum/pxf/plugins/jdbc/JdbcBasePluginTestInitialize.java @@ -20,6 +20,8 @@ */ import 
com.google.common.base.Ticker; +import io.arenadata.security.encryption.client.provider.TextEncryptorProvider; +import io.arenadata.security.encryption.client.service.impl.DecryptClientImpl; import org.apache.commons.collections.CollectionUtils; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.security.PxfUserGroupInformation; @@ -89,7 +91,8 @@ public void setup() { ); PxfUserGroupInformation mockPxfUserGroupInformation = mock(PxfUserGroupInformation.class); - plugin = new JdbcBasePlugin(connectionManager, new SecureLogin(mockPxfUserGroupInformation)); + TextEncryptorProvider mockTextEncryptorProvider = mock(TextEncryptorProvider.class); + plugin = new JdbcBasePlugin(connectionManager, new SecureLogin(mockPxfUserGroupInformation), new DecryptClientImpl(mockTextEncryptorProvider)); } /** diff --git a/server/pxf-service/src/main/java/org/greenplum/pxf/service/spring/PxfJksTextEncryptor.java b/server/pxf-service/src/main/java/org/greenplum/pxf/service/spring/PxfJksTextEncryptor.java deleted file mode 100644 index 595e3a0e87..0000000000 --- a/server/pxf-service/src/main/java/org/greenplum/pxf/service/spring/PxfJksTextEncryptor.java +++ /dev/null @@ -1,16 +0,0 @@ -package org.greenplum.pxf.service.spring; - -import org.greenplum.pxf.api.configuration.PxfJksTextEncryptorConfiguration; -import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; - -@Configuration -public class PxfJksTextEncryptor { - - @Bean - @ConditionalOnProperty({"pxf.ssl.jks-store.path", "pxf.ssl.jks-store.password", "pxf.ssl.salt.key"}) - PxfJksTextEncryptorConfiguration pxfJksTextEncryptorConfiguration() { - return new PxfJksTextEncryptorConfiguration(); - } -} From 55990272c47c8f663e9cd3e9dc2928db580716d0 Mon Sep 17 00:00:00 2001 From: Roman Zolotov Date: Thu, 10 Nov 2022 13:22:04 +0200 Subject: [PATCH 29/53] ADBDEV-3208: Add pxf command to 
encrypt password Before using this feature, we must put the executable jar-file of our encryption library into the $PXF_BASE/lib directory. --- server/pxf-service/src/scripts/pxf | 33 ++++++++++++++++++++++++++++++ 1 file changed, 33 insertions(+) diff --git a/server/pxf-service/src/scripts/pxf b/server/pxf-service/src/scripts/pxf index e011eeec67..e76660f1b5 100755 --- a/server/pxf-service/src/scripts/pxf +++ b/server/pxf-service/src/scripts/pxf @@ -227,6 +227,7 @@ function doHelp() It creates the servers, logs, lib, keytabs, and run directories inside \$PXF_BASE and copies configuration files. migrate migrates configurations from older installations of PXF + encrypt encrypt password with specified encryptor type. Default encryptor type is aes256 cluster perform on all the segment hosts in the cluster; try ${bold}pxf cluster help$normal sync synchronize \$PXF_BASE/{conf,lib,servers} directories onto . Use --delete to delete extraneous remote files @@ -492,6 +493,35 @@ function doSync() rsync -az${DELETE:+ --delete} -e "ssh -o StrictHostKeyChecking=no" "$PXF_BASE"/{conf,lib,servers} "${target_host}:$PXF_BASE" } +function doEncrypt() +{ + local pwd=$1 + local encryptorType=$2 + if [[ -z $pwd ]]; then + fail 'Please provide password you want to encrypt' + fi + if [[ -z $encryptorType ]]; then + encryptorType=aes256 + fi + conf_file="$PXF_BASE/conf/pxf-application.properties" + if [[ -z $conf_file ]]; then + fail "File 'pxf-application.propertie' was not found in PXF_BASE/conf/ directory" + fi + jarfile=$(find "$PXF_BASE/lib" -maxdepth 1 -type f -name 'encryption-*.jar') + if [[ -z $jarfile ]]; then + fail "Encryption library was not found in $PXF_BASE/lib/ directory" + fi + jksPath=$(getProperty 'pxf.ssl.jks-store.path' "$conf_file") + jksPassword=$(getProperty 'pxf.ssl.jks-store.password' "$conf_file") + jksEncryptKeyAlias=$(getProperty 'pxf.ssl.salt.key' "$conf_file") + checkJavaHome + java -jar $jarfile -command encrypt -jksPath $jksPath -jksPassword $jksPassword
-jksEncryptKeyAlias $jksEncryptKeyAlias -message $pwd -encryptorType $encryptorType +} + +function getProperty { + grep "^${1}" ${2} | cut -d'=' -f2 +} + function doCluster() { local cmd=$2 @@ -545,6 +575,9 @@ case $pxf_script_command in doSync "$2" fi ;; + 'encrypt') + doEncrypt "$2" "$3" + ;; 'help' | '-h' | '--help') doHelp ;; From 4379e61d86a0a52d0104a274982bf2bb3b7120fd Mon Sep 17 00:00:00 2001 From: Roman Zolotov Date: Thu, 10 Nov 2022 13:38:02 +0200 Subject: [PATCH 30/53] ADBDEV-3208: Fix typo --- server/pxf-service/src/scripts/pxf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/pxf-service/src/scripts/pxf b/server/pxf-service/src/scripts/pxf index e76660f1b5..08ac961eac 100755 --- a/server/pxf-service/src/scripts/pxf +++ b/server/pxf-service/src/scripts/pxf @@ -505,7 +505,7 @@ function doEncrypt() fi conf_file="$PXF_BASE/conf/pxf-application.properties" if [[ -z $conf_file ]]; then - fail "File 'pxf-application.propertie' was not found in PXF_BASE/conf/ directory" + fail "File 'pxf-application.properties' was not found in PXF_BASE/conf/ directory" fi jarfile=$(find "$PXF_BASE/lib" -maxdepth 1 -type f -name 'encryption-*.jar') if [[ -z $jarfile ]]; then From 9d29dd44fa3ccf236e363090a2881b14d2613560 Mon Sep 17 00:00:00 2001 From: Roman Zolotov Date: Thu, 10 Nov 2022 16:35:28 +0200 Subject: [PATCH 31/53] ADBDEV-3208: Change variable name --- server/pxf-service/src/scripts/pxf | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/server/pxf-service/src/scripts/pxf b/server/pxf-service/src/scripts/pxf index 08ac961eac..775166f173 100755 --- a/server/pxf-service/src/scripts/pxf +++ b/server/pxf-service/src/scripts/pxf @@ -507,15 +507,15 @@ function doEncrypt() if [[ -z $conf_file ]]; then fail "File 'pxf-application.properties' was not found in PXF_BASE/conf/ directory" fi - jarfile=$(find "$PXF_BASE/lib" -maxdepth 1 -type f -name 'encryption-*.jar') - if [[ -z $jarfile ]]; then + encr_jar_file=$(find "$PXF_BASE/lib" 
-maxdepth 1 -type f -name 'encryption-*.jar') + if [[ -z $encr_jar_file ]]; then fail "Encryption library was not found in $PXF_BASE/lib/ directory" fi jksPath=$(getProperty 'pxf.ssl.jks-store.path' "$conf_file") jksPassword=$(getProperty 'pxf.ssl.jks-store.password' "$conf_file") jksEncryptKeyAlias=$(getProperty 'pxf.ssl.salt.key' "$conf_file") checkJavaHome - java -jar $jarfile -command encrypt -jksPath $jksPath -jksPassword $jksPassword -jksEncryptKeyAlias $jksEncryptKeyAlias -message $pwd -encryptorType $encryptorType + java -jar $encr_jar_file -command encrypt -jksPath $jksPath -jksPassword $jksPassword -jksEncryptKeyAlias $jksEncryptKeyAlias -message $pwd -encryptorType $encryptorType } function getProperty { From de292f3f244f08b377bf5c2f57dc8e725a1417a8 Mon Sep 17 00:00:00 2001 From: Roman Zolotov Date: Fri, 11 Nov 2022 12:28:49 +0200 Subject: [PATCH 32/53] ADBDEV-3209: Update README --- server/pxf-jdbc/README.md | 105 +++++++++++++++++++++++++++++++++++++- 1 file changed, 104 insertions(+), 1 deletion(-) diff --git a/server/pxf-jdbc/README.md b/server/pxf-jdbc/README.md index e5e96b976e..9cd39c6d05 100644 --- a/server/pxf-jdbc/README.md +++ b/server/pxf-jdbc/README.md @@ -102,7 +102,8 @@ User name (login) to use to connect to external database. #### JDBC password -Password to use to connect to external database. +Password to use to connect to external database. The password might be encrypted. +How to use encrypted password is described in section [JDBC password encryption](#jdbc-password-encryption). * **Option**: `PASS` * **Configuration parameter**: `jdbc.password` @@ -650,3 +651,105 @@ Follow these steps to enable connectivity to Hive: If you enable impersonation, do not explicitly specify `hive.server2.proxy.user` property in the URL. 
- if Hive is configured with `hive.server2.enable.doAs = FALSE`, Hive will run Hadoop operations with the identity provided by the PXF Kerberos principal (usually `gpadmin`) + + +## JDBC password encryption +It is possible to use an encrypted password instead of the password in a paint text in the `$PXF_BASE/servers//jdbc-site.xml` file. + +### How to enable encryption +Before using an encrypted password you have to **create keystore and add encryption key** to the store.\ +The keystore is a file where the encryption key will be saved. And the encryption key will be used to encrypt and decrypt password.\ +The keystore and the encryption key have to be created on each segment server. + +The command to create the keystore:\ +```keytool -keystore -storepass -genkey -keypass -alias ```, where\ +`keystore_file` - the file path of the keystore;\ +`keystore_password` - password which will be used to access the keystore;\ +`key_password` - password for the specific `keystore_alias`. It might be the same as `keystore_password`;\ +`keystore_alias` - name of the keystore. + +You will be asked to enter some information about your organization, first and last name, etc. after running the command. + +Example of the command to create a keystore:\ +`keytool -keystore /var/lib/pxf/conf/pxfkeystore.jks -storepass 12345678 -genkey -keypass 12345678 -alias pxfkeystore` + +The next step is to add encryption key.\ +The command to add encryption key to the keystore:\ +`keytool -keystore -storepass -importpass -keypass -alias `, where\ +`keystore_file` - the file path of the keystore that was created in the previous step;\ +`keystore_password` - password to access the keystore;\ +`key_password` - password for the specific `encryption_key_alias`. It might be the same as `keystore_password`;\ +`encryption_key_alias` - name of the encryption key. This name will be used to get encryption key from the keystore. 
+ +You will be asked to enter an encryption key you want to store after running the command. + +Example of the command to create a keystore:\ +`keytool -keystore /var/lib/pxf/conf/pxfkeystore.jks -storepass 12345678 -importpass -keypass 12345678 -alias PXF_PASS_KEY`\ +*Enter the password to be stored:* qwerty + +Finally, additional properties have to be added into the `$PXF_BASE/conf/pxf-application.properties` file on each segment:\ +`pxf.ssl.jks-store.path` - a Java keystore (JKS) absolute file path. It is a `keystore_file` from the command to create the keystore;\ +`pxf.ssl.jks-store.password` - a Java keystore password. It is a `keystore_password` from the command to create the keystore;\ +`pxf.ssl.salt.key` - an alias which is used to get encryption key from the keystore. It is an `encryption_key_alias` from the command to add encryption key to the keystore. + +You have to restart PXF service after adding the properties. + +Example of the properties in the `pxf-application.properties` file: +``` +# Encryption +pxf.ssl.jks-store.path=/var/lib/pxf/conf/pxfkeystore.jks +pxf.ssl.jks-store.password=12345678 +pxf.ssl.salt.key=PXF_PASS_KEY +``` + +### How to use encryption +The first step is to encrypt password that is used to connect to the database.\ +There is a special command to do this action:\ +`pxf encrypt `, where\ +`` - password in a plain text that is used to connect to the database. This password will be encrypted;\ +`` - Optional. The algorithm to encrypt password. Default value: `aes256` + +The result of the command will be an encrypted password in a format `aes256:encrypted_password` + +Example of the command to encrypt password:\ +`pxf encrypt biuserpassword`\ +*Output:* aes256:7BhhI+10ut+xM70iRlyxVDD/tokap3pbK2bmkLgPOYLH7NcfEYJSAIYkApjKM3Zu + +Next, you have to copy the encrypted password including aes256 prefix and paste it into `$PXF_BASE/servers//jdbc-site.xml` file +instead of the password in a plain text. 
+ +The example of the `jdbc-site.xml` with encrypted password: +```xml + + + + jdbc.driver + org.postgresql.Driver + Class name of the JDBC driver (e.g. org.postgresql.Driver) + + + jdbc.url + jdbc:postgresql://10.10.10.20/adb + The URL that the JDBC driver can use to connect to the database (e.g. jdbc:postgresql://localhost/postgres) + + + jdbc.user + bi_user + User name for connecting to the database (e.g. postgres) + + + jdbc.password + aes256:7BhhI+10ut+xM70iRlyxVDD/tokap3pbK2bmkLgPOYLH7NcfEYJSAIYkApjKM3Zu + Password for connecting to the database (e.g. postgres) + + +``` + +You don't need to make any additional changes when you crate an external table. The decryption engine will recognize whether the password is encrypted or not. +If the password is encrypted the decrypter will take care about the password. If the password is in plain text format it will be passed as is to the JDBC connection manager. + + + + + + From a73a6d5aea7b19a9025af14670038436f541275f Mon Sep 17 00:00:00 2001 From: Roman Zolotov Date: Fri, 11 Nov 2022 12:58:39 +0200 Subject: [PATCH 33/53] ADBDEV-3209: Update README. Add Prerequisites --- server/pxf-jdbc/README.md | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/server/pxf-jdbc/README.md b/server/pxf-jdbc/README.md index 9cd39c6d05..a2958f6fdf 100644 --- a/server/pxf-jdbc/README.md +++ b/server/pxf-jdbc/README.md @@ -656,6 +656,10 @@ Follow these steps to enable connectivity to Hive: ## JDBC password encryption It is possible to use an encrypted password instead of the password in a paint text in the `$PXF_BASE/servers//jdbc-site.xml` file. +### Prerequisites +There is a special library that is used to encrypt and decrypt password. The executable jar-file of this library has to be copied to `$PXF_BASE/lib/` directory on each segment. +It is used to encrypt password. The original jar-file of the library is used to decrypt password. It is added as a dependency to the PXF project. 
+ ### How to enable encryption Before using an encrypted password you have to **create keystore and add encryption key** to the store.\ The keystore is a file where the encryption key will be saved. And the encryption key will be used to encrypt and decrypt password.\ @@ -687,7 +691,7 @@ Example of the command to create a keystore:\ `keytool -keystore /var/lib/pxf/conf/pxfkeystore.jks -storepass 12345678 -importpass -keypass 12345678 -alias PXF_PASS_KEY`\ *Enter the password to be stored:* qwerty -Finally, additional properties have to be added into the `$PXF_BASE/conf/pxf-application.properties` file on each segment:\ +Next, additional properties have to be added into the `$PXF_BASE/conf/pxf-application.properties` file on each segment:\ `pxf.ssl.jks-store.path` - a Java keystore (JKS) absolute file path. It is a `keystore_file` from the command to create the keystore;\ `pxf.ssl.jks-store.password` - a Java keystore password. It is a `keystore_password` from the command to create the keystore;\ `pxf.ssl.salt.key` - an alias which is used to get encryption key from the keystore. It is an `encryption_key_alias` from the command to add encryption key to the keystore. 
From a6e5b6a65281082d55da53cda1b9b6a4a32da03c Mon Sep 17 00:00:00 2001 From: Roman Zolotov Date: Mon, 14 Nov 2022 09:37:29 +0200 Subject: [PATCH 34/53] ADBDEV-3209: Change version of the encryption lib --- server/build.gradle | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/build.gradle b/server/build.gradle index e7589abacb..7060fa5f09 100644 --- a/server/build.gradle +++ b/server/build.gradle @@ -139,7 +139,7 @@ configure(javaProjects) { dependency("org.xerial.snappy:snappy-java:1.1.8.4") // Arenadata encryption - dependency("io.arenadata.security:encryption:1.0.0-SNAPSHOT") + dependency("io.arenadata.security:encryption:1.0.0") // Hadoop dependencies dependencySet(group:"org.apache.hadoop", version:"${hadoopVersion}") { From 93f41a7477d668dda01cb9baca0a39a53f6958a6 Mon Sep 17 00:00:00 2001 From: Georgy Shelkovy Date: Sat, 19 Nov 2022 01:36:39 +0500 Subject: [PATCH 35/53] ADBDEV-2976: Handle multi_perform curl errors immediately (#24) The internal buffer will be filled during the first pxfprotocol_import call. If there is chunk corruption, we fail here without trying to interpret the received message during check_response. But if a corrupted chunk is read during the next filling of the buffer, this chunk will be written in the internal buffer. Then if trailer CRLF sequence is not faced, CHUNKE_BAD_CHUNK (with code 3) will be returned and connection must be interrupted as far as application isn't able to find next chunk start position. But current PXF implementation doesn't check error buffer in case of internal buffer isn't empty (we've already placed corrupted chunk here) and tries to interpret it with gpdbwritableformatter_import. If we are lucky and memory corruption isn't happened, we receive error during next buffer filling. To solve this problem, we should check multi_perform result, similar to curl_easy_perform libcurl function. 
--- external-table/src/libchurl.c | 7 ++----- fdw/libchurl.c | 7 ++----- 2 files changed, 4 insertions(+), 10 deletions(-) diff --git a/external-table/src/libchurl.c b/external-table/src/libchurl.c index c3f880d877..2b85669af1 100644 --- a/external-table/src/libchurl.c +++ b/external-table/src/libchurl.c @@ -473,7 +473,6 @@ churl_read_check_connectivity(CHURL_HANDLE handle) Assert(!context->upload); fill_internal_buffer(context, 1); - check_response(context); } /* @@ -626,6 +625,8 @@ multi_perform(churl_context *context) if (curl_error != CURLM_OK) elog(ERROR, "internal error: curl_multi_perform failed (%d - %s)", curl_error, curl_easy_strerror(curl_error)); + + check_response(context); } static bool @@ -653,8 +654,6 @@ flush_internal_buffer(churl_context *context) multi_perform(context); } - check_response(context); - if ((context->curl_still_running == 0) && ((context_buffer->top - context_buffer->bot) > 0)) elog(ERROR, "failed sending to remote component %s", get_dest_address(context->curl_handle)); @@ -709,8 +708,6 @@ finish_upload(churl_context *context) */ while (context->curl_still_running != 0) multi_perform(context); - - check_response(context); } static void diff --git a/fdw/libchurl.c b/fdw/libchurl.c index 76eb5938f1..7ec7974403 100644 --- a/fdw/libchurl.c +++ b/fdw/libchurl.c @@ -457,7 +457,6 @@ churl_read_check_connectivity(CHURL_HANDLE handle) Assert(!context->upload); fill_internal_buffer(context, 1); - check_response(context); } /* @@ -610,6 +609,8 @@ multi_perform(churl_context *context) if (curl_error != CURLM_OK) elog(ERROR, "internal error: curl_multi_perform failed (%d - %s)", curl_error, curl_easy_strerror(curl_error)); + + check_response(context); } bool @@ -641,8 +642,6 @@ flush_internal_buffer(churl_context *context) ((context_buffer->top - context_buffer->bot) > 0)) elog(ERROR, "failed sending to remote component %s", get_dest_address(context->curl_handle)); - check_response(context); - context_buffer->top = 0; context_buffer->bot = 0; } 
@@ -694,8 +693,6 @@ finish_upload(churl_context *context) */ while (context->curl_still_running != 0) multi_perform(context); - - check_response(context); } void From 1daf1324694a4e05d283c3cfa6494ba7d0dfdd9e Mon Sep 17 00:00:00 2001 From: GSidorov Date: Tue, 27 Dec 2022 15:34:17 +0300 Subject: [PATCH 36/53] ADBDEV-3319: Implement monitoring metrics in PXF service --- server/pxf-service/build.gradle | 2 + .../rest/ServiceMetricsRestController.java | 83 +++++++++++++++++++ .../src/main/resources/application.properties | 15 ++++ .../ServiceMetricsRestControllerTest.java | 40 +++++++++ 4 files changed, 140 insertions(+) create mode 100644 server/pxf-service/src/main/java/org/greenplum/pxf/service/rest/ServiceMetricsRestController.java create mode 100644 server/pxf-service/src/test/java/org/greenplum/pxf/service/rest/ServiceMetricsRestControllerTest.java diff --git a/server/pxf-service/build.gradle b/server/pxf-service/build.gradle index 48bdc13a16..9bf453232e 100644 --- a/server/pxf-service/build.gradle +++ b/server/pxf-service/build.gradle @@ -39,6 +39,8 @@ dependencies { implementation("org.apache.logging.log4j:log4j-spring-boot") implementation('org.springframework.boot:spring-boot-starter-actuator') implementation('io.micrometer:micrometer-registry-prometheus') + implementation('org.springframework.cloud:spring-cloud-starter-netflix-eureka-client:3.0.6') + implementation('org.mockito:mockito-inline') implementation("org.apache.hadoop:hadoop-hdfs-client") { transitive = false } implementation("org.apache.hadoop:hadoop-auth") { transitive = false } diff --git a/server/pxf-service/src/main/java/org/greenplum/pxf/service/rest/ServiceMetricsRestController.java b/server/pxf-service/src/main/java/org/greenplum/pxf/service/rest/ServiceMetricsRestController.java new file mode 100644 index 0000000000..57c150b483 --- /dev/null +++ b/server/pxf-service/src/main/java/org/greenplum/pxf/service/rest/ServiceMetricsRestController.java @@ -0,0 +1,83 @@ +package 
org.greenplum.pxf.service.rest; + +import lombok.Getter; +import lombok.RequiredArgsConstructor; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.boot.actuate.metrics.MetricsEndpoint; +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RestController; + +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.Objects; +import java.util.stream.Collectors; + + +@RestController +@RequestMapping("/service-metrics") +public class ServiceMetricsRestController { + private static final Collection METRIC_NAMES = Arrays.asList( + "http.server.requests", + "jvm.buffer.count", + "jvm.buffer.memory.used", + "jvm.buffer.total.capacity", + "jvm.memory.committed", + "jvm.memory.max", + "jvm.memory.used", + "pxf.bytes.received", + "pxf.bytes.sent", + "pxf.fragments.sent", + "pxf.records.received", + "pxf.records.sent" + ); + private static final Collection CLUSTER_METRIC_NAMES = Arrays.asList( + "jvm.memory.committed", + "jvm.memory.max", + "jvm.memory.used", + "pxf.bytes.received", + "pxf.bytes.sent", + "pxf.records.received", + "pxf.records.sent" + ); + private final MetricsEndpoint metricsEndpoint; + private final String clusterName; + private final String hostName; + + public ServiceMetricsRestController(final MetricsEndpoint metricsEndpoint, + @Value("${cluster-name}") final String clusterName, + @Value("${eureka.instance.hostname}") final String hostName) { + this.metricsEndpoint = metricsEndpoint; + this.clusterName = clusterName; + this.hostName = hostName; + } + + @GetMapping + public Collection get() { + return METRIC_NAMES.stream() + .map(name -> metricsEndpoint.metric(name, Collections.emptyList())) + .filter(Objects::nonNull) + .collect(Collectors.toList()); + } + + @GetMapping("/cluster-metrics") + public ClusterMetrics getClusterMetrics() { + return new 
ClusterMetrics( + clusterName, + hostName, + CLUSTER_METRIC_NAMES.stream() + .map(name -> metricsEndpoint.metric(name, Collections.emptyList())) + .filter(Objects::nonNull) + .collect(Collectors.toList())); + } + + + @Getter + @RequiredArgsConstructor + public static class ClusterMetrics { + private final String cluster; + private final String hostname; + private final Collection metrics; + } +} diff --git a/server/pxf-service/src/main/resources/application.properties b/server/pxf-service/src/main/resources/application.properties index d47b4b8fec..ff8bc5b7f1 100644 --- a/server/pxf-service/src/main/resources/application.properties +++ b/server/pxf-service/src/main/resources/application.properties @@ -58,3 +58,18 @@ pxf.features.kerberos.expand-user-principal=true pxf.log.level=info logging.file.name=${pxf.logdir:/tmp}/pxf-service.log logging.file.path=${pxf.logdir:/tmp} + +cluster-name=${cluster_name:} + +# eureka +eureka.client.enabled=${adcc_enabled:false} +eureka.client.service-url.defaultZone=${ADCC_EUREKA_CLIENT_SERV_URL_DEF_ZONE:http://0.0.0.0:8761/eureka} +eureka.instance.prefer-ip-address=${ADCC_EUREKA_CLIENT_PREFER_IP_ADDRESS:true} +eureka.instance.hostname=${pxf_hostname:localhost} +eureka.instance.ip-address=${pxf_ip_address:127.0.0.1} +eureka.instance.appname=PXF SERVICE +eureka.instance.metadata-map.port=${server.port} +eureka.instance.metadata-map.cluster=${cluster-name} +eureka.instance.metadata-map.name=pxf-service +eureka.instance.metadata-map.version=${pxf-version:0.0.0-SNAPSHOT} +eureka.instance.metadata-map.status=UP \ No newline at end of file diff --git a/server/pxf-service/src/test/java/org/greenplum/pxf/service/rest/ServiceMetricsRestControllerTest.java b/server/pxf-service/src/test/java/org/greenplum/pxf/service/rest/ServiceMetricsRestControllerTest.java new file mode 100644 index 0000000000..3943cf0ebf --- /dev/null +++ b/server/pxf-service/src/test/java/org/greenplum/pxf/service/rest/ServiceMetricsRestControllerTest.java @@ -0,0 +1,40 @@ 
+package org.greenplum.pxf.service.rest; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; +import org.springframework.boot.actuate.metrics.MetricsEndpoint; + +import java.util.Collection; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +class ServiceMetricsRestControllerTest { + private static final String clusterName = "cluster name"; + private static final String hostName = "host name"; + private final MetricsEndpoint metricsEndpoint = mock(MetricsEndpoint.class); + private final ServiceMetricsRestController controller = new ServiceMetricsRestController(metricsEndpoint, clusterName, hostName); + + @Test + public void get() { + MetricsEndpoint.MetricResponse metricResponse = mock(MetricsEndpoint.MetricResponse.class); + when(metricsEndpoint.metric(anyString(), any())).thenReturn(metricResponse).thenReturn(null); + Collection result = controller.get(); + assertEquals(1, result.size()); + result.stream().findAny().filter(metricResponse::equals).orElseGet(Assertions::fail); + } + + @Test + public void getClusterMetrics() { + MetricsEndpoint.MetricResponse metricResponse = mock(MetricsEndpoint.MetricResponse.class); + when(metricsEndpoint.metric(anyString(), any())).thenReturn(metricResponse).thenReturn(null); + ServiceMetricsRestController.ClusterMetrics result = controller.getClusterMetrics(); + assertEquals(clusterName, result.getCluster()); + assertEquals(hostName, result.getHostname()); + assertEquals(1, result.getMetrics().size()); + result.getMetrics().stream().findAny().filter(metricResponse::equals).orElseGet(Assertions::fail); + } +} \ No newline at end of file From 44a043fdc5dea4d63a28c4fc9d09670ef9af7466 Mon Sep 17 00:00:00 2001 From: GSidorov Date: Wed, 28 Dec 2022 08:39:39 +0300 Subject: [PATCH 37/53] ADBDEV-3319: Implement 
monitoring metrics in PXF service --- .../pxf/service/rest/ServiceMetricsRestController.java | 2 -- 1 file changed, 2 deletions(-) diff --git a/server/pxf-service/src/main/java/org/greenplum/pxf/service/rest/ServiceMetricsRestController.java b/server/pxf-service/src/main/java/org/greenplum/pxf/service/rest/ServiceMetricsRestController.java index 57c150b483..892ea2262e 100644 --- a/server/pxf-service/src/main/java/org/greenplum/pxf/service/rest/ServiceMetricsRestController.java +++ b/server/pxf-service/src/main/java/org/greenplum/pxf/service/rest/ServiceMetricsRestController.java @@ -14,7 +14,6 @@ import java.util.Objects; import java.util.stream.Collectors; - @RestController @RequestMapping("/service-metrics") public class ServiceMetricsRestController { @@ -72,7 +71,6 @@ public ClusterMetrics getClusterMetrics() { .collect(Collectors.toList())); } - @Getter @RequiredArgsConstructor public static class ClusterMetrics { From 836124f7200e63b15c40d67a92636218d60f882d Mon Sep 17 00:00:00 2001 From: Roman Zolotov Date: Mon, 16 Jan 2023 16:44:53 +0200 Subject: [PATCH 38/53] Update README.md --- server/pxf-jdbc/README.md | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/server/pxf-jdbc/README.md b/server/pxf-jdbc/README.md index a2958f6fdf..537473cfc1 100644 --- a/server/pxf-jdbc/README.md +++ b/server/pxf-jdbc/README.md @@ -687,10 +687,17 @@ The command to add encryption key to the keystore:\ You will be asked to enter an encryption key you want to store after running the command. 
-Example of the command to create a keystore:\
+Example of the command to add encryption key:\
 `keytool -keystore /var/lib/pxf/conf/pxfkeystore.jks -storepass 12345678 -importpass -keypass 12345678 -alias PXF_PASS_KEY`\
 *Enter the password to be stored:* qwerty
+In case of error `keytool error: java.security.KeyStoreException: Cannot store non-PrivateKeys` run the following command before adding an encryption key:\
+`keytool -importkeystore -srckeystore <keystore_file> -destkeystore <keystore_file> -deststoretype pkcs12`, where\
+`<keystore_file>` - the file path of the keystore which has been created before;\
+Example of the command:\
+`keytool -importkeystore -srckeystore /var/lib/pxf/conf/pxfkeystore.jks -destkeystore /var/lib/pxf/conf/pxfkeystore.jks -deststoretype pkcs12`
+
+
 Next, additional properties have to be added into the `$PXF_BASE/conf/pxf-application.properties` file on each segment:\
 `pxf.ssl.jks-store.path` - a Java keystore (JKS) absolute file path. It is a `keystore_file` from the command to create the keystore;\
 `pxf.ssl.jks-store.password` - a Java keystore password. It is a `keystore_password` from the command to create the keystore;\
From 11c35dfd39663d956bda31024f858498d4affa4f Mon Sep 17 00:00:00 2001
From: Alexey Gordeev
Date: Wed, 22 Feb 2023 12:45:33 +0500
Subject: [PATCH 39/53] PXF Docker environment for tests running (#33)

* PXF Docker environment for tests running

The goal of this patch is to begin automation of PXF testing.
It contains scripts and images for Docker environment, like it's
done for other ADB components and extensions. Existing upstream
concourse scripts were reused where it was possible.
For now, we don't focus on hadoop integration testing, but do unit
testing, fdw and external-table regression testing.
* ADBDEV-3356: Fix ClassCastException issue in DemoTextResolver If the output format of the external table is 'TEXT' we return DataInputStream instead of byte array --------- Co-authored-by: Roman Zolotov --- arenadata/Dockerfile | 33 +++++++++++++++++++ arenadata/README.md | 15 +++++++++ arenadata/test_in_docker.sh | 23 +++++++++++++ .../pxf/api/examples/DemoTextResolver.java | 14 ++++++-- .../greenplum/pxf/api/DemoResolverTest.java | 6 ++-- 5 files changed, 87 insertions(+), 4 deletions(-) create mode 100644 arenadata/Dockerfile create mode 100644 arenadata/README.md create mode 100755 arenadata/test_in_docker.sh diff --git a/arenadata/Dockerfile b/arenadata/Dockerfile new file mode 100644 index 0000000000..28e46aeb72 --- /dev/null +++ b/arenadata/Dockerfile @@ -0,0 +1,33 @@ +FROM hub.adsw.io/library/gpdb6_regress:latest as base + +# install go, ginkgo and keep env variables which may be used as a part of base image +RUN yum install -y go +ENV GOPATH=$HOME/go +ENV PATH=$PATH:/usr/local/go/bin:$GOPATH/bin +RUN go install github.com/onsi/ginkgo/ginkgo@latest + +# leave pxf artifacts dir env also +ENV OUTPUT_ARTIFACT_DIR="pxf_tarball" + +# remove unnecessary artifacts and create symlinks +# concource scripts expects gpdb and pxf placed in the same folder +RUN rm /home/gpadmin/bin_gpdb/server-*.tar.gz && \ + mkdir /tmp/build && \ + ln -s /home/gpadmin/gpdb_src /tmp/build/gpdb_src && \ + ln -s /home/gpadmin/bin_gpdb /tmp/build/bin_gpdb +# default working dir - the place where all sources and artifacts are placed +WORKDIR /tmp/build + +# create separate image with files we don't want to keep in base image +FROM base as build +COPY . 
/tmp/build/pxf_src +RUN source gpdb_src/concourse/scripts/common.bash && \ + install_gpdb && \ + source '/usr/local/greenplum-db-devel/greenplum_path.sh' && \ + mkdir ${OUTPUT_ARTIFACT_DIR} && \ + pxf_src/concourse/scripts/compile_pxf.bash + +# create test image which prepares base image and keeps only pxf artifacts from build image +FROM base as test +COPY --from=build /tmp/build/${OUTPUT_ARTIFACT_DIR}/pxf.tar.gz /tmp/build/${OUTPUT_ARTIFACT_DIR}/ +COPY --from=build /tmp/build/pxf_src /tmp/build/pxf_src diff --git a/arenadata/README.md b/arenadata/README.md new file mode 100644 index 0000000000..d43ae53a8f --- /dev/null +++ b/arenadata/README.md @@ -0,0 +1,15 @@ +## How to build PXF Docker image +From the root pxf folder run: +```bash +docker build -t gpdb6_pxf_regress:latest -f arenadata/Dockerfile . +``` +This will build an image called `gpdb6_pxf_regress` with the tag `latest`. This image is based on `gpdb6_regress:latest`, which additionally contains pxf sources and pxf artifacts tarball in `/tmp/build/pxf_src` and `/tmp/build/pxf_tarball` folders respectively. + +## How to test PXF +During the image building phase `compile_pxf.bash` script additionally calls `test` make target, which calls `make -C cli/go/src/pxf-cli test` and `make -C server test` commands. 
+To additionally test `fdw` and `external-table` parts you may call: +```bash +docker run --rm -it \ + --privileged --sysctl kernel.sem="500 1024000 200 4096" \ + gpdb6_pxf_regress:latest /tmp/build/pxf_src/arenadata/test_in_docker.sh +``` diff --git a/arenadata/test_in_docker.sh b/arenadata/test_in_docker.sh new file mode 100755 index 0000000000..dc97e3eb3a --- /dev/null +++ b/arenadata/test_in_docker.sh @@ -0,0 +1,23 @@ +#!/usr/bin/env bash +# This script depends on hub.adsw.io/library/gpdb6_pxf_regress +set -exo pipefail + +# manually prepare gpadmin user; test_pxf.bash doesn't tweak gpadmin folder permissions and ssh keys +./gpdb_src/concourse/scripts/setup_gpadmin_user.bash +# unpack gpdb and pxf; run gpdb cluster and pxf server +/tmp/build/pxf_src/concourse/scripts/test_pxf.bash +# tweak necessary folders to run regression tests later +chown gpadmin:gpadmin -R /usr/local/greenplum-db-devel +chown gpadmin:gpadmin -R /tmp/build/pxf_src + +# test fdw and external-table +su - gpadmin -c " + source '/usr/local/greenplum-db-devel/greenplum_path.sh'; + source '/home/gpadmin/gpdb_src/gpAux/gpdemo/gpdemo-env.sh'; + cd /tmp/build/pxf_src/fdw && + make install && + make installcheck && + cd ../external-table/ && + make install && + make installcheck; +" diff --git a/server/pxf-api/src/main/java/org/greenplum/pxf/api/examples/DemoTextResolver.java b/server/pxf-api/src/main/java/org/greenplum/pxf/api/examples/DemoTextResolver.java index 775959e5ab..55eae11a5d 100644 --- a/server/pxf-api/src/main/java/org/greenplum/pxf/api/examples/DemoTextResolver.java +++ b/server/pxf-api/src/main/java/org/greenplum/pxf/api/examples/DemoTextResolver.java @@ -23,6 +23,8 @@ import org.greenplum.pxf.api.OneRow; import org.greenplum.pxf.api.io.DataType; +import java.io.ByteArrayOutputStream; +import java.io.DataInputStream; import java.util.LinkedList; import java.util.List; @@ -62,8 +64,16 @@ public OneRow setFields(List record) throws Exception { throw new Exception("Unexpected record 
format, expected 1 field, found " + (record == null ? 0 : record.size())); } - byte[] value = (byte[]) record.get(0).val; + int readCount; + byte[] data = new byte[1024]; + ByteArrayOutputStream buffer = new ByteArrayOutputStream(); + DataInputStream dis = (DataInputStream) record.get(0).val; + while ((readCount = dis.read(data, 0, data.length)) != -1) { + buffer.write(data, 0, readCount); + } + buffer.flush(); + byte[] bytes= buffer.toByteArray(); // empty array means the end of input stream, return null to stop iterations - return value.length == 0 ? null : new OneRow(value); + return bytes.length == 0 ? null : new OneRow(bytes); } } diff --git a/server/pxf-api/src/test/java/org/greenplum/pxf/api/DemoResolverTest.java b/server/pxf-api/src/test/java/org/greenplum/pxf/api/DemoResolverTest.java index 1f7e72fc5f..3e455a6ce9 100755 --- a/server/pxf-api/src/test/java/org/greenplum/pxf/api/DemoResolverTest.java +++ b/server/pxf-api/src/test/java/org/greenplum/pxf/api/DemoResolverTest.java @@ -26,6 +26,8 @@ import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; +import java.io.ByteArrayInputStream; +import java.io.DataInputStream; import java.util.Arrays; import java.util.Collections; import java.util.List; @@ -55,7 +57,7 @@ public void setup() { textResolver = new DemoTextResolver(); row = new OneRow("0.0", DATA); - field = new OneField(VARCHAR.getOID(), DATA.getBytes()); + field = new OneField(VARCHAR.getOID(), new DataInputStream(new ByteArrayInputStream(DATA.getBytes()))); } @Test @@ -79,7 +81,7 @@ public void testSetTextData() throws Exception { @Test public void testSetEmptyTextData() throws Exception { - OneField field = new OneField(VARCHAR.getOID(), new byte[]{}); + OneField field = new OneField(VARCHAR.getOID(), new DataInputStream(new ByteArrayInputStream(new byte[]{}))); OneRow output = textResolver.setFields(Collections.singletonList(field)); assertNull(output); } From 3dcaed40d7357d1b81192e76aa8e82c6ebc92a29 Mon Sep 17 00:00:00 2001 
From: Roman Zolotov Date: Thu, 2 Mar 2023 01:17:28 +0200 Subject: [PATCH 40/53] ADBDEV-3099: Add support year if it contains more than 4 digits --- .../pxf/plugins/jdbc/JdbcBasePlugin.java | 10 +++ .../pxf/plugins/jdbc/JdbcResolver.java | 71 +++++++++++++++++-- 2 files changed, 75 insertions(+), 6 deletions(-) diff --git a/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/JdbcBasePlugin.java b/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/JdbcBasePlugin.java index db8267ba11..440a13424b 100644 --- a/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/JdbcBasePlugin.java +++ b/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/JdbcBasePlugin.java @@ -97,6 +97,8 @@ public class JdbcBasePlugin extends BasePlugin { private static final String HIVE_URL_PREFIX = "jdbc:hive2://"; private static final String HIVE_DEFAULT_DRIVER_CLASS = "org.apache.hive.jdbc.HiveDriver"; private static final String MYSQL_DRIVER_PREFIX = "com.mysql."; + private static final String JDBC_DATE_WIDE_RANGE = "jdbc.date.wide-range"; + protected static boolean isDateWideRange; private enum TransactionIsolation { READ_UNCOMMITTED(1), @@ -379,6 +381,14 @@ public void afterPropertiesSet() { // to switch effective user once connection is established poolQualifier = configuration.get(JDBC_POOL_QUALIFIER_PROPERTY_NAME); } + + // Optional parameter. 
Get the flag whether the year might contain more than 4 digits in `date` or 'timestamp' or not + String dateWideRange = context.getOption(JDBC_DATE_WIDE_RANGE); + if (dateWideRange != null) { + isDateWideRange = Boolean.parseBoolean(dateWideRange); + } else { + isDateWideRange = configuration.getBoolean(JDBC_DATE_WIDE_RANGE, false); + } } /** diff --git a/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/JdbcResolver.java b/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/JdbcResolver.java index 918576d397..8d3e995770 100644 --- a/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/JdbcResolver.java +++ b/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/JdbcResolver.java @@ -19,6 +19,7 @@ * under the License. */ +import org.apache.commons.lang.StringUtils; import org.greenplum.pxf.api.OneField; import org.greenplum.pxf.api.OneRow; import org.greenplum.pxf.api.io.DataType; @@ -36,6 +37,10 @@ import java.sql.Timestamp; import java.sql.Types; import java.text.ParseException; +import java.time.LocalDate; +import java.time.LocalDateTime; +import java.time.OffsetDateTime; +import java.time.ZoneId; import java.util.EnumSet; import java.util.LinkedList; import java.util.List; @@ -117,10 +122,25 @@ public List getFields(OneRow row) throws SQLException { value = result.getString(colName); break; case DATE: - value = result.getDate(colName); + if (isDateWideRange) { + value = getValueWithoutPrefix(result.getObject(colName, LocalDate.class)); + } else { + value = result.getDate(colName); + } break; case TIMESTAMP: - value = result.getTimestamp(colName); + if (isDateWideRange) { + value = result.getObject(colName, LocalDateTime.class); + } else { + value = result.getTimestamp(colName); + } + break; + case TIMESTAMP_WITH_TIME_ZONE: + if (isDateWideRange) { + value = result.getObject(colName, OffsetDateTime.class).atZoneSameInstant(ZoneId.systemDefault()).toLocalDateTime(); + } else { + value = result.getTimestamp(colName); + } break; 
default: throw new UnsupportedOperationException( @@ -211,10 +231,18 @@ public OneRow setFields(List record) throws UnsupportedOperationExcept oneField.val = new BigDecimal(rawVal); break; case TIMESTAMP: - oneField.val = Timestamp.valueOf(rawVal); + if (isDateWideRange) { + oneField.val = getLocalDateTime(rawVal); + } else { + oneField.val = Timestamp.valueOf(rawVal); + } break; case DATE: - oneField.val = Date.valueOf(rawVal); + if (isDateWideRange) { + oneField.val = getLocalDate(rawVal); + } else { + oneField.val = Date.valueOf(rawVal); + } break; default: throw new UnsupportedOperationException( @@ -311,14 +339,22 @@ public static void decodeOneRowToPreparedStatement(OneRow row, PreparedStatement if (field.val == null) { statement.setNull(i, Types.TIMESTAMP); } else { - statement.setTimestamp(i, (Timestamp) field.val); + if (field.val instanceof LocalDateTime) { + statement.setObject(i, (LocalDateTime) field.val); + } else { + statement.setTimestamp(i, (Timestamp) field.val); + } } break; case DATE: if (field.val == null) { statement.setNull(i, Types.DATE); } else { - statement.setDate(i, (Date) field.val); + if (field.val instanceof LocalDate) { + statement.setObject(i, (LocalDate) field.val); + } else { + statement.setDate(i, (Date) field.val); + } } break; default: @@ -326,4 +362,27 @@ public static void decodeOneRowToPreparedStatement(OneRow row, PreparedStatement } } } + + private Object getLocalDate(String rawVal) { + try { + String yearStr = rawVal.trim().substring(0, rawVal.indexOf("-")); + return yearStr.length() > 4 ? LocalDate.parse("+" + rawVal) : LocalDate.parse(rawVal); + } catch (Exception e) { + throw new IllegalArgumentException("Failed to convert date '" + rawVal + "' to LocalDate class: " + e.getMessage(), e); + } + } + + private Object getLocalDateTime(String rawVal) { + try { + String year = rawVal.trim().substring(0, rawVal.indexOf("-")); + String timestamp = year.length() > 4 ? 
"+" + rawVal : rawVal; + return LocalDateTime.parse(timestamp.trim().replace(" ", "T")); + } catch (Exception e) { + throw new IllegalArgumentException("Failed to convert timestamp '" + rawVal + "' to the LocalDateTime class: " + e.getMessage(), e); + } + } + + private String getValueWithoutPrefix(Object value) { + return StringUtils.removeStart(value.toString(), "+"); + } } From 7350d50252d85c0505038b7a723e5b77b181d58a Mon Sep 17 00:00:00 2001 From: Roman Zolotov Date: Fri, 3 Mar 2023 00:34:55 +0200 Subject: [PATCH 41/53] ADBDEV-3099: Add support year with ERA --- .../pxf/plugins/jdbc/JdbcResolver.java | 39 ++++++++++++------- 1 file changed, 26 insertions(+), 13 deletions(-) diff --git a/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/JdbcResolver.java b/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/JdbcResolver.java index 8d3e995770..915f7d687b 100644 --- a/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/JdbcResolver.java +++ b/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/JdbcResolver.java @@ -19,7 +19,6 @@ * under the License. 
*/ -import org.apache.commons.lang.StringUtils; import org.greenplum.pxf.api.OneField; import org.greenplum.pxf.api.OneRow; import org.greenplum.pxf.api.io.DataType; @@ -41,6 +40,7 @@ import java.time.LocalDateTime; import java.time.OffsetDateTime; import java.time.ZoneId; +import java.time.format.DateTimeFormatter; import java.util.EnumSet; import java.util.LinkedList; import java.util.List; @@ -50,6 +50,12 @@ * JDBC tables resolver */ public class JdbcResolver extends JdbcBasePlugin implements Resolver { + + private static final String LOCAL_DATE_PATTERN = "y-M-d G"; + private static final String LOCAL_DATE_OPTIONAL_PATTERN = "y-M-d[ G]"; + private static final String LOCAL_DATE_TIME_PATTERN = "y-MM-dd HH:mm:ss.SSSSSSSSS G"; + private static final String LOCAL_DATE_TIME_OPTIONAL_PATTERN = "y-MM-dd HH:mm:ss[.[SSSSSSSSS][SSSSSSSS][SSSSSSS][SSSSSS][SSSSS][SSSS][SSS][SS][S]][ G]"; + private static final Set DATATYPES_SUPPORTED = EnumSet.of( DataType.VARCHAR, DataType.BPCHAR, @@ -123,21 +129,26 @@ public List getFields(OneRow row) throws SQLException { break; case DATE: if (isDateWideRange) { - value = getValueWithoutPrefix(result.getObject(colName, LocalDate.class)); + value = result.getObject(colName, LocalDate.class) + .format(DateTimeFormatter.ofPattern(LOCAL_DATE_PATTERN)); } else { value = result.getDate(colName); } break; case TIMESTAMP: if (isDateWideRange) { - value = result.getObject(colName, LocalDateTime.class); + value = result.getObject(colName, LocalDateTime.class) + .format(DateTimeFormatter.ofPattern(LOCAL_DATE_TIME_PATTERN)); } else { value = result.getTimestamp(colName); } break; case TIMESTAMP_WITH_TIME_ZONE: if (isDateWideRange) { - value = result.getObject(colName, OffsetDateTime.class).atZoneSameInstant(ZoneId.systemDefault()).toLocalDateTime(); + value = result.getObject(colName, OffsetDateTime.class) + .atZoneSameInstant(ZoneId.systemDefault()) + .toLocalDateTime() + .format(DateTimeFormatter.ofPattern(LOCAL_DATE_TIME_PATTERN)); } else { value 
= result.getTimestamp(colName); } @@ -365,8 +376,11 @@ public static void decodeOneRowToPreparedStatement(OneRow row, PreparedStatement private Object getLocalDate(String rawVal) { try { - String yearStr = rawVal.trim().substring(0, rawVal.indexOf("-")); - return yearStr.length() > 4 ? LocalDate.parse("+" + rawVal) : LocalDate.parse(rawVal); + if (rawVal.startsWith("-")) { + return LocalDate.parse(rawVal); + } else { + return LocalDate.parse(rawVal, DateTimeFormatter.ofPattern(LOCAL_DATE_OPTIONAL_PATTERN)); + } } catch (Exception e) { throw new IllegalArgumentException("Failed to convert date '" + rawVal + "' to LocalDate class: " + e.getMessage(), e); } @@ -374,15 +388,14 @@ private Object getLocalDate(String rawVal) { private Object getLocalDateTime(String rawVal) { try { - String year = rawVal.trim().substring(0, rawVal.indexOf("-")); - String timestamp = year.length() > 4 ? "+" + rawVal : rawVal; - return LocalDateTime.parse(timestamp.trim().replace(" ", "T")); + if (rawVal.trim().startsWith("-")) { + String timestamp = rawVal.trim().replace(" ", "T"); + return LocalDateTime.parse(timestamp); + } else { + return LocalDateTime.parse(rawVal, DateTimeFormatter.ofPattern(LOCAL_DATE_TIME_OPTIONAL_PATTERN)); + } } catch (Exception e) { throw new IllegalArgumentException("Failed to convert timestamp '" + rawVal + "' to the LocalDateTime class: " + e.getMessage(), e); } } - - private String getValueWithoutPrefix(Object value) { - return StringUtils.removeStart(value.toString(), "+"); - } } From 9e5bc70e1b3ec59d0a837b6141f839d3d86e89c3 Mon Sep 17 00:00:00 2001 From: Roman Zolotov Date: Sun, 5 Mar 2023 23:47:59 +0200 Subject: [PATCH 42/53] ADBDEV-3099: Make the field 'isDateWideRange' non-static --- .../java/org/greenplum/pxf/plugins/jdbc/JdbcBasePlugin.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/JdbcBasePlugin.java 
b/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/JdbcBasePlugin.java index 440a13424b..0762501c3e 100644 --- a/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/JdbcBasePlugin.java +++ b/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/JdbcBasePlugin.java @@ -98,7 +98,6 @@ public class JdbcBasePlugin extends BasePlugin { private static final String HIVE_DEFAULT_DRIVER_CLASS = "org.apache.hive.jdbc.HiveDriver"; private static final String MYSQL_DRIVER_PREFIX = "com.mysql."; private static final String JDBC_DATE_WIDE_RANGE = "jdbc.date.wide-range"; - protected static boolean isDateWideRange; private enum TransactionIsolation { READ_UNCOMMITTED(1), @@ -171,6 +170,9 @@ public static TransactionIsolation typeOf(String str) { private final SecureLogin secureLogin; private final DecryptClient decryptClient; + // Flag which is used when the year might contain more than 4 digits in `date` or 'timestamp' + protected boolean isDateWideRange; + static { // Deprecated as of Oct 22, 2019 in version 5.9.2+ Configuration.addDeprecation("pxf.impersonation.jdbc", From 8cde56187b3b69cc93bc4e34de0c517ce78c8df4 Mon Sep 17 00:00:00 2001 From: Roman Zolotov Date: Mon, 6 Mar 2023 01:42:02 +0200 Subject: [PATCH 43/53] ADBDEV-3099: Add formatters --- .../pxf/plugins/jdbc/JdbcResolver.java | 111 ++++++++++++++---- 1 file changed, 86 insertions(+), 25 deletions(-) diff --git a/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/JdbcResolver.java b/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/JdbcResolver.java index 915f7d687b..bc9aa43bcd 100644 --- a/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/JdbcResolver.java +++ b/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/JdbcResolver.java @@ -41,20 +41,65 @@ import java.time.OffsetDateTime; import java.time.ZoneId; import java.time.format.DateTimeFormatter; +import java.time.format.DateTimeFormatterBuilder; +import java.time.temporal.ChronoField; import 
java.util.EnumSet; import java.util.LinkedList; import java.util.List; import java.util.Set; +import static java.time.format.DateTimeFormatter.ISO_LOCAL_TIME; + /** * JDBC tables resolver */ public class JdbcResolver extends JdbcBasePlugin implements Resolver { - private static final String LOCAL_DATE_PATTERN = "y-M-d G"; - private static final String LOCAL_DATE_OPTIONAL_PATTERN = "y-M-d[ G]"; - private static final String LOCAL_DATE_TIME_PATTERN = "y-MM-dd HH:mm:ss.SSSSSSSSS G"; - private static final String LOCAL_DATE_TIME_OPTIONAL_PATTERN = "y-MM-dd HH:mm:ss[.[SSSSSSSSS][SSSSSSSS][SSSSSSS][SSSSSS][SSSSS][SSSS][SSS][SS][S]][ G]"; + private static final Logger LOG = LoggerFactory.getLogger(JdbcResolver.class); + + private static final DateTimeFormatter LOCAL_DATE_FORMATTER = (new DateTimeFormatterBuilder()) + .appendPattern("y-MM-dd[ G]") + .toFormatter(); + + private static final DateTimeFormatter LOCAL_DATE_LONG_YEAR_FORMATTER = (new DateTimeFormatterBuilder()) + .appendPattern("yyyy-MM-dd[ G]") + .toFormatter(); + + private static final DateTimeFormatter LOCAL_DATE_TIME_FORMATTER = (new DateTimeFormatterBuilder()) + .appendPattern("y").appendLiteral('-') + .appendValue(ChronoField.MONTH_OF_YEAR, 2).appendLiteral('-') + .appendValue(ChronoField.DAY_OF_MONTH, 2) + .appendLiteral(" ") + .append(ISO_LOCAL_TIME) + .appendPattern(" G") + .toFormatter(); + + private static final DateTimeFormatter LOCAL_DATE_TIME_LONG_YEAR_FORMATTER = (new DateTimeFormatterBuilder()) + .appendPattern("yyyy").appendLiteral('-') + .appendValue(ChronoField.MONTH_OF_YEAR, 2).appendLiteral('-') + .appendValue(ChronoField.DAY_OF_MONTH, 2) + .appendLiteral(" ") + .append(ISO_LOCAL_TIME) + .appendPattern(" G") + .toFormatter(); + + private static final DateTimeFormatter LOCAL_DATE_OPTIONAL_FORMATTER = (new DateTimeFormatterBuilder()) + .optionalStart().appendPattern("-").optionalEnd() + .appendPattern("y").appendLiteral('-') + .appendValue(ChronoField.MONTH_OF_YEAR, 2).appendLiteral('-') + 
.appendValue(ChronoField.DAY_OF_MONTH, 2) + .optionalStart().appendPattern(" G").optionalEnd() + .toFormatter(); + + private static final DateTimeFormatter LOCAL_DATE_TIME_OPTIONAL_FORMATTER = (new DateTimeFormatterBuilder()) + .optionalStart().appendPattern("-").optionalEnd() + .appendPattern("y").appendLiteral('-') + .appendValue(ChronoField.MONTH_OF_YEAR, 2).appendLiteral('-') + .appendValue(ChronoField.DAY_OF_MONTH, 2) + .appendLiteral(" ") + .append(ISO_LOCAL_TIME) + .optionalStart().appendPattern(" G").optionalEnd() + .toFormatter(); private static final Set DATATYPES_SUPPORTED = EnumSet.of( DataType.VARCHAR, @@ -72,8 +117,6 @@ public class JdbcResolver extends JdbcBasePlugin implements Resolver { DataType.DATE ); - private static final Logger LOG = LoggerFactory.getLogger(JdbcResolver.class); - /** * getFields() implementation * @@ -129,26 +172,53 @@ public List getFields(OneRow row) throws SQLException { break; case DATE: if (isDateWideRange) { - value = result.getObject(colName, LocalDate.class) - .format(DateTimeFormatter.ofPattern(LOCAL_DATE_PATTERN)); + LocalDate localDate = result.getObject(colName, LocalDate.class); + if (localDate == null) { + value = null; + break; + } + if (localDate.getYear() >= -999 && localDate.getYear() <= 999) { + // Return at least 4-digits year string with leading zero if year contains less than 4 + value = localDate.format(LOCAL_DATE_LONG_YEAR_FORMATTER); + } else { + // For year with more 4 digits 'LOCAL_DATE_FORMATTER' is used. + // We cannot use 'LOCAL_DATE_LONG_YEAR_FORMATTER' as it doesn't correctly format + // some years with minus sign and more than 4 digits, i.e. 
-123456 + value = localDate.format(LOCAL_DATE_FORMATTER); + } } else { value = result.getDate(colName); } break; case TIMESTAMP: if (isDateWideRange) { - value = result.getObject(colName, LocalDateTime.class) - .format(DateTimeFormatter.ofPattern(LOCAL_DATE_TIME_PATTERN)); + LocalDateTime localDateTime = result.getObject(colName, LocalDateTime.class); + if (localDateTime == null) { + value = null; + break; + } + if (localDateTime.getYear() >= -999 && localDateTime.getYear() <= 999) { + value = localDateTime.format(LOCAL_DATE_TIME_LONG_YEAR_FORMATTER); + } else { + value = localDateTime.format(LOCAL_DATE_TIME_FORMATTER); + } } else { value = result.getTimestamp(colName); } break; case TIMESTAMP_WITH_TIME_ZONE: if (isDateWideRange) { - value = result.getObject(colName, OffsetDateTime.class) - .atZoneSameInstant(ZoneId.systemDefault()) - .toLocalDateTime() - .format(DateTimeFormatter.ofPattern(LOCAL_DATE_TIME_PATTERN)); + OffsetDateTime offsetDateTime = result.getObject(colName, OffsetDateTime.class); + if (offsetDateTime == null) { + value = null; + break; + } + LocalDateTime localDateTime = offsetDateTime.atZoneSameInstant(ZoneId.systemDefault()).toLocalDateTime(); + if (localDateTime.getYear() >= -999 && localDateTime.getYear() <= 999) { + value = localDateTime.format(LOCAL_DATE_TIME_LONG_YEAR_FORMATTER); + } else { + value = localDateTime.format(LOCAL_DATE_TIME_FORMATTER); + } } else { value = result.getTimestamp(colName); } @@ -376,11 +446,7 @@ public static void decodeOneRowToPreparedStatement(OneRow row, PreparedStatement private Object getLocalDate(String rawVal) { try { - if (rawVal.startsWith("-")) { - return LocalDate.parse(rawVal); - } else { - return LocalDate.parse(rawVal, DateTimeFormatter.ofPattern(LOCAL_DATE_OPTIONAL_PATTERN)); - } + return LocalDate.parse(rawVal, LOCAL_DATE_OPTIONAL_FORMATTER); } catch (Exception e) { throw new IllegalArgumentException("Failed to convert date '" + rawVal + "' to LocalDate class: " + e.getMessage(), e); } @@ -388,12 
+454,7 @@ private Object getLocalDate(String rawVal) { private Object getLocalDateTime(String rawVal) { try { - if (rawVal.trim().startsWith("-")) { - String timestamp = rawVal.trim().replace(" ", "T"); - return LocalDateTime.parse(timestamp); - } else { - return LocalDateTime.parse(rawVal, DateTimeFormatter.ofPattern(LOCAL_DATE_TIME_OPTIONAL_PATTERN)); - } + return LocalDateTime.parse(rawVal, LOCAL_DATE_TIME_OPTIONAL_FORMATTER); } catch (Exception e) { throw new IllegalArgumentException("Failed to convert timestamp '" + rawVal + "' to the LocalDateTime class: " + e.getMessage(), e); } From 8d6ea1cd15d74a38df4d98b02bb54eff27a4b603 Mon Sep 17 00:00:00 2001 From: Roman Zolotov Date: Mon, 6 Mar 2023 01:42:22 +0200 Subject: [PATCH 44/53] ADBDEV-3099: Add unit tests --- .../pxf/plugins/jdbc/JdbcBasePluginTest.java | 13 +- .../pxf/plugins/jdbc/JdbcResolverTest.java | 547 ++++++++++++++++++ 2 files changed, 557 insertions(+), 3 deletions(-) create mode 100644 server/pxf-jdbc/src/test/java/org/greenplum/pxf/plugins/jdbc/JdbcResolverTest.java diff --git a/server/pxf-jdbc/src/test/java/org/greenplum/pxf/plugins/jdbc/JdbcBasePluginTest.java b/server/pxf-jdbc/src/test/java/org/greenplum/pxf/plugins/jdbc/JdbcBasePluginTest.java index dd405b12bb..988092fdb6 100644 --- a/server/pxf-jdbc/src/test/java/org/greenplum/pxf/plugins/jdbc/JdbcBasePluginTest.java +++ b/server/pxf-jdbc/src/test/java/org/greenplum/pxf/plugins/jdbc/JdbcBasePluginTest.java @@ -39,9 +39,7 @@ import java.util.Map; import java.util.Properties; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertSame; -import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.*; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyBoolean; import static org.mockito.Mockito.anyInt; @@ -487,6 +485,15 @@ public void testGetConnectionConnPropsPoolDisabledPoolProps() throws 
SQLExceptio verify(mockConnectionManager).getConnection("test-server", "test-url", connProps, false, null, null); } + @Test + public void testDateWideRangeFromConfiguration() throws SQLException { + configuration.set("jdbc.driver", "org.greenplum.pxf.plugins.jdbc.FakeJdbcDriver"); + configuration.set("jdbc.url", "test-url"); + configuration.set("jdbc.date.wide-range", "true"); + JdbcBasePlugin plugin = getPlugin(mockConnectionManager, mockSecureLogin, context); + assertTrue(plugin.isDateWideRange); + } + private JdbcBasePlugin getPlugin(ConnectionManager mockConnectionManager, SecureLogin mockSecureLogin, RequestContext context) { JdbcBasePlugin plugin = new JdbcBasePlugin(mockConnectionManager, mockSecureLogin, mockDecryptClient); plugin.setRequestContext(context); diff --git a/server/pxf-jdbc/src/test/java/org/greenplum/pxf/plugins/jdbc/JdbcResolverTest.java b/server/pxf-jdbc/src/test/java/org/greenplum/pxf/plugins/jdbc/JdbcResolverTest.java new file mode 100644 index 0000000000..617c361081 --- /dev/null +++ b/server/pxf-jdbc/src/test/java/org/greenplum/pxf/plugins/jdbc/JdbcResolverTest.java @@ -0,0 +1,547 @@ +package org.greenplum.pxf.plugins.jdbc; + +import org.greenplum.pxf.api.OneField; +import org.greenplum.pxf.api.OneRow; +import org.greenplum.pxf.api.io.DataType; +import org.greenplum.pxf.api.model.RequestContext; +import org.greenplum.pxf.api.utilities.ColumnDescriptor; +import org.greenplum.pxf.api.utilities.SpringContext; +import org.greenplum.pxf.plugins.jdbc.utils.ConnectionManager; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.MockedStatic; +import org.mockito.junit.jupiter.MockitoExtension; + +import java.sql.ResultSet; +import java.sql.SQLException; +import java.text.ParseException; +import java.time.LocalDate; +import java.time.LocalDateTime; +import java.time.OffsetDateTime; +import java.util.*; + +import static org.junit.jupiter.api.Assertions.*; +import 
static org.mockito.Mockito.*; + +@ExtendWith(MockitoExtension.class) +class JdbcResolverTest { + @Mock + private OneRow row; + @Mock + private ResultSet result; + RequestContext context = new RequestContext(); + List columnDescriptors = new ArrayList<>(); + List oneFieldList = new ArrayList<>(); + private JdbcResolver resolver; + + @Test + void getFieldDateWithWideRangeTest() throws SQLException { + LocalDate localDate = LocalDate.of(1977, 12, 11); + boolean isDateWideRange = true; + columnDescriptors.add(new ColumnDescriptor("birth_date", DataType.DATE.getOID(), 0, "date", null)); + context.setTupleDescription(columnDescriptors); + when(row.getData()).thenReturn(result); + when(result.getObject("birth_date", LocalDate.class)).thenReturn(localDate); + + try (MockedStatic springContextMockedStatic = mockStatic(SpringContext.class)) { + springContextMockedStatic.when(() -> SpringContext.getBean(ConnectionManager.class)).thenReturn(mock(ConnectionManager.class)); + resolver = new JdbcResolver(); + resolver.columns = context.getTupleDescription(); + resolver.isDateWideRange = isDateWideRange; + List oneFields = resolver.getFields(row); + assertEquals("1977-12-11 AD", oneFields.get(0).val); + } + } + + @Test + void getFieldDateNullWithWideRangeTest() throws SQLException { + boolean isDateWideRange = true; + columnDescriptors.add(new ColumnDescriptor("birth_date", DataType.DATE.getOID(), 0, "date", null)); + context.setTupleDescription(columnDescriptors); + when(row.getData()).thenReturn(result); + when(result.getObject("birth_date", LocalDate.class)).thenReturn(null); + + try (MockedStatic springContextMockedStatic = mockStatic(SpringContext.class)) { + springContextMockedStatic.when(() -> SpringContext.getBean(ConnectionManager.class)).thenReturn(mock(ConnectionManager.class)); + resolver = new JdbcResolver(); + resolver.columns = context.getTupleDescription(); + resolver.isDateWideRange = isDateWideRange; + List oneFields = resolver.getFields(row); + 
assertNull(oneFields.get(0).val); + } + } + + @Test + void getFieldDateWithWideRangeWithLeadingZeroTest() throws SQLException { + LocalDate localDate = LocalDate.of(3, 5, 4); + boolean isDateWideRange = true; + columnDescriptors.add(new ColumnDescriptor("birth_date", DataType.DATE.getOID(), 0, "date", null)); + context.setTupleDescription(columnDescriptors); + when(row.getData()).thenReturn(result); + when(result.getObject("birth_date", LocalDate.class)).thenReturn(localDate); + + try (MockedStatic springContextMockedStatic = mockStatic(SpringContext.class)) { + springContextMockedStatic.when(() -> SpringContext.getBean(ConnectionManager.class)).thenReturn(mock(ConnectionManager.class)); + resolver = new JdbcResolver(); + resolver.columns = context.getTupleDescription(); + resolver.isDateWideRange = isDateWideRange; + List oneFields = resolver.getFields(row); + assertEquals("0003-05-04 AD", oneFields.get(0).val); + } + } + + @Test + void getFieldDateWithMoreThan4digitsInYearTest() throws SQLException { + LocalDate localDate = LocalDate.of(+12345678, 12, 11); + boolean isDateWideRange = true; + columnDescriptors.add(new ColumnDescriptor("birth_date", DataType.DATE.getOID(), 0, "date", null)); + context.setTupleDescription(columnDescriptors); + when(row.getData()).thenReturn(result); + when(result.getObject("birth_date", LocalDate.class)).thenReturn(localDate); + + try (MockedStatic springContextMockedStatic = mockStatic(SpringContext.class)) { + springContextMockedStatic.when(() -> SpringContext.getBean(ConnectionManager.class)).thenReturn(mock(ConnectionManager.class)); + resolver = new JdbcResolver(); + resolver.columns = context.getTupleDescription(); + resolver.isDateWideRange = isDateWideRange; + List oneFields = resolver.getFields(row); + assertEquals("12345678-12-11 AD", oneFields.get(0).val); + } + } + + @Test + void getFieldDateWithEraWithMoreThan4digitsInYearTest() throws SQLException { + LocalDate localDate = LocalDate.of(-1234567, 6, 1); + boolean 
isDateWideRange = true; + columnDescriptors.add(new ColumnDescriptor("birth_date", DataType.DATE.getOID(), 0, "date", null)); + context.setTupleDescription(columnDescriptors); + when(row.getData()).thenReturn(result); + when(result.getObject("birth_date", LocalDate.class)).thenReturn(localDate); + + try (MockedStatic springContextMockedStatic = mockStatic(SpringContext.class)) { + springContextMockedStatic.when(() -> SpringContext.getBean(ConnectionManager.class)).thenReturn(mock(ConnectionManager.class)); + resolver = new JdbcResolver(); + resolver.columns = context.getTupleDescription(); + resolver.isDateWideRange = isDateWideRange; + List oneFields = resolver.getFields(row); + // The year -1234567 is transferred to 1234568 BC: https://en.wikipedia.org/wiki/Astronomical_year_numbering + assertEquals("1234568-06-01 BC", oneFields.get(0).val); + } + } + + @Test + void getFieldDateWithEraTest() throws SQLException { + LocalDate localDate = LocalDate.of(-1234, 6, 1); + boolean isDateWideRange = true; + columnDescriptors.add(new ColumnDescriptor("birth_date", DataType.DATE.getOID(), 0, "date", null)); + context.setTupleDescription(columnDescriptors); + when(row.getData()).thenReturn(result); + when(result.getObject("birth_date", LocalDate.class)).thenReturn(localDate); + + try (MockedStatic springContextMockedStatic = mockStatic(SpringContext.class)) { + springContextMockedStatic.when(() -> SpringContext.getBean(ConnectionManager.class)).thenReturn(mock(ConnectionManager.class)); + resolver = new JdbcResolver(); + resolver.columns = context.getTupleDescription(); + resolver.isDateWideRange = isDateWideRange; + List oneFields = resolver.getFields(row); + // The year -1234 is transferred to 1235 BC: https://en.wikipedia.org/wiki/Astronomical_year_numbering + assertEquals("1235-06-01 BC", oneFields.get(0).val); + } + } + + @Test + void getFieldDateTimeWithWideRangeTest() throws SQLException { + LocalDateTime localDateTime = 
LocalDateTime.parse("1977-12-11T11:15:30.1234"); + boolean isDateWideRange = true; + columnDescriptors.add(new ColumnDescriptor("birth_date", DataType.TIMESTAMP.getOID(), 1, "timestamp", null)); + context.setTupleDescription(columnDescriptors); + when(row.getData()).thenReturn(result); + when(result.getObject("birth_date", LocalDateTime.class)).thenReturn(localDateTime); + + try (MockedStatic springContextMockedStatic = mockStatic(SpringContext.class)) { + springContextMockedStatic.when(() -> SpringContext.getBean(ConnectionManager.class)).thenReturn(mock(ConnectionManager.class)); + resolver = new JdbcResolver(); + resolver.columns = context.getTupleDescription(); + resolver.isDateWideRange = isDateWideRange; + List oneFields = resolver.getFields(row); + assertEquals("1977-12-11 11:15:30.1234 AD", oneFields.get(0).val); + } + } + + @Test + void getFieldDateNullTimeWithWideRangeTest() throws SQLException { + boolean isDateWideRange = true; + columnDescriptors.add(new ColumnDescriptor("birth_date", DataType.TIMESTAMP.getOID(), 1, "timestamp", null)); + context.setTupleDescription(columnDescriptors); + when(row.getData()).thenReturn(result); + when(result.getObject("birth_date", LocalDateTime.class)).thenReturn(null); + + try (MockedStatic springContextMockedStatic = mockStatic(SpringContext.class)) { + springContextMockedStatic.when(() -> SpringContext.getBean(ConnectionManager.class)).thenReturn(mock(ConnectionManager.class)); + resolver = new JdbcResolver(); + resolver.columns = context.getTupleDescription(); + resolver.isDateWideRange = isDateWideRange; + List oneFields = resolver.getFields(row); + assertNull(oneFields.get(0).val); + } + } + + @Test + void getFieldDateTimeWithWideRangeWithLeadingZeroTest() throws SQLException { + LocalDateTime localDateTime = LocalDateTime.parse("0003-01-02T04:05:06.0000015"); + boolean isDateWideRange = true; + columnDescriptors.add(new ColumnDescriptor("birth_date", DataType.TIMESTAMP.getOID(), 1, "timestamp", null)); + 
context.setTupleDescription(columnDescriptors); + when(row.getData()).thenReturn(result); + when(result.getObject("birth_date", LocalDateTime.class)).thenReturn(localDateTime); + + try (MockedStatic springContextMockedStatic = mockStatic(SpringContext.class)) { + springContextMockedStatic.when(() -> SpringContext.getBean(ConnectionManager.class)).thenReturn(mock(ConnectionManager.class)); + resolver = new JdbcResolver(); + resolver.columns = context.getTupleDescription(); + resolver.isDateWideRange = isDateWideRange; + List oneFields = resolver.getFields(row); + assertEquals("0003-01-02 04:05:06.0000015 AD", oneFields.get(0).val); + } + } + + @Test + void getFieldDateTimeWithMoreThan4digitsInYearTest() throws SQLException { + LocalDateTime localDateTime = LocalDateTime.parse("+9876543-12-11T11:15:30.1234"); + boolean isDateWideRange = true; + columnDescriptors.add(new ColumnDescriptor("birth_date", DataType.TIMESTAMP.getOID(), 1, "timestamp", null)); + context.setTupleDescription(columnDescriptors); + when(row.getData()).thenReturn(result); + when(result.getObject("birth_date", LocalDateTime.class)).thenReturn(localDateTime); + + try (MockedStatic springContextMockedStatic = mockStatic(SpringContext.class)) { + springContextMockedStatic.when(() -> SpringContext.getBean(ConnectionManager.class)).thenReturn(mock(ConnectionManager.class)); + resolver = new JdbcResolver(); + resolver.columns = context.getTupleDescription(); + resolver.isDateWideRange = isDateWideRange; + List oneFields = resolver.getFields(row); + assertEquals("9876543-12-11 11:15:30.1234 AD", oneFields.get(0).val); + } + } + + @Test + void getFieldDateTimeWithEraTest() throws SQLException { + LocalDateTime localDateTime = LocalDateTime.parse("-3456-12-11T11:15:30"); + boolean isDateWideRange = true; + columnDescriptors.add(new ColumnDescriptor("birth_date", DataType.TIMESTAMP.getOID(), 1, "timestamp", null)); + context.setTupleDescription(columnDescriptors); + when(row.getData()).thenReturn(result); + 
when(result.getObject("birth_date", LocalDateTime.class)).thenReturn(localDateTime); + + try (MockedStatic springContextMockedStatic = mockStatic(SpringContext.class)) { + springContextMockedStatic.when(() -> SpringContext.getBean(ConnectionManager.class)).thenReturn(mock(ConnectionManager.class)); + resolver = new JdbcResolver(); + resolver.columns = context.getTupleDescription(); + resolver.isDateWideRange = isDateWideRange; + List oneFields = resolver.getFields(row); + // The year -3456 is transferred to 3457 BC: https://en.wikipedia.org/wiki/Astronomical_year_numbering + assertEquals("3457-12-11 11:15:30 BC", oneFields.get(0).val); + } + } + + @Test + void getFieldOffsetDateTimeWithWideRangeTest() throws SQLException { + TimeZone.setDefault(TimeZone.getTimeZone("UTC")); + OffsetDateTime offsetDateTime = OffsetDateTime.parse("1977-12-11T10:15:30.1234+05:00"); + boolean isDateWideRange = true; + columnDescriptors.add(new ColumnDescriptor("birth_date", DataType.TIMESTAMP_WITH_TIME_ZONE.getOID(), 1, "timestamptz", null)); + context.setTupleDescription(columnDescriptors); + when(row.getData()).thenReturn(result); + when(result.getObject("birth_date", OffsetDateTime.class)).thenReturn(offsetDateTime); + + try (MockedStatic springContextMockedStatic = mockStatic(SpringContext.class)) { + springContextMockedStatic.when(() -> SpringContext.getBean(ConnectionManager.class)).thenReturn(mock(ConnectionManager.class)); + resolver = new JdbcResolver(); + resolver.columns = context.getTupleDescription(); + resolver.isDateWideRange = isDateWideRange; + List oneFields = resolver.getFields(row); + assertEquals("1977-12-11 05:15:30.1234 AD", oneFields.get(0).val); + } + } + + @Test + void getFieldOffsetDateTimeNullWithWideRangeTest() throws SQLException { + TimeZone.setDefault(TimeZone.getTimeZone("UTC")); + boolean isDateWideRange = true; + columnDescriptors.add(new ColumnDescriptor("birth_date", DataType.TIMESTAMP_WITH_TIME_ZONE.getOID(), 1, "timestamptz", null)); + 
context.setTupleDescription(columnDescriptors); + when(row.getData()).thenReturn(result); + when(result.getObject("birth_date", OffsetDateTime.class)).thenReturn(null); + + try (MockedStatic springContextMockedStatic = mockStatic(SpringContext.class)) { + springContextMockedStatic.when(() -> SpringContext.getBean(ConnectionManager.class)).thenReturn(mock(ConnectionManager.class)); + resolver = new JdbcResolver(); + resolver.columns = context.getTupleDescription(); + resolver.isDateWideRange = isDateWideRange; + List oneFields = resolver.getFields(row); + assertNull(oneFields.get(0).val); + } + } + + @Test + void getFieldOffsetDateTimeWithWideRangeWithLeadingZeroTest() throws SQLException { + TimeZone.setDefault(TimeZone.getTimeZone("UTC")); + OffsetDateTime offsetDateTime = OffsetDateTime.parse("0003-01-02T04:05:06.0000015+03:00"); + boolean isDateWideRange = true; + columnDescriptors.add(new ColumnDescriptor("birth_date", DataType.TIMESTAMP_WITH_TIME_ZONE.getOID(), 1, "timestamptz", null)); + context.setTupleDescription(columnDescriptors); + when(row.getData()).thenReturn(result); + when(result.getObject("birth_date", OffsetDateTime.class)).thenReturn(offsetDateTime); + + try (MockedStatic springContextMockedStatic = mockStatic(SpringContext.class)) { + springContextMockedStatic.when(() -> SpringContext.getBean(ConnectionManager.class)).thenReturn(mock(ConnectionManager.class)); + resolver = new JdbcResolver(); + resolver.columns = context.getTupleDescription(); + resolver.isDateWideRange = isDateWideRange; + List oneFields = resolver.getFields(row); + assertEquals("0003-01-02 01:05:06.0000015 AD", oneFields.get(0).val); + } + } + + @Test + void getFieldOffsetDateTimeWithMoreThan4digitsInYearTest() throws SQLException { + TimeZone.setDefault(TimeZone.getTimeZone("UTC")); + OffsetDateTime offsetDateTime = OffsetDateTime.parse("+9876543-12-11T11:15:30.1234-03:00"); + boolean isDateWideRange = true; + columnDescriptors.add(new ColumnDescriptor("birth_date", 
DataType.TIMESTAMP_WITH_TIME_ZONE.getOID(), 1, "timestamptz", null)); + context.setTupleDescription(columnDescriptors); + when(row.getData()).thenReturn(result); + when(result.getObject("birth_date", OffsetDateTime.class)).thenReturn(offsetDateTime); + + try (MockedStatic springContextMockedStatic = mockStatic(SpringContext.class)) { + springContextMockedStatic.when(() -> SpringContext.getBean(ConnectionManager.class)).thenReturn(mock(ConnectionManager.class)); + resolver = new JdbcResolver(); + resolver.columns = context.getTupleDescription(); + resolver.isDateWideRange = isDateWideRange; + List oneFields = resolver.getFields(row); + assertEquals("9876543-12-11 14:15:30.1234 AD", oneFields.get(0).val); + } + } + + @Test + void getFieldOffsetDateTimeWithEraTest() throws SQLException { + TimeZone.setDefault(TimeZone.getTimeZone("UTC")); + OffsetDateTime offsetDateTime = OffsetDateTime.parse("-3456-12-11T11:15:30+02:00"); + boolean isDateWideRange = true; + columnDescriptors.add(new ColumnDescriptor("birth_date", DataType.TIMESTAMP_WITH_TIME_ZONE.getOID(), 1, "timestamptz", null)); + context.setTupleDescription(columnDescriptors); + when(row.getData()).thenReturn(result); + when(result.getObject("birth_date", OffsetDateTime.class)).thenReturn(offsetDateTime); + + try (MockedStatic springContextMockedStatic = mockStatic(SpringContext.class)) { + springContextMockedStatic.when(() -> SpringContext.getBean(ConnectionManager.class)).thenReturn(mock(ConnectionManager.class)); + resolver = new JdbcResolver(); + resolver.columns = context.getTupleDescription(); + resolver.isDateWideRange = isDateWideRange; + List oneFields = resolver.getFields(row); + // The year -3456 is transferred to 3457 BC: https://en.wikipedia.org/wiki/Astronomical_year_numbering + assertEquals("3457-12-11 09:15:30 BC", oneFields.get(0).val); + } + } + + @Test + void setFieldDateWithWideRangeTest() throws ParseException { + LocalDate expectedLocalDate = LocalDate.of(1977, 12, 11); + String date = 
"1977-12-11"; + oneFieldList.add(new OneField(DataType.TEXT.getOID(), date)); + boolean isDateWideRange = true; + columnDescriptors.add(new ColumnDescriptor("birth_date", DataType.DATE.getOID(), 0, "date", null)); + context.setTupleDescription(columnDescriptors); + + try (MockedStatic springContextMockedStatic = mockStatic(SpringContext.class)) { + springContextMockedStatic.when(() -> SpringContext.getBean(ConnectionManager.class)).thenReturn(mock(ConnectionManager.class)); + resolver = new JdbcResolver(); + resolver.columns = context.getTupleDescription(); + resolver.isDateWideRange = isDateWideRange; + OneRow oneRow = resolver.setFields(oneFieldList); + assertTrue(((OneField) ((List) oneRow.getData()).get(0)).val instanceof LocalDate); + assertEquals(expectedLocalDate, ((OneField) ((List) oneRow.getData()).get(0)).val); + } + } + + @Test + void setFieldDateWithWideRangeWithLeadingZeroTest() throws ParseException { + LocalDate expectedLocalDate = LocalDate.of(3, 5, 4); + String date = "0003-05-04"; + oneFieldList.add(new OneField(DataType.TEXT.getOID(), date)); + boolean isDateWideRange = true; + columnDescriptors.add(new ColumnDescriptor("birth_date", DataType.DATE.getOID(), 0, "date", null)); + context.setTupleDescription(columnDescriptors); + + try (MockedStatic springContextMockedStatic = mockStatic(SpringContext.class)) { + springContextMockedStatic.when(() -> SpringContext.getBean(ConnectionManager.class)).thenReturn(mock(ConnectionManager.class)); + resolver = new JdbcResolver(); + resolver.columns = context.getTupleDescription(); + resolver.isDateWideRange = isDateWideRange; + OneRow oneRow = resolver.setFields(oneFieldList); + assertTrue(((OneField) ((List) oneRow.getData()).get(0)).val instanceof LocalDate); + assertEquals(expectedLocalDate, ((OneField) ((List) oneRow.getData()).get(0)).val); + } + } + + @Test + void setFieldDateWithMoreThan4digitsInYearTest() throws ParseException { + LocalDate expectedLocalDate = LocalDate.of(+12345678, 12, 11); + 
String date = "12345678-12-11"; + oneFieldList.add(new OneField(DataType.TEXT.getOID(), date)); + boolean isDateWideRange = true; + columnDescriptors.add(new ColumnDescriptor("birth_date", DataType.DATE.getOID(), 0, "date", null)); + context.setTupleDescription(columnDescriptors); + + try (MockedStatic springContextMockedStatic = mockStatic(SpringContext.class)) { + springContextMockedStatic.when(() -> SpringContext.getBean(ConnectionManager.class)).thenReturn(mock(ConnectionManager.class)); + resolver = new JdbcResolver(); + resolver.columns = context.getTupleDescription(); + resolver.isDateWideRange = isDateWideRange; + OneRow oneRow = resolver.setFields(oneFieldList); + assertTrue(((OneField) ((List) oneRow.getData()).get(0)).val instanceof LocalDate); + assertEquals(expectedLocalDate, ((OneField) ((List) oneRow.getData()).get(0)).val); + } + } + + @Test + void setFieldDateWithEraWithMoreThan4digitsInYearTest() throws ParseException { + LocalDate expectedLocalDate = LocalDate.of(-1234567, 6, 1); + String date = "1234568-06-01 BC"; + oneFieldList.add(new OneField(DataType.TEXT.getOID(), date)); + boolean isDateWideRange = true; + columnDescriptors.add(new ColumnDescriptor("birth_date", DataType.DATE.getOID(), 0, "date", null)); + context.setTupleDescription(columnDescriptors); + + try (MockedStatic springContextMockedStatic = mockStatic(SpringContext.class)) { + springContextMockedStatic.when(() -> SpringContext.getBean(ConnectionManager.class)).thenReturn(mock(ConnectionManager.class)); + resolver = new JdbcResolver(); + resolver.columns = context.getTupleDescription(); + resolver.isDateWideRange = isDateWideRange; + OneRow oneRow = resolver.setFields(oneFieldList); + assertTrue(((OneField) ((List) oneRow.getData()).get(0)).val instanceof LocalDate); + assertEquals(expectedLocalDate, ((OneField) ((List) oneRow.getData()).get(0)).val); + } + } + + @Test + void setFieldDateWithEraTest() throws ParseException { + LocalDate expectedLocalDate = LocalDate.of(-1234, 
11, 1); + String date = "1235-11-01 BC"; + oneFieldList.add(new OneField(DataType.TEXT.getOID(), date)); + boolean isDateWideRange = true; + columnDescriptors.add(new ColumnDescriptor("birth_date", DataType.DATE.getOID(), 0, "date", null)); + context.setTupleDescription(columnDescriptors); + + try (MockedStatic springContextMockedStatic = mockStatic(SpringContext.class)) { + springContextMockedStatic.when(() -> SpringContext.getBean(ConnectionManager.class)).thenReturn(mock(ConnectionManager.class)); + resolver = new JdbcResolver(); + resolver.columns = context.getTupleDescription(); + resolver.isDateWideRange = isDateWideRange; + OneRow oneRow = resolver.setFields(oneFieldList); + assertTrue(((OneField) ((List) oneRow.getData()).get(0)).val instanceof LocalDate); + assertEquals(expectedLocalDate, ((OneField) ((List) oneRow.getData()).get(0)).val); + } + } + + @Test + void setFieldDateTimeWithWideRangeTest() throws ParseException { + LocalDateTime expectedLocalDateTime = LocalDateTime.of(1977, 12, 11, 15, 12, 11, 123456789); + String date = "1977-12-11 15:12:11.123456789"; + oneFieldList.add(new OneField(DataType.TEXT.getOID(), date)); + boolean isDateWideRange = true; + columnDescriptors.add(new ColumnDescriptor("birth_date", DataType.TIMESTAMP.getOID(), 1, "timestamp", null)); + context.setTupleDescription(columnDescriptors); + + try (MockedStatic springContextMockedStatic = mockStatic(SpringContext.class)) { + springContextMockedStatic.when(() -> SpringContext.getBean(ConnectionManager.class)).thenReturn(mock(ConnectionManager.class)); + resolver = new JdbcResolver(); + resolver.columns = context.getTupleDescription(); + resolver.isDateWideRange = isDateWideRange; + OneRow oneRow = resolver.setFields(oneFieldList); + assertTrue(((OneField) ((List) oneRow.getData()).get(0)).val instanceof LocalDateTime); + assertEquals(expectedLocalDateTime, ((OneField) ((List) oneRow.getData()).get(0)).val); + } + } + + @Test + void 
setFieldDateTimeWithWideRangeWithLeadingZeroTest() throws ParseException { + LocalDateTime expectedLocalDateTime = LocalDateTime.of(3, 5, 4, 1, 2, 1); + String date = "0003-05-04 01:02:01"; + oneFieldList.add(new OneField(DataType.TEXT.getOID(), date)); + boolean isDateWideRange = true; + columnDescriptors.add(new ColumnDescriptor("birth_date", DataType.TIMESTAMP.getOID(), 1, "timestamp", null)); + context.setTupleDescription(columnDescriptors); + + try (MockedStatic springContextMockedStatic = mockStatic(SpringContext.class)) { + springContextMockedStatic.when(() -> SpringContext.getBean(ConnectionManager.class)).thenReturn(mock(ConnectionManager.class)); + resolver = new JdbcResolver(); + resolver.columns = context.getTupleDescription(); + resolver.isDateWideRange = isDateWideRange; + OneRow oneRow = resolver.setFields(oneFieldList); + assertTrue(((OneField) ((List) oneRow.getData()).get(0)).val instanceof LocalDateTime); + assertEquals(expectedLocalDateTime, ((OneField) ((List) oneRow.getData()).get(0)).val); + } + } + + @Test + void setFieldDateTimeWithMoreThan4digitsInYearTest() throws ParseException { + LocalDateTime expectedLocalDateTime = LocalDateTime.of(+12345678, 12, 11, 15, 35); + String date = "12345678-12-11 15:35 AD"; + oneFieldList.add(new OneField(DataType.TEXT.getOID(), date)); + boolean isDateWideRange = true; + columnDescriptors.add(new ColumnDescriptor("birth_date", DataType.TIMESTAMP.getOID(), 1, "timestamp", null)); + context.setTupleDescription(columnDescriptors); + + try (MockedStatic springContextMockedStatic = mockStatic(SpringContext.class)) { + springContextMockedStatic.when(() -> SpringContext.getBean(ConnectionManager.class)).thenReturn(mock(ConnectionManager.class)); + resolver = new JdbcResolver(); + resolver.columns = context.getTupleDescription(); + resolver.isDateWideRange = isDateWideRange; + OneRow oneRow = resolver.setFields(oneFieldList); + assertTrue(((OneField) ((List) oneRow.getData()).get(0)).val instanceof 
LocalDateTime); + assertEquals(expectedLocalDateTime, ((OneField) ((List) oneRow.getData()).get(0)).val); + } + } + + @Test + void setFieldDateTimeWithEraWithMoreThan4digitsInYearTest() throws ParseException { + LocalDateTime expectedLocalDateTime = LocalDateTime.of(-1234567, 6, 1, 19, 56, 43, 12); + String date = "1234568-06-01 19:56:43.000000012 BC"; + oneFieldList.add(new OneField(DataType.TEXT.getOID(), date)); + boolean isDateWideRange = true; + columnDescriptors.add(new ColumnDescriptor("birth_date", DataType.TIMESTAMP.getOID(), 1, "timestamp", null)); + context.setTupleDescription(columnDescriptors); + + try (MockedStatic springContextMockedStatic = mockStatic(SpringContext.class)) { + springContextMockedStatic.when(() -> SpringContext.getBean(ConnectionManager.class)).thenReturn(mock(ConnectionManager.class)); + resolver = new JdbcResolver(); + resolver.columns = context.getTupleDescription(); + resolver.isDateWideRange = isDateWideRange; + OneRow oneRow = resolver.setFields(oneFieldList); + assertTrue(((OneField) ((List) oneRow.getData()).get(0)).val instanceof LocalDateTime); + assertEquals(expectedLocalDateTime, ((OneField) ((List) oneRow.getData()).get(0)).val); + } + } + + @Test + void setFieldDateTimeWithEraTest() throws ParseException { + LocalDateTime expectedLocalDateTime = LocalDateTime.of(-1234, 11, 1, 16, 20); + String date = "1235-11-01 16:20 BC"; + oneFieldList.add(new OneField(DataType.TEXT.getOID(), date)); + boolean isDateWideRange = true; + columnDescriptors.add(new ColumnDescriptor("birth_date", DataType.TIMESTAMP.getOID(), 1, "timestamp", null)); + context.setTupleDescription(columnDescriptors); + + try (MockedStatic springContextMockedStatic = mockStatic(SpringContext.class)) { + springContextMockedStatic.when(() -> SpringContext.getBean(ConnectionManager.class)).thenReturn(mock(ConnectionManager.class)); + resolver = new JdbcResolver(); + resolver.columns = context.getTupleDescription(); + resolver.isDateWideRange = isDateWideRange; + 
OneRow oneRow = resolver.setFields(oneFieldList); + assertTrue(((OneField) ((List) oneRow.getData()).get(0)).val instanceof LocalDateTime); + assertEquals(expectedLocalDateTime, ((OneField) ((List) oneRow.getData()).get(0)).val); + } + } +} \ No newline at end of file From 55674612191b55a72d6554c641db05fbe3c1a7fb Mon Sep 17 00:00:00 2001 From: Roman Zolotov Date: Mon, 6 Mar 2023 20:00:42 +0200 Subject: [PATCH 45/53] ADBDEV-3099: Modify and remove redundant formatters --- .../pxf/plugins/jdbc/JdbcResolver.java | 76 +++++-------------- 1 file changed, 20 insertions(+), 56 deletions(-) diff --git a/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/JdbcResolver.java b/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/JdbcResolver.java index bc9aa43bcd..8c7f1e536a 100644 --- a/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/JdbcResolver.java +++ b/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/JdbcResolver.java @@ -42,6 +42,7 @@ import java.time.ZoneId; import java.time.format.DateTimeFormatter; import java.time.format.DateTimeFormatterBuilder; +import java.time.format.SignStyle; import java.time.temporal.ChronoField; import java.util.EnumSet; import java.util.LinkedList; @@ -57,45 +58,32 @@ public class JdbcResolver extends JdbcBasePlugin implements Resolver { private static final Logger LOG = LoggerFactory.getLogger(JdbcResolver.class); - private static final DateTimeFormatter LOCAL_DATE_FORMATTER = (new DateTimeFormatterBuilder()) - .appendPattern("y-MM-dd[ G]") - .toFormatter(); - - private static final DateTimeFormatter LOCAL_DATE_LONG_YEAR_FORMATTER = (new DateTimeFormatterBuilder()) - .appendPattern("yyyy-MM-dd[ G]") - .toFormatter(); - - private static final DateTimeFormatter LOCAL_DATE_TIME_FORMATTER = (new DateTimeFormatterBuilder()) - .appendPattern("y").appendLiteral('-') + private static final DateTimeFormatter LOCAL_DATE_GET_FORMATTER = (new DateTimeFormatterBuilder()) + .appendValue(ChronoField.YEAR_OF_ERA, 
4, 9, SignStyle.NORMAL).appendLiteral("-") .appendValue(ChronoField.MONTH_OF_YEAR, 2).appendLiteral('-') .appendValue(ChronoField.DAY_OF_MONTH, 2) - .appendLiteral(" ") - .append(ISO_LOCAL_TIME) .appendPattern(" G") .toFormatter(); - private static final DateTimeFormatter LOCAL_DATE_TIME_LONG_YEAR_FORMATTER = (new DateTimeFormatterBuilder()) - .appendPattern("yyyy").appendLiteral('-') + private static final DateTimeFormatter LOCAL_DATE_TIME_GET_FORMATTER = (new DateTimeFormatterBuilder()) + .appendValue(ChronoField.YEAR_OF_ERA, 4, 9, SignStyle.NORMAL).appendLiteral("-") .appendValue(ChronoField.MONTH_OF_YEAR, 2).appendLiteral('-') - .appendValue(ChronoField.DAY_OF_MONTH, 2) - .appendLiteral(" ") + .appendValue(ChronoField.DAY_OF_MONTH, 2).appendLiteral(" ") .append(ISO_LOCAL_TIME) .appendPattern(" G") .toFormatter(); - private static final DateTimeFormatter LOCAL_DATE_OPTIONAL_FORMATTER = (new DateTimeFormatterBuilder()) - .optionalStart().appendPattern("-").optionalEnd() - .appendPattern("y").appendLiteral('-') + private static final DateTimeFormatter LOCAL_DATE_SET_FORMATTER = (new DateTimeFormatterBuilder()) + .appendValue(ChronoField.YEAR_OF_ERA, 1, 9, SignStyle.NORMAL).appendLiteral('-') .appendValue(ChronoField.MONTH_OF_YEAR, 2).appendLiteral('-') .appendValue(ChronoField.DAY_OF_MONTH, 2) .optionalStart().appendPattern(" G").optionalEnd() .toFormatter(); - private static final DateTimeFormatter LOCAL_DATE_TIME_OPTIONAL_FORMATTER = (new DateTimeFormatterBuilder()) - .optionalStart().appendPattern("-").optionalEnd() - .appendPattern("y").appendLiteral('-') - .appendValue(ChronoField.MONTH_OF_YEAR, 2).appendLiteral('-') - .appendValue(ChronoField.DAY_OF_MONTH, 2) + private static final DateTimeFormatter LOCAL_DATE_TIME_SET_FORMATTER = (new DateTimeFormatterBuilder()) + .appendValue(ChronoField.YEAR_OF_ERA, 1, 9, SignStyle.NORMAL).appendLiteral('-') + .appendValue(ChronoField.MONTH_OF_YEAR, 1, 2, SignStyle.NORMAL).appendLiteral('-') + 
.appendValue(ChronoField.DAY_OF_MONTH, 1, 2, SignStyle.NORMAL) .appendLiteral(" ") .append(ISO_LOCAL_TIME) .optionalStart().appendPattern(" G").optionalEnd() @@ -173,19 +161,7 @@ public List getFields(OneRow row) throws SQLException { case DATE: if (isDateWideRange) { LocalDate localDate = result.getObject(colName, LocalDate.class); - if (localDate == null) { - value = null; - break; - } - if (localDate.getYear() >= -999 && localDate.getYear() <= 999) { - // Return at least 4-digits year string with leading zero if year contains less than 4 - value = localDate.format(LOCAL_DATE_LONG_YEAR_FORMATTER); - } else { - // For year with more 4 digits 'LOCAL_DATE_FORMATTER' is used. - // We cannot use 'LOCAL_DATE_LONG_YEAR_FORMATTER' as it doesn't correctly format - // some years with minus sign and more than 4 digits, i.e. -123456 - value = localDate.format(LOCAL_DATE_FORMATTER); - } + value = localDate != null ? localDate.format(LOCAL_DATE_GET_FORMATTER) : null; } else { value = result.getDate(colName); } @@ -193,15 +169,7 @@ public List getFields(OneRow row) throws SQLException { case TIMESTAMP: if (isDateWideRange) { LocalDateTime localDateTime = result.getObject(colName, LocalDateTime.class); - if (localDateTime == null) { - value = null; - break; - } - if (localDateTime.getYear() >= -999 && localDateTime.getYear() <= 999) { - value = localDateTime.format(LOCAL_DATE_TIME_LONG_YEAR_FORMATTER); - } else { - value = localDateTime.format(LOCAL_DATE_TIME_FORMATTER); - } + value = localDateTime != null ? 
localDateTime.format(LOCAL_DATE_TIME_GET_FORMATTER) : null; } else { value = result.getTimestamp(colName); } @@ -209,15 +177,11 @@ public List getFields(OneRow row) throws SQLException { case TIMESTAMP_WITH_TIME_ZONE: if (isDateWideRange) { OffsetDateTime offsetDateTime = result.getObject(colName, OffsetDateTime.class); - if (offsetDateTime == null) { - value = null; - break; - } - LocalDateTime localDateTime = offsetDateTime.atZoneSameInstant(ZoneId.systemDefault()).toLocalDateTime(); - if (localDateTime.getYear() >= -999 && localDateTime.getYear() <= 999) { - value = localDateTime.format(LOCAL_DATE_TIME_LONG_YEAR_FORMATTER); + if (offsetDateTime != null) { + LocalDateTime localDateTime = offsetDateTime.atZoneSameInstant(ZoneId.systemDefault()).toLocalDateTime(); + value = localDateTime.format(LOCAL_DATE_TIME_GET_FORMATTER); } else { - value = localDateTime.format(LOCAL_DATE_TIME_FORMATTER); + value = null; } } else { value = result.getTimestamp(colName); @@ -446,7 +410,7 @@ public static void decodeOneRowToPreparedStatement(OneRow row, PreparedStatement private Object getLocalDate(String rawVal) { try { - return LocalDate.parse(rawVal, LOCAL_DATE_OPTIONAL_FORMATTER); + return LocalDate.parse(rawVal, LOCAL_DATE_SET_FORMATTER); } catch (Exception e) { throw new IllegalArgumentException("Failed to convert date '" + rawVal + "' to LocalDate class: " + e.getMessage(), e); } @@ -454,7 +418,7 @@ private Object getLocalDate(String rawVal) { private Object getLocalDateTime(String rawVal) { try { - return LocalDateTime.parse(rawVal, LOCAL_DATE_TIME_OPTIONAL_FORMATTER); + return LocalDateTime.parse(rawVal, LOCAL_DATE_TIME_SET_FORMATTER); } catch (Exception e) { throw new IllegalArgumentException("Failed to convert timestamp '" + rawVal + "' to the LocalDateTime class: " + e.getMessage(), e); } From 5f48e9e514e2efa914484f37e0992bcf2c1987d9 Mon Sep 17 00:00:00 2001 From: dimoffon Date: Tue, 21 Mar 2023 06:32:21 +0300 Subject: [PATCH 46/53] ADBDEV-3266 Added IN predicate --- 
.../java/org/greenplum/pxf/plugins/jdbc/SQLQueryBuilder.java | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/SQLQueryBuilder.java b/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/SQLQueryBuilder.java index 453b9077ff..a472305a73 100644 --- a/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/SQLQueryBuilder.java +++ b/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/SQLQueryBuilder.java @@ -58,8 +58,7 @@ public class SQLQueryBuilder { Operator.EQUALS, Operator.LIKE, Operator.NOT_EQUALS, - // TODO: In is not supported? - // Operator.IN, + Operator.IN, Operator.IS_NULL, Operator.IS_NOT_NULL, Operator.NOOP, From c60c73656bd268cca24825597a121d3c001e5af8 Mon Sep 17 00:00:00 2001 From: Roman Zolotov Date: Thu, 2 Mar 2023 01:17:28 +0200 Subject: [PATCH 47/53] ADBDEV-3583: Change unit test to support operator 'IN' --- .../org/greenplum/pxf/plugins/jdbc/SQLQueryBuilderTest.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/server/pxf-jdbc/src/test/java/org/greenplum/pxf/plugins/jdbc/SQLQueryBuilderTest.java b/server/pxf-jdbc/src/test/java/org/greenplum/pxf/plugins/jdbc/SQLQueryBuilderTest.java index 5c1562a130..bae649e9a1 100644 --- a/server/pxf-jdbc/src/test/java/org/greenplum/pxf/plugins/jdbc/SQLQueryBuilderTest.java +++ b/server/pxf-jdbc/src/test/java/org/greenplum/pxf/plugins/jdbc/SQLQueryBuilderTest.java @@ -133,17 +133,17 @@ public void testIsNotNullOperator() throws Exception { } @Test - public void testUnsupportedOperationFilter() throws Exception { + public void testInOperator() throws Exception { when(mockMetaData.getDatabaseProductName()).thenReturn("mysql"); when(mockMetaData.getExtraNameCharacters()).thenReturn(""); - // IN 'bad' + // grade IN 'bad' context.setFilterString("a3c25s3dbado10"); SQLQueryBuilder builder = new SQLQueryBuilder(context, mockMetaData); builder.autoSetQuoteString(); String query = 
builder.buildSelectQuery(); - assertEquals(SQL, query); + assertEquals(SQL + " WHERE grade IN 'bad'", query); } @Test From 0d12ae916fa85275116c0aa1de1631686a00f9e4 Mon Sep 17 00:00:00 2001 From: Roman Zolotov Date: Mon, 3 Apr 2023 16:37:27 +0300 Subject: [PATCH 48/53] ADBDEV-3583: Change unit test to support operator 'IN' --- .../pxf/plugins/jdbc/SQLQueryBuilderTest.java | 51 +++++++++++++++++-- 1 file changed, 47 insertions(+), 4 deletions(-) diff --git a/server/pxf-jdbc/src/test/java/org/greenplum/pxf/plugins/jdbc/SQLQueryBuilderTest.java b/server/pxf-jdbc/src/test/java/org/greenplum/pxf/plugins/jdbc/SQLQueryBuilderTest.java index bae649e9a1..8443c5de35 100644 --- a/server/pxf-jdbc/src/test/java/org/greenplum/pxf/plugins/jdbc/SQLQueryBuilderTest.java +++ b/server/pxf-jdbc/src/test/java/org/greenplum/pxf/plugins/jdbc/SQLQueryBuilderTest.java @@ -35,6 +35,7 @@ import java.util.List; import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotEquals; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -133,17 +134,59 @@ public void testIsNotNullOperator() throws Exception { } @Test - public void testInOperator() throws Exception { + public void testInOperatorWithWhere() throws Exception { when(mockMetaData.getDatabaseProductName()).thenReturn("mysql"); when(mockMetaData.getExtraNameCharacters()).thenReturn(""); - // grade IN 'bad' - context.setFilterString("a3c25s3dbado10"); + // grade IN ('bad') + context.setFilterString("a3m1009s3dbado10"); SQLQueryBuilder builder = new SQLQueryBuilder(context, mockMetaData); builder.autoSetQuoteString(); String query = builder.buildSelectQuery(); - assertEquals(SQL + " WHERE grade IN 'bad'", query); + assertEquals(SQL + " WHERE grade IN ('bad')", query); + } + + @Test + public void testInOperatorWithWhereAndFewIn() throws Exception { + when(mockMetaData.getDatabaseProductName()).thenReturn("mysql"); + 
when(mockMetaData.getExtraNameCharacters()).thenReturn(""); + + // grade IN ('bad') + context.setFilterString("a3m1009s3dbads4dgoodo10"); + + SQLQueryBuilder builder = new SQLQueryBuilder(context, mockMetaData); + builder.autoSetQuoteString(); + String query = builder.buildSelectQuery(); + assertEquals(SQL + " WHERE grade IN ('bad','good')", query); + } + + @Test + public void testInOperatorShouldContainWhere() throws Exception { + when(mockMetaData.getDatabaseProductName()).thenReturn("mysql"); + when(mockMetaData.getExtraNameCharacters()).thenReturn(""); + + // grade IN ('bad') + context.setFilterString("a3m1009s3dbado10"); + + SQLQueryBuilder builder = new SQLQueryBuilder(context, mockMetaData); + builder.autoSetQuoteString(); + String query = builder.buildSelectQuery(); + assertNotEquals(SQL, query); + } + + @Test + public void testInOperatorShouldContainBrackets() throws Exception { + when(mockMetaData.getDatabaseProductName()).thenReturn("mysql"); + when(mockMetaData.getExtraNameCharacters()).thenReturn(""); + + // grade IN ('bad') + context.setFilterString("a3m1009s3dbado10"); + + SQLQueryBuilder builder = new SQLQueryBuilder(context, mockMetaData); + builder.autoSetQuoteString(); + String query = builder.buildSelectQuery(); + assertNotEquals(SQL + " WHERE grade IN 'bad'", query); } @Test From d6a0c1741d5f06c703fdb89acd73f7ab8762006e Mon Sep 17 00:00:00 2001 From: Roman Zolotov Date: Mon, 3 Apr 2023 16:39:53 +0300 Subject: [PATCH 49/53] ADBDEV-3583: Change unit test to support operator 'IN' --- .../org/greenplum/pxf/plugins/jdbc/SQLQueryBuilderTest.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/pxf-jdbc/src/test/java/org/greenplum/pxf/plugins/jdbc/SQLQueryBuilderTest.java b/server/pxf-jdbc/src/test/java/org/greenplum/pxf/plugins/jdbc/SQLQueryBuilderTest.java index 8443c5de35..2e3a468bc4 100644 --- a/server/pxf-jdbc/src/test/java/org/greenplum/pxf/plugins/jdbc/SQLQueryBuilderTest.java +++ 
b/server/pxf-jdbc/src/test/java/org/greenplum/pxf/plugins/jdbc/SQLQueryBuilderTest.java @@ -152,7 +152,7 @@ public void testInOperatorWithWhereAndFewIn() throws Exception { when(mockMetaData.getDatabaseProductName()).thenReturn("mysql"); when(mockMetaData.getExtraNameCharacters()).thenReturn(""); - // grade IN ('bad') + // grade IN ('bad','good') context.setFilterString("a3m1009s3dbads4dgoodo10"); SQLQueryBuilder builder = new SQLQueryBuilder(context, mockMetaData); From 499c574a5c9f58501b2dfa26d28f67666c9904d8 Mon Sep 17 00:00:00 2001 From: Roman Zolotov Date: Tue, 4 Apr 2023 19:36:15 +0300 Subject: [PATCH 50/53] ADBDEV-3099: Fix TIMESTAMP_WITH_TIME_ZONE for readable tables We don't need to convert OffsetDateTime to LocalDateTime on the PXF side as it has its own time zone that might not be the same as in GP. --- .../pxf/plugins/jdbc/JdbcResolver.java | 22 ++++++++++++------- .../pxf/plugins/jdbc/JdbcResolverTest.java | 8 +++---- 2 files changed, 18 insertions(+), 12 deletions(-) diff --git a/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/JdbcResolver.java b/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/JdbcResolver.java index 8c7f1e536a..ca156319d9 100644 --- a/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/JdbcResolver.java +++ b/server/pxf-jdbc/src/main/java/org/greenplum/pxf/plugins/jdbc/JdbcResolver.java @@ -39,7 +39,6 @@ import java.time.LocalDate; import java.time.LocalDateTime; import java.time.OffsetDateTime; -import java.time.ZoneId; import java.time.format.DateTimeFormatter; import java.time.format.DateTimeFormatterBuilder; import java.time.format.SignStyle; @@ -50,6 +49,7 @@ import java.util.Set; import static java.time.format.DateTimeFormatter.ISO_LOCAL_TIME; +import static java.time.format.DateTimeFormatter.ISO_OFFSET_TIME; /** * JDBC tables resolver @@ -73,6 +73,14 @@ public class JdbcResolver extends JdbcBasePlugin implements Resolver { .appendPattern(" G") .toFormatter(); + private static final
DateTimeFormatter OFFSET_DATE_TIME_GET_FORMATTER = (new DateTimeFormatterBuilder()) + .appendValue(ChronoField.YEAR_OF_ERA, 4, 9, SignStyle.NORMAL).appendLiteral("-") + .appendValue(ChronoField.MONTH_OF_YEAR, 2).appendLiteral('-') + .appendValue(ChronoField.DAY_OF_MONTH, 2).appendLiteral(" ") + .append(ISO_OFFSET_TIME) + .appendPattern(" G") + .toFormatter(); + private static final DateTimeFormatter LOCAL_DATE_SET_FORMATTER = (new DateTimeFormatterBuilder()) .appendValue(ChronoField.YEAR_OF_ERA, 1, 9, SignStyle.NORMAL).appendLiteral('-') .appendValue(ChronoField.MONTH_OF_YEAR, 2).appendLiteral('-') @@ -177,14 +185,12 @@ public List getFields(OneRow row) throws SQLException { case TIMESTAMP_WITH_TIME_ZONE: if (isDateWideRange) { OffsetDateTime offsetDateTime = result.getObject(colName, OffsetDateTime.class); - if (offsetDateTime != null) { - LocalDateTime localDateTime = offsetDateTime.atZoneSameInstant(ZoneId.systemDefault()).toLocalDateTime(); - value = localDateTime.format(LOCAL_DATE_TIME_GET_FORMATTER); - } else { - value = null; - } + value = offsetDateTime != null ? 
offsetDateTime.format(OFFSET_DATE_TIME_GET_FORMATTER) : null; } else { - value = result.getTimestamp(colName); + throw new UnsupportedOperationException( + String.format("Field type '%s' (column '%s') is not supported", + DataType.get(oneField.type), + column)); } break; default: diff --git a/server/pxf-jdbc/src/test/java/org/greenplum/pxf/plugins/jdbc/JdbcResolverTest.java b/server/pxf-jdbc/src/test/java/org/greenplum/pxf/plugins/jdbc/JdbcResolverTest.java index 617c361081..ff82b12a12 100644 --- a/server/pxf-jdbc/src/test/java/org/greenplum/pxf/plugins/jdbc/JdbcResolverTest.java +++ b/server/pxf-jdbc/src/test/java/org/greenplum/pxf/plugins/jdbc/JdbcResolverTest.java @@ -261,7 +261,7 @@ void getFieldOffsetDateTimeWithWideRangeTest() throws SQLException { resolver.columns = context.getTupleDescription(); resolver.isDateWideRange = isDateWideRange; List oneFields = resolver.getFields(row); - assertEquals("1977-12-11 05:15:30.1234 AD", oneFields.get(0).val); + assertEquals("1977-12-11 10:15:30.1234+05:00 AD", oneFields.get(0).val); } } @@ -300,7 +300,7 @@ void getFieldOffsetDateTimeWithWideRangeWithLeadingZeroTest() throws SQLExceptio resolver.columns = context.getTupleDescription(); resolver.isDateWideRange = isDateWideRange; List oneFields = resolver.getFields(row); - assertEquals("0003-01-02 01:05:06.0000015 AD", oneFields.get(0).val); + assertEquals("0003-01-02 04:05:06.0000015+03:00 AD", oneFields.get(0).val); } } @@ -320,7 +320,7 @@ void getFieldOffsetDateTimeWithMoreThan4digitsInYearTest() throws SQLException { resolver.columns = context.getTupleDescription(); resolver.isDateWideRange = isDateWideRange; List oneFields = resolver.getFields(row); - assertEquals("9876543-12-11 14:15:30.1234 AD", oneFields.get(0).val); + assertEquals("9876543-12-11 11:15:30.1234-03:00 AD", oneFields.get(0).val); } } @@ -341,7 +341,7 @@ void getFieldOffsetDateTimeWithEraTest() throws SQLException { resolver.isDateWideRange = isDateWideRange; List oneFields = resolver.getFields(row); 
// The year -3456 is transferred to 3457 BC: https://en.wikipedia.org/wiki/Astronomical_year_numbering - assertEquals("3457-12-11 09:15:30 BC", oneFields.get(0).val); + assertEquals("3457-12-11 11:15:30+02:00 BC", oneFields.get(0).val); } } From 470f8aa4220d12960f47cb9202e22447b410a4b2 Mon Sep 17 00:00:00 2001 From: Roman Zolotov Date: Thu, 11 May 2023 12:55:52 +0300 Subject: [PATCH 51/53] ADBDEV-3683: Downgrade to Spring Boot 2.4.3 --- server/build.gradle | 12 ++++++++++++ server/gradle.properties | 2 +- 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/server/build.gradle b/server/build.gradle index 63c21cebed..67403c1135 100644 --- a/server/build.gradle +++ b/server/build.gradle @@ -118,6 +118,18 @@ configure(javaProjects) { dependency("org.apache.htrace:htrace-core:3.1.0-incubating") dependency("org.apache.htrace:htrace-core4:4.0.1-incubating") + // --- bump log4j2 to 2.17.1 for CVE-2021-44228, CVE-2021-45046, and CVE-2021-45105 fixes, + // more details: https://logging.apache.org/log4j/2.x/security.html + // revert once org.springframework.boot:spring-boot-starter-log4j2 is upgraded to bundle log4j2:2.17.1+ + dependencySet(group:"org.apache.logging.log4j", version:"2.17.1") { + entry("log4j-jul") + entry("log4j-api") + entry("log4j-core") + entry("log4j-spring-boot") + } + dependency("org.apache.logging.log4j:log4j-slf4j-impl:2.17.1") + // --- end of CVE patch + dependency("org.apache.zookeeper:zookeeper:3.4.6") dependency("org.codehaus.woodstox:stax2-api:3.1.4") dependency("org.datanucleus:datanucleus-api-jdo:4.2.4") diff --git a/server/gradle.properties b/server/gradle.properties index 0349d875c7..2bdf9c0ade 100644 --- a/server/gradle.properties +++ b/server/gradle.properties @@ -25,7 +25,7 @@ hbaseVersion=1.3.2 junitVersion=4.11 parquetVersion=1.11.1 awsJavaSdk=1.12.261 -springBootVersion=2.5.12 +springBootVersion=2.4.3 org.gradle.daemon=true org.gradle.parallel=false orcVersion=1.6.13 From 865f06d376c023ecea8a0d99416d3e40e31a663f Mon Sep 17 
00:00:00 2001 From: Roman Zolotov Date: Tue, 6 Jun 2023 14:09:14 +0300 Subject: [PATCH 52/53] ADBDEV-3816: Switch off building and testing fdw extension In the previous PXF versions we didn't test or build the fdw extension. In the current version the script `compile_pxf.bash` initiates the `make -C fdw installcheck` command which builds the fdw extension and starts the fdw regression tests. We have to have a running greenplum database to pass the regression tests. But at that time we don't have a running greenplum yet. That is the reason why I switched off building the fdw extension. We can build and test the fdw extension directly in a project which needs pxf_fdw.so for some reason. --- arenadata/Dockerfile | 1 + 1 file changed, 1 insertion(+) diff --git a/arenadata/Dockerfile b/arenadata/Dockerfile index 28e46aeb72..f64438d47e 100644 --- a/arenadata/Dockerfile +++ b/arenadata/Dockerfile @@ -25,6 +25,7 @@ RUN source gpdb_src/concourse/scripts/common.bash && \ install_gpdb && \ source '/usr/local/greenplum-db-devel/greenplum_path.sh' && \ mkdir ${OUTPUT_ARTIFACT_DIR} && \ + export SKIP_FDW_BUILD_REASON=0 && \ pxf_src/concourse/scripts/compile_pxf.bash # create test image which prepares base image and keeps only pxf artifacts from build image From 21efde567e9d797ae6de03465ccd9eb3b4122149 Mon Sep 17 00:00:00 2001 From: Georgy Shelkovy Date: Mon, 10 Jul 2023 15:03:53 +0500 Subject: [PATCH 53/53] Close connection to PXF when query to external table is canceled (#42) The C-part of the PXF releases the context (cleanup_context) only on the last call in the pxfprotocol_export and pxfprotocol_import functions. That is, on errors, the context is not released, and curl-connections to the Java-part are not closed. Therefore, I added a callback to release resources on errors, which releases the context, including closing curl-connections.
--- external-table/src/pxfbridge.c | 2 +- external-table/src/pxfbridge.h | 1 + external-table/src/pxfprotocol.c | 19 +++++++++++++++++++ 3 files changed, 21 insertions(+), 1 deletion(-) diff --git a/external-table/src/pxfbridge.c b/external-table/src/pxfbridge.c index 713c1d306a..26ab7274e9 100644 --- a/external-table/src/pxfbridge.c +++ b/external-table/src/pxfbridge.c @@ -40,7 +40,7 @@ gpbridge_cleanup(gphadoop_context *context) if (context == NULL) return; - churl_cleanup(context->churl_handle, false); + churl_cleanup(context->churl_handle, context->after_error); context->churl_handle = NULL; churl_headers_cleanup(context->churl_headers); diff --git a/external-table/src/pxfbridge.h b/external-table/src/pxfbridge.h index 96517ae913..63b8224131 100644 --- a/external-table/src/pxfbridge.h +++ b/external-table/src/pxfbridge.h @@ -43,6 +43,7 @@ typedef struct ProjectionInfo *proj_info; List *quals; bool completed; + bool after_error; } gphadoop_context; /* diff --git a/external-table/src/pxfprotocol.c b/external-table/src/pxfprotocol.c index 4ccb1b96bb..2f7d3472bd 100644 --- a/external-table/src/pxfprotocol.c +++ b/external-table/src/pxfprotocol.c @@ -26,6 +26,7 @@ #include "access/fileam.h" #endif #include "utils/elog.h" +#include "utils/resowner.h" /* define magic module unless run as a part of test cases */ #ifndef UNIT_TESTING @@ -154,6 +155,21 @@ pxfprotocol_import(PG_FUNCTION_ARGS) PG_RETURN_INT32(bytes_read); } +static void +url_curl_abort_callback(ResourceReleasePhase phase, + bool isCommit, + bool isTopLevel, + void *arg) +{ + gphadoop_context *context = arg; + + if (phase != RESOURCE_RELEASE_AFTER_LOCKS || isCommit || !isTopLevel) + return; + + context->after_error = true; + cleanup_context(context); +} + /* * Allocates context and sets values for the segment */ @@ -190,6 +206,8 @@ create_context(PG_FUNCTION_ARGS, bool is_import) context->proj_info = proj_info; context->quals = filter_quals; context->completed = false; + context->after_error = false; + 
RegisterResourceReleaseCallback(url_curl_abort_callback, context); return context; } @@ -201,6 +219,7 @@ cleanup_context(gphadoop_context *context) { if (context != NULL) { + UnregisterResourceReleaseCallback(url_curl_abort_callback, context); gpbridge_cleanup(context); pfree(context->uri.data); pfree(context);