From cf50e87158e701de0a0c5a81c3e02ce4767779b7 Mon Sep 17 00:00:00 2001 From: Anastasiia Sergienko Date: Fri, 14 Feb 2020 15:57:07 +0100 Subject: [PATCH 1/6] #314: implemented switch for IMPORT statement for BigQuery dialect --- doc/dialects/athena.md | 2 +- doc/dialects/aurora.md | 2 +- doc/dialects/bigquery.md | 8 +++- doc/dialects/db2.md | 4 +- doc/dialects/exasol.md | 2 +- doc/dialects/hive.md | 2 +- doc/dialects/impala.md | 2 +- doc/dialects/mysql.md | 2 +- doc/dialects/oracle.md | 2 +- doc/dialects/postgresql.md | 2 +- doc/dialects/redshift.md | 2 +- doc/dialects/saphana.md | 2 +- doc/dialects/sql_server.md | 2 +- doc/dialects/sybase.md | 2 +- doc/dialects/teradata.md | 2 +- .../deploying_the_virtual_schema_adapter.md | 8 ++-- pom.xml | 2 +- .../BigQueryColumnMetadataReader.java | 2 +- .../bigquery/BigQueryMetadataReader.java | 2 +- .../dialects/bigquery/BigQueryProperties.java | 12 ++++++ .../dialects/bigquery/BigQuerySqlDialect.java | 19 ++++++++-- .../BigQuerySqlGenerationVisitor.java | 34 ----------------- .../dialects/IntegrationTestConstants.java | 2 +- .../BigQueryColumnMetadataReaderTest.java | 2 +- .../bigquery/BigQueryMetadataReaderTest.java | 2 +- .../bigquery/BigQuerySqlDialectTest.java | 20 ++++++++-- .../BigQuerySqlGenerationVisitorTest.java | 37 ------------------- 27 files changed, 76 insertions(+), 104 deletions(-) create mode 100644 src/main/java/com/exasol/adapter/dialects/bigquery/BigQueryProperties.java delete mode 100644 src/main/java/com/exasol/adapter/dialects/bigquery/BigQuerySqlGenerationVisitor.java delete mode 100644 src/test/java/com/exasol/adapter/dialects/bigquery/BigQuerySqlGenerationVisitorTest.java diff --git a/doc/dialects/athena.md b/doc/dialects/athena.md index 38680f12d..1c42788be 100644 --- a/doc/dialects/athena.md +++ b/doc/dialects/athena.md @@ -49,7 +49,7 @@ The SQL statement below creates the adapter script, defines the Java class that ```sql CREATE OR REPLACE JAVA ADAPTER SCRIPT ADAPTER.JDBC_ADAPTER AS %scriptclass 
com.exasol.adapter.RequestDispatcher; - %jar /buckets///virtualschema-jdbc-adapter-dist-3.0.2.jar; + %jar /buckets///virtualschema-jdbc-adapter-dist-3.1.0.jar; %jar /buckets///AthenaJDBC42-.jar; / ; diff --git a/doc/dialects/aurora.md b/doc/dialects/aurora.md index be2cadf6f..89668df27 100644 --- a/doc/dialects/aurora.md +++ b/doc/dialects/aurora.md @@ -62,7 +62,7 @@ The SQL statement below creates the adapter script, defines the Java class that ```sql CREATE OR REPLACE JAVA ADAPTER SCRIPT ADAPTER.JDBC_ADAPTER AS %scriptclass com.exasol.adapter.RequestDispatcher; - %jar /buckets///virtualschema-jdbc-adapter-dist-3.0.2.jar; + %jar /buckets///virtualschema-jdbc-adapter-dist-3.1.0.jar; %jar /buckets///postgresql-.jar; / ``` diff --git a/doc/dialects/bigquery.md b/doc/dialects/bigquery.md index 0dd1320c4..1abadf6ab 100644 --- a/doc/dialects/bigquery.md +++ b/doc/dialects/bigquery.md @@ -31,7 +31,7 @@ Please remember to check the versions of your JAR files after downloading driver ```sql CREATE JAVA ADAPTER SCRIPT SCHEMA_FOR_VS_SCRIPT.ADAPTER_SCRIPT_BIGQUERY AS %scriptclass com.exasol.adapter.RequestDispatcher; - %jar /buckets///virtualschema-jdbc-adapter-dist-3.0.2.jar; + %jar /buckets///virtualschema-jdbc-adapter-dist-3.1.0.jar; %jar /buckets///avro-1.8.2.jar; %jar /buckets///gax-1.42.0.jar; %jar /buckets///google-api-client-1.28.0.jar; @@ -101,6 +101,12 @@ If you need to use currently unsupported data types or find a way around known l ## Performance Please be aware that the current implementation of the dialect can only handle result sets with limited size (a few thousand rows). +If you need to proceed a large amount of data, please, contact our support team. Another implementation of the dialect with a speed improvement(using IMPORT INTO) is available, but not documented and announced officially on our GitHub due to: + +1. the complex installation process +2. 
security risks (a user has to disable the drivers' security manager to use it) + +Please, try to avoid using IMPORT implementation if it is possible. Be aware of the security risks. ## Testing information diff --git a/doc/dialects/db2.md b/doc/dialects/db2.md index ae2b3b64b..ac0f4b838 100644 --- a/doc/dialects/db2.md +++ b/doc/dialects/db2.md @@ -56,7 +56,7 @@ The SQL statement below creates the adapter script, defines the Java class that ```sql CREATE OR REPLACE JAVA ADAPTER SCRIPT ADAPTER.JDBC_ADAPTER AS %scriptclass com.exasol.adapter.RequestDispatcher; - %jar /buckets///virtualschema-jdbc-adapter-dist-3.0.2.jar; + %jar /buckets///virtualschema-jdbc-adapter-dist-3.1.0.jar; %jar /buckets///db2jcc4.jar; %jar /buckets///db2jcc_license_cu.jar; / @@ -68,7 +68,7 @@ CREATE OR REPLACE JAVA ADAPTER SCRIPT ADAPTER.JDBC_ADAPTER AS ```sql CREATE OR REPLACE JAVA ADAPTER SCRIPT ADAPTER.JDBC_ADAPTER AS %scriptclass com.exasol.adapter.RequestDispatcher; - %jar /buckets///virtualschema-jdbc-adapter-dist-3.0.2.jar; + %jar /buckets///virtualschema-jdbc-adapter-dist-3.1.0.jar; %jar /buckets///db2jcc4.jar; %jar /buckets///db2jcc_license_cu.jar; %jar /buckets///db2jcc_license_cisuz.jar; diff --git a/doc/dialects/exasol.md b/doc/dialects/exasol.md index e23c303fd..643ba88d2 100644 --- a/doc/dialects/exasol.md +++ b/doc/dialects/exasol.md @@ -18,7 +18,7 @@ The SQL statement below creates the adapter script, defines the Java class that ```sql CREATE JAVA ADAPTER SCRIPT ADAPTER.JDBC_ADAPTER AS %scriptclass com.exasol.adapter.RequestDispatcher; - %jar /buckets///virtualschema-jdbc-adapter-dist-3.0.2.jar; + %jar /buckets///virtualschema-jdbc-adapter-dist-3.1.0.jar; / ``` diff --git a/doc/dialects/hive.md b/doc/dialects/hive.md index 9b6a0c554..a6c2a0938 100644 --- a/doc/dialects/hive.md +++ b/doc/dialects/hive.md @@ -47,7 +47,7 @@ The SQL statement below creates the adapter script, defines the Java class that ```sql CREATE OR REPLACE JAVA ADAPTER SCRIPT ADAPTER.JDBC_ADAPTER AS 
%scriptclass com.exasol.adapter.RequestDispatcher; - %jar /buckets///jars/virtualschema-jdbc-adapter-dist-3.0.2.jar; + %jar /buckets///jars/virtualschema-jdbc-adapter-dist-3.1.0.jar; %jar /buckets///jars/HiveJDBC41.jar; / ``` diff --git a/doc/dialects/impala.md b/doc/dialects/impala.md index ec254829c..bb1c8433c 100644 --- a/doc/dialects/impala.md +++ b/doc/dialects/impala.md @@ -47,7 +47,7 @@ The SQL statement below creates the adapter script, defines the Java class that ```sql CREATE OR REPLACE JAVA ADAPTER SCRIPT ADAPTER.JDBC_ADAPTER AS %scriptclass com.exasol.adapter.RequestDispatcher; - %jar /buckets///virtualschema-jdbc-adapter-dist-3.0.2.jar; + %jar /buckets///virtualschema-jdbc-adapter-dist-3.1.0.jar; %jar /buckets///ImpalaJDBC41.jar; / ; diff --git a/doc/dialects/mysql.md b/doc/dialects/mysql.md index 2a8dc069f..970c1da42 100644 --- a/doc/dialects/mysql.md +++ b/doc/dialects/mysql.md @@ -51,7 +51,7 @@ The SQL statement below creates the adapter script, defines the Java class that ```sql CREATE OR REPLACE JAVA ADAPTER SCRIPT ADAPTER.JDBC_ADAPTER AS %scriptclass com.exasol.adapter.RequestDispatcher; - %jar /buckets///virtualschema-jdbc-adapter-dist-3.0.2.jar; + %jar /buckets///virtualschema-jdbc-adapter-dist-3.1.0.jar; %jar /buckets///mysql-connector-java-.jar; / ; diff --git a/doc/dialects/oracle.md b/doc/dialects/oracle.md index 15f93ea04..d74e96332 100644 --- a/doc/dialects/oracle.md +++ b/doc/dialects/oracle.md @@ -48,7 +48,7 @@ The SQL statement below creates the adapter script, defines the Java class that ```sql CREATE JAVA ADAPTER SCRIPT ADAPTER.JDBC_ADAPTER AS %scriptclass com.exasol.adapter.RequestDispatcher; - %jar /buckets///virtualschema-jdbc-adapter-dist-3.0.2.jar; + %jar /buckets///virtualschema-jdbc-adapter-dist-3.1.0.jar; %jar /buckets///ojdbc.jar; / ; diff --git a/doc/dialects/postgresql.md b/doc/dialects/postgresql.md index aa5850001..2878d1289 100644 --- a/doc/dialects/postgresql.md +++ b/doc/dialects/postgresql.md @@ -24,7 +24,7 @@ The 
SQL statement below creates the adapter script, defines the Java class that ```sql CREATE OR REPLACE JAVA ADAPTER SCRIPT ADAPTER.JDBC_ADAPTER AS %scriptclass com.exasol.adapter.RequestDispatcher; - %jar /buckets///virtualschema-jdbc-adapter-dist-3.0.2.jar; + %jar /buckets///virtualschema-jdbc-adapter-dist-3.1.0.jar; %jar /buckets///postgresql-.jar; / ``` diff --git a/doc/dialects/redshift.md b/doc/dialects/redshift.md index 879058e69..9144506ca 100644 --- a/doc/dialects/redshift.md +++ b/doc/dialects/redshift.md @@ -51,7 +51,7 @@ The SQL statement below creates the adapter script, defines the Java class that ```sql CREATE OR REPLACE JAVA ADAPTER SCRIPT ADAPTER.JDBC_ADAPTER AS %scriptclass com.exasol.adapter.RequestDispatcher; - %jar /buckets///virtualschema-jdbc-adapter-dist-3.0.2.jar; + %jar /buckets///virtualschema-jdbc-adapter-dist-3.1.0.jar; %jar /buckets///RedshiftJDBC42-.jar; / ; diff --git a/doc/dialects/saphana.md b/doc/dialects/saphana.md index 684e33ae1..94a7e1612 100644 --- a/doc/dialects/saphana.md +++ b/doc/dialects/saphana.md @@ -47,7 +47,7 @@ The SQL statement below creates the adapter script, defines the Java class that ```sql CREATE JAVA ADAPTER SCRIPT ADAPTER.JDBC_ADAPTER AS %scriptclass com.exasol.adapter.RequestDispatcher; - %jar /buckets///virtualschema-jdbc-adapter-dist-3.0.2.jar; + %jar /buckets///virtualschema-jdbc-adapter-dist-3.1.0.jar; %jar /buckets///ngdbc-.jar; / ; diff --git a/doc/dialects/sql_server.md b/doc/dialects/sql_server.md index f18a4dbd5..abe244222 100644 --- a/doc/dialects/sql_server.md +++ b/doc/dialects/sql_server.md @@ -24,7 +24,7 @@ The SQL statement below creates the adapter script, defines the Java class that ```sql CREATE OR REPLACE JAVA ADAPTER SCRIPT ADAPTER.JDBC_ADAPTER AS %scriptclass com.exasol.adapter.RequestDispatcher; - %jar /buckets///virtualschema-jdbc-adapter-dist-3.0.2.jar; + %jar /buckets///virtualschema-jdbc-adapter-dist-3.1.0.jar; %jar /buckets///jtds.jar; / ``` diff --git a/doc/dialects/sybase.md 
b/doc/dialects/sybase.md index 80988a2b9..dde34927e 100644 --- a/doc/dialects/sybase.md +++ b/doc/dialects/sybase.md @@ -29,7 +29,7 @@ The SQL statement below creates the adapter script, defines the Java class that ```sql CREATE OR REPLACE JAVA ADAPTER SCRIPT ADAPTER.JDBC_ADAPTER AS %scriptclass com.exasol.adapter.RequestDispatcher; - %jar /buckets///virtualschema-jdbc-adapter-dist-3.0.2.jar; + %jar /buckets///virtualschema-jdbc-adapter-dist-3.1.0.jar; %jar /buckets///jtds-.jar; / ``` diff --git a/doc/dialects/teradata.md b/doc/dialects/teradata.md index 5ea24bcbb..70fa74494 100644 --- a/doc/dialects/teradata.md +++ b/doc/dialects/teradata.md @@ -47,7 +47,7 @@ The SQL statement below creates the adapter script, defines the Java class that ```sql CREATE OR REPLACE JAVA ADAPTER SCRIPT ADAPTER.JDBC_ADAPTER AS %scriptclass com.exasol.adapter.RequestDispatcher; - %jar /buckets///virtualschema-jdbc-adapter-dist-3.0.2.jar; + %jar /buckets///virtualschema-jdbc-adapter-dist-3.1.0.jar; %jar /buckets///terajdbc4.jar; %jar /buckets///tdgssconfig.jar; / diff --git a/doc/user-guide/deploying_the_virtual_schema_adapter.md b/doc/user-guide/deploying_the_virtual_schema_adapter.md index 496e10ad1..3e615da98 100644 --- a/doc/user-guide/deploying_the_virtual_schema_adapter.md +++ b/doc/user-guide/deploying_the_virtual_schema_adapter.md @@ -23,7 +23,7 @@ cd virtual-schemas/jdbc-adapter/ mvn clean -DskipTests package ``` -The resulting fat JAR is stored in `virtualschema-jdbc-adapter-dist/target/virtualschema-jdbc-adapter-dist-3.0.2.jar`. +The resulting fat JAR is stored in `virtualschema-jdbc-adapter-dist/target/virtualschema-jdbc-adapter-dist-3.1.0.jar`. ## Uploading the Adapter JAR Archive @@ -46,8 +46,8 @@ Following steps are required to upload a file to a bucket: 1. Now upload the file into this bucket, e.g. using curl (adapt the hostname, BucketFS port, bucket name and bucket write password). 
```bash -curl -X PUT -T virtualschema-jdbc-adapter-dist/target/virtualschema-jdbc-adapter-dist-3.0.2.jar \ - http://w:write-password@your.exasol.host.com:2580/bucket1/virtualschema-jdbc-adapter-dist-3.0.2.jar +curl -X PUT -T virtualschema-jdbc-adapter-dist/target/virtualschema-jdbc-adapter-dist-3.1.0.jar \ + http://w:write-password@your.exasol.host.com:2580/bucket1/virtualschema-jdbc-adapter-dist-3.1.0.jar ``` If you later need to change the bucket passwords, select the bucket and click "Edit". @@ -84,7 +84,7 @@ CREATE SCHEMA ADAPTER; CREATE JAVA ADAPTER SCRIPT ADAPTER.JDBC_ADAPTER AS %scriptclass com.exasol.adapter.RequestDispatcher; - %jar /buckets/your-bucket-fs/your-bucket/virtualschema-jdbc-adapter-dist-3.0.2.jar; + %jar /buckets/your-bucket-fs/your-bucket/virtualschema-jdbc-adapter-dist-3.1.0.jar; %jar /buckets/your-bucket-fs/your-bucket/.jar; / ``` diff --git a/pom.xml b/pom.xml index b79415176..22d1fa14b 100644 --- a/pom.xml +++ b/pom.xml @@ -7,7 +7,7 @@ ${product.version} Virtual Schema JDBC Adapter - 3.0.2 + 3.1.0 UTF-8 UTF-8 11 diff --git a/src/main/java/com/exasol/adapter/dialects/bigquery/BigQueryColumnMetadataReader.java b/src/main/java/com/exasol/adapter/dialects/bigquery/BigQueryColumnMetadataReader.java index 798ba9302..35b59d32c 100644 --- a/src/main/java/com/exasol/adapter/dialects/bigquery/BigQueryColumnMetadataReader.java +++ b/src/main/java/com/exasol/adapter/dialects/bigquery/BigQueryColumnMetadataReader.java @@ -28,7 +28,7 @@ public BigQueryColumnMetadataReader(final Connection connection, final AdapterPr @Override public DataType mapJdbcType(final JdbcTypeDescription jdbcTypeDescription) { if (jdbcTypeDescription.getJdbcType() == Types.TIME) { - return DataType.createVarChar(16, DataType.ExaCharset.UTF8); + return DataType.createVarChar(30, DataType.ExaCharset.UTF8); } return super.mapJdbcType(jdbcTypeDescription); } diff --git a/src/main/java/com/exasol/adapter/dialects/bigquery/BigQueryMetadataReader.java 
b/src/main/java/com/exasol/adapter/dialects/bigquery/BigQueryMetadataReader.java index 53669950b..1e91d8248 100644 --- a/src/main/java/com/exasol/adapter/dialects/bigquery/BigQueryMetadataReader.java +++ b/src/main/java/com/exasol/adapter/dialects/bigquery/BigQueryMetadataReader.java @@ -41,7 +41,7 @@ public Set getSupportedTableTypes() { @Override protected IdentifierConverter createIdentifierConverter() { - return new BaseIdentifierConverter(IdentifierCaseHandling.INTERPRET_AS_UPPER, + return new BaseIdentifierConverter(IdentifierCaseHandling.INTERPRET_AS_LOWER, IdentifierCaseHandling.INTERPRET_CASE_SENSITIVE); } } diff --git a/src/main/java/com/exasol/adapter/dialects/bigquery/BigQueryProperties.java b/src/main/java/com/exasol/adapter/dialects/bigquery/BigQueryProperties.java new file mode 100644 index 000000000..ec3b60595 --- /dev/null +++ b/src/main/java/com/exasol/adapter/dialects/bigquery/BigQueryProperties.java @@ -0,0 +1,12 @@ +package com.exasol.adapter.dialects.bigquery; + +/** + * This class contains BigQuery-specific adapter properties. 
+ */ +public class BigQueryProperties { + public static final String BIGQUERY_ENABLE_IMPORT_PROPERTY = "BIGQUERY_ENABLE_IMPORT"; + + private BigQueryProperties() { + // prevent instantiation + } +} diff --git a/src/main/java/com/exasol/adapter/dialects/bigquery/BigQuerySqlDialect.java b/src/main/java/com/exasol/adapter/dialects/bigquery/BigQuerySqlDialect.java index 34cdda7e0..db5e89507 100644 --- a/src/main/java/com/exasol/adapter/dialects/bigquery/BigQuerySqlDialect.java +++ b/src/main/java/com/exasol/adapter/dialects/bigquery/BigQuerySqlDialect.java @@ -6,9 +6,11 @@ import static com.exasol.adapter.capabilities.MainCapability.*; import static com.exasol.adapter.capabilities.PredicateCapability.*; import static com.exasol.adapter.capabilities.ScalarFunctionCapability.*; +import static com.exasol.adapter.dialects.bigquery.BigQueryProperties.BIGQUERY_ENABLE_IMPORT_PROPERTY; import java.sql.Connection; import java.util.*; +import java.util.logging.Logger; import com.exasol.adapter.AdapterProperties; import com.exasol.adapter.capabilities.Capabilities; @@ -23,12 +25,13 @@ * @see BigQuery */ public class BigQuerySqlDialect extends AbstractSqlDialect { + private static final Logger LOGGER = Logger.getLogger(BigQuerySqlDialect.class.getName()); static final String NAME = "BIGQUERY"; private static final Capabilities CAPABILITIES = createCapabilityList(); private static final List SUPPORTED_PROPERTIES = Arrays.asList(SQL_DIALECT_PROPERTY, CONNECTION_NAME_PROPERTY, CONNECTION_STRING_PROPERTY, USERNAME_PROPERTY, PASSWORD_PROPERTY, CATALOG_NAME_PROPERTY, SCHEMA_NAME_PROPERTY, TABLE_FILTER_PROPERTY, EXCLUDED_CAPABILITIES_PROPERTY, - DEBUG_ADDRESS_PROPERTY, LOG_LEVEL_PROPERTY); + DEBUG_ADDRESS_PROPERTY, LOG_LEVEL_PROPERTY, BIGQUERY_ENABLE_IMPORT_PROPERTY); /** * Create a new instance of the {@link BigQuerySqlDialect}. 
@@ -48,7 +51,14 @@ protected RemoteMetadataReader createRemoteMetadataReader() { @Override protected QueryRewriter createQueryRewriter() { - return new BigQueryQueryRewriter(this, this.remoteMetadataReader, this.connection); + if (this.properties.containsKey(BIGQUERY_ENABLE_IMPORT_PROPERTY) + && "true".equalsIgnoreCase(this.properties.get(BIGQUERY_ENABLE_IMPORT_PROPERTY))) { + LOGGER.info("Attention, IMPORT was activated for BIGQUERY dialect. " + + "Please, be aware that it is not secured to use IMPORT with this dialect."); + return new BaseQueryRewriter(this, this.remoteMetadataReader, this.connection); + } else { + return new BigQueryQueryRewriter(this, this.remoteMetadataReader, this.connection); + } } @Override @@ -135,7 +145,8 @@ public NullSorting getDefaultNullSorting() { } @Override - public SqlGenerationVisitor getSqlGenerationVisitor(final SqlGenerationContext context) { - return new BigQuerySqlGenerationVisitor(this, context); + public void validateProperties() throws PropertyValidationException { + super.validateProperties(); + validateBooleanProperty(BIGQUERY_ENABLE_IMPORT_PROPERTY); } } \ No newline at end of file diff --git a/src/main/java/com/exasol/adapter/dialects/bigquery/BigQuerySqlGenerationVisitor.java b/src/main/java/com/exasol/adapter/dialects/bigquery/BigQuerySqlGenerationVisitor.java deleted file mode 100644 index 9d2008f63..000000000 --- a/src/main/java/com/exasol/adapter/dialects/bigquery/BigQuerySqlGenerationVisitor.java +++ /dev/null @@ -1,34 +0,0 @@ -package com.exasol.adapter.dialects.bigquery; - -import java.util.ArrayList; -import java.util.List; - -import com.exasol.adapter.AdapterException; -import com.exasol.adapter.dialects.*; -import com.exasol.adapter.sql.SqlOrderBy; - -/** - * This class implements a Google-Big-Query-specific variant of an SQL generation visitor. - */ -public class BigQuerySqlGenerationVisitor extends SqlGenerationVisitor { - /** - * Create a new instance of the {@link BigQuerySqlGenerationVisitor}. 
- * - * @param dialect Big Query SQL dialect - * @param context SQL generation context - */ - public BigQuerySqlGenerationVisitor(final SqlDialect dialect, final SqlGenerationContext context) { - super(dialect, context); - } - - @Override - public String visit(final SqlOrderBy orderBy) throws AdapterException { - final List sqlOrderElement = new ArrayList<>(); - for (int i = 0; i < orderBy.getExpressions().size(); ++i) { - final boolean isAscending = orderBy.isAscending().get(i); - final String elementSql = orderBy.getExpressions().get(i).accept(this) + (isAscending ? "" : " DESC"); - sqlOrderElement.add(elementSql); - } - return "ORDER BY " + String.join(", ", sqlOrderElement); - } -} diff --git a/src/test/java/com/exasol/adapter/dialects/IntegrationTestConstants.java b/src/test/java/com/exasol/adapter/dialects/IntegrationTestConstants.java index 35528012c..9b0a430d3 100644 --- a/src/test/java/com/exasol/adapter/dialects/IntegrationTestConstants.java +++ b/src/test/java/com/exasol/adapter/dialects/IntegrationTestConstants.java @@ -4,7 +4,7 @@ public final class IntegrationTestConstants { public static final String INTEGRATION_TEST_CONFIGURATION_FILE_PROPERTY = "integrationtest.configfile"; - public static final String VIRTUAL_SCHEMAS_JAR_NAME_AND_VERSION = "virtualschema-jdbc-adapter-dist-3.0.2.jar"; + public static final String VIRTUAL_SCHEMAS_JAR_NAME_AND_VERSION = "virtualschema-jdbc-adapter-dist-3.1.0.jar"; public static final Path PATH_TO_VIRTUAL_SCHEMAS_JAR = Path.of("target", VIRTUAL_SCHEMAS_JAR_NAME_AND_VERSION); public static final String SCHEMA_EXASOL = "SCHEMA_EXASOL"; public static final String ADAPTER_SCRIPT_EXASOL = "ADAPTER_SCRIPT_EXASOL"; diff --git a/src/test/java/com/exasol/adapter/dialects/bigquery/BigQueryColumnMetadataReaderTest.java b/src/test/java/com/exasol/adapter/dialects/bigquery/BigQueryColumnMetadataReaderTest.java index 8d85f82f3..4c07a56f7 100644 --- 
a/src/test/java/com/exasol/adapter/dialects/bigquery/BigQueryColumnMetadataReaderTest.java +++ b/src/test/java/com/exasol/adapter/dialects/bigquery/BigQueryColumnMetadataReaderTest.java @@ -26,6 +26,6 @@ void beforeEach() { void mapDecimalReturnDecimal() { final JdbcTypeDescription typeDescription = new JdbcTypeDescription(Types.TIME, 0, 0, 10, "TIME"); assertThat(this.columnMetadataReader.mapJdbcType(typeDescription), - equalTo(DataType.createVarChar(16, DataType.ExaCharset.UTF8))); + equalTo(DataType.createVarChar(30, DataType.ExaCharset.UTF8))); } } \ No newline at end of file diff --git a/src/test/java/com/exasol/adapter/dialects/bigquery/BigQueryMetadataReaderTest.java b/src/test/java/com/exasol/adapter/dialects/bigquery/BigQueryMetadataReaderTest.java index a994223f2..c20c78499 100644 --- a/src/test/java/com/exasol/adapter/dialects/bigquery/BigQueryMetadataReaderTest.java +++ b/src/test/java/com/exasol/adapter/dialects/bigquery/BigQueryMetadataReaderTest.java @@ -48,6 +48,6 @@ void testCreateIdentifierConverter() { () -> assertThat(converter.getQuotedIdentifierHandling(), equalTo(IdentifierCaseHandling.INTERPRET_CASE_SENSITIVE)), () -> assertThat(converter.getUnquotedIdentifierHandling(), - equalTo(IdentifierCaseHandling.INTERPRET_AS_UPPER))); + equalTo(IdentifierCaseHandling.INTERPRET_AS_LOWER))); } } \ No newline at end of file diff --git a/src/test/java/com/exasol/adapter/dialects/bigquery/BigQuerySqlDialectTest.java b/src/test/java/com/exasol/adapter/dialects/bigquery/BigQuerySqlDialectTest.java index b208f5e54..b0ec42926 100644 --- a/src/test/java/com/exasol/adapter/dialects/bigquery/BigQuerySqlDialectTest.java +++ b/src/test/java/com/exasol/adapter/dialects/bigquery/BigQuerySqlDialectTest.java @@ -6,15 +6,20 @@ import static com.exasol.adapter.capabilities.MainCapability.*; import static com.exasol.adapter.capabilities.PredicateCapability.*; import static com.exasol.adapter.capabilities.ScalarFunctionCapability.*; +import static 
com.exasol.adapter.dialects.bigquery.BigQueryProperties.BIGQUERY_ENABLE_IMPORT_PROPERTY; import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.equalTo; import static org.junit.Assert.assertThat; +import java.util.HashMap; +import java.util.Map; + import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import com.exasol.adapter.AdapterProperties; +import com.exasol.adapter.dialects.BaseQueryRewriter; import com.exasol.adapter.dialects.SqlDialect; import com.exasol.adapter.sql.AggregateFunction; import com.exasol.adapter.sql.ScalarFunction; @@ -105,7 +110,7 @@ void testGetSupportedProperties() { containsInAnyOrder(SQL_DIALECT_PROPERTY, CONNECTION_NAME_PROPERTY, CONNECTION_STRING_PROPERTY, USERNAME_PROPERTY, PASSWORD_PROPERTY, CATALOG_NAME_PROPERTY, SCHEMA_NAME_PROPERTY, TABLE_FILTER_PROPERTY, EXCLUDED_CAPABILITIES_PROPERTY, DEBUG_ADDRESS_PROPERTY, - LOG_LEVEL_PROPERTY)); + LOG_LEVEL_PROPERTY, BIGQUERY_ENABLE_IMPORT_PROPERTY)); } @Test @@ -125,7 +130,16 @@ void testGetScalarFunctionAliases() { } @Test - void testGetSqlGenerationVisitor() { - assertThat(this.dialect.getSqlGenerationVisitor(null), instanceOf(BigQuerySqlGenerationVisitor.class)); + void testCreateQueryRewriterBigQueryRewriter() { + assertThat(this.dialect.createQueryRewriter(), instanceOf(BigQueryQueryRewriter.class)); + } + + @Test + void testCreateQueryRewriterBaseQueryRewriter() { + Map properties = new HashMap<>(); + properties.put(BIGQUERY_ENABLE_IMPORT_PROPERTY, "TRUE"); + AdapterProperties adapterProperties = new AdapterProperties(properties); + final BigQuerySqlDialect dialect = new BigQuerySqlDialect(null, adapterProperties); + assertThat(dialect.createQueryRewriter(), instanceOf(BaseQueryRewriter.class)); } } \ No newline at end of file diff --git a/src/test/java/com/exasol/adapter/dialects/bigquery/BigQuerySqlGenerationVisitorTest.java 
b/src/test/java/com/exasol/adapter/dialects/bigquery/BigQuerySqlGenerationVisitorTest.java deleted file mode 100644 index f3408e2d2..000000000 --- a/src/test/java/com/exasol/adapter/dialects/bigquery/BigQuerySqlGenerationVisitorTest.java +++ /dev/null @@ -1,37 +0,0 @@ -package com.exasol.adapter.dialects.bigquery; - -import static com.exasol.adapter.dialects.DialectTestData.getClicksTableMetadata; -import static org.hamcrest.CoreMatchers.equalTo; -import static org.hamcrest.MatcherAssert.assertThat; - -import java.sql.Connection; -import java.util.List; - -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; -import org.mockito.Mock; -import org.mockito.junit.jupiter.MockitoExtension; - -import com.exasol.adapter.AdapterException; -import com.exasol.adapter.AdapterProperties; -import com.exasol.adapter.dialects.SqlDialect; -import com.exasol.adapter.dialects.SqlGenerationContext; -import com.exasol.adapter.metadata.TableMetadata; -import com.exasol.adapter.sql.*; - -@ExtendWith(MockitoExtension.class) -class BigQuerySqlGenerationVisitorTest { - @Mock - Connection connection; - - @Test - void visit() throws AdapterException { - final SqlDialect dialect = new BigQuerySqlDialect(this.connection, AdapterProperties.emptyProperties()); - final SqlNodeVisitor visitor = new BigQuerySqlGenerationVisitor(dialect, - new SqlGenerationContext("catalog", "schema", false)); - final TableMetadata clicksMeta = getClicksTableMetadata(); - final SqlOrderBy orderBy = new SqlOrderBy(List.of(new SqlColumn(0, clicksMeta.getColumns().get(0))), - List.of(true), List.of(true)); - assertThat(visitor.visit(orderBy), equalTo("ORDER BY `USER_ID`")); - } -} \ No newline at end of file From 70401503dc826bfa5742d8b8123ad95698172e50 Mon Sep 17 00:00:00 2001 From: Anastasiia Sergienko Date: Mon, 17 Feb 2020 07:46:45 +0100 Subject: [PATCH 2/6] #314: improved code coverage --- .../bigquery/BigQuerySqlDialectTest.java | 23 +++++++++++++++---- 1 file changed, 19 
insertions(+), 4 deletions(-) diff --git a/src/test/java/com/exasol/adapter/dialects/bigquery/BigQuerySqlDialectTest.java b/src/test/java/com/exasol/adapter/dialects/bigquery/BigQuerySqlDialectTest.java index b0ec42926..859277a9a 100644 --- a/src/test/java/com/exasol/adapter/dialects/bigquery/BigQuerySqlDialectTest.java +++ b/src/test/java/com/exasol/adapter/dialects/bigquery/BigQuerySqlDialectTest.java @@ -7,10 +7,12 @@ import static com.exasol.adapter.capabilities.PredicateCapability.*; import static com.exasol.adapter.capabilities.ScalarFunctionCapability.*; import static com.exasol.adapter.dialects.bigquery.BigQueryProperties.BIGQUERY_ENABLE_IMPORT_PROPERTY; +import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.equalTo; import static org.junit.Assert.assertThat; +import static org.junit.jupiter.api.Assertions.assertThrows; import java.util.HashMap; import java.util.Map; @@ -19,8 +21,7 @@ import org.junit.jupiter.api.Test; import com.exasol.adapter.AdapterProperties; -import com.exasol.adapter.dialects.BaseQueryRewriter; -import com.exasol.adapter.dialects.SqlDialect; +import com.exasol.adapter.dialects.*; import com.exasol.adapter.sql.AggregateFunction; import com.exasol.adapter.sql.ScalarFunction; @@ -136,10 +137,24 @@ void testCreateQueryRewriterBigQueryRewriter() { @Test void testCreateQueryRewriterBaseQueryRewriter() { - Map properties = new HashMap<>(); + final Map properties = new HashMap<>(); properties.put(BIGQUERY_ENABLE_IMPORT_PROPERTY, "TRUE"); - AdapterProperties adapterProperties = new AdapterProperties(properties); + final AdapterProperties adapterProperties = new AdapterProperties(properties); final BigQuerySqlDialect dialect = new BigQuerySqlDialect(null, adapterProperties); assertThat(dialect.createQueryRewriter(), instanceOf(BaseQueryRewriter.class)); } + + @Test + void testValidateProperties() { + 
final Map properties = new HashMap<>(); + properties.put(BIGQUERY_ENABLE_IMPORT_PROPERTY, "WRONG VALUE"); + properties.put(CONNECTION_STRING_PROPERTY, "CONNECTION_STRING_PROPERTY"); + final AdapterProperties adapterProperties = new AdapterProperties(properties); + final BigQuerySqlDialect dialect = new BigQuerySqlDialect(null, adapterProperties); + final PropertyValidationException exception = assertThrows(PropertyValidationException.class, + dialect::validateProperties); + assertThat(exception.getMessage(), + containsString("The value 'WRONG VALUE' for the property BIGQUERY_ENABLE_IMPORT is invalid. " + + "It has to be either 'true' or 'false' (case insensitive)")); + } } \ No newline at end of file From 3af06ee78996ca6ba9c48ec4c49d862fd55ac1bf Mon Sep 17 00:00:00 2001 From: Anastasiia Sergienko <46891819+AnastasiiaSergienko@users.noreply.github.com> Date: Mon, 17 Feb 2020 11:27:16 +0100 Subject: [PATCH 3/6] Apply suggestions from code review MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-Authored-By: Sebastian Bär --- .../adapter/dialects/bigquery/BigQuerySqlDialect.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/main/java/com/exasol/adapter/dialects/bigquery/BigQuerySqlDialect.java b/src/main/java/com/exasol/adapter/dialects/bigquery/BigQuerySqlDialect.java index db5e89507..c188ad53b 100644 --- a/src/main/java/com/exasol/adapter/dialects/bigquery/BigQuerySqlDialect.java +++ b/src/main/java/com/exasol/adapter/dialects/bigquery/BigQuerySqlDialect.java @@ -53,8 +53,8 @@ protected RemoteMetadataReader createRemoteMetadataReader() { protected QueryRewriter createQueryRewriter() { if (this.properties.containsKey(BIGQUERY_ENABLE_IMPORT_PROPERTY) && "true".equalsIgnoreCase(this.properties.get(BIGQUERY_ENABLE_IMPORT_PROPERTY))) { - LOGGER.info("Attention, IMPORT was activated for BIGQUERY dialect. 
" - + "Please, be aware that it is not secured to use IMPORT with this dialect."); + LOGGER.warning("Attention: IMPORT is activated for the BIGQUERY dialect. " + + "Please be aware that it using IMPORT with this dialect requires disabling important security features and is therefore not recommended!"); return new BaseQueryRewriter(this, this.remoteMetadataReader, this.connection); } else { return new BigQueryQueryRewriter(this, this.remoteMetadataReader, this.connection); @@ -149,4 +149,4 @@ public void validateProperties() throws PropertyValidationException { super.validateProperties(); validateBooleanProperty(BIGQUERY_ENABLE_IMPORT_PROPERTY); } -} \ No newline at end of file +} From 843d960618b911214f170c7756f0be4e245becb2 Mon Sep 17 00:00:00 2001 From: Anastasiia Sergienko Date: Mon, 17 Feb 2020 15:52:35 +0100 Subject: [PATCH 4/6] #314: adjusted documentation --- README.md | 1 - doc/dialects/bigquery.md | 33 +++---- .../deploying_the_virtual_schema_adapter.md | 92 ------------------- doc/user-guide/user_guide.md | 57 +++++++++--- .../dialects/bigquery/BigQuerySqlDialect.java | 2 +- 5 files changed, 61 insertions(+), 124 deletions(-) delete mode 100644 doc/user-guide/deploying_the_virtual_schema_adapter.md diff --git a/README.md b/README.md index 3b5b7487b..011be0efb 100644 --- a/README.md +++ b/README.md @@ -70,7 +70,6 @@ This is an open source project which is officially supported by Exasol. For any ## Information for Users * [User Guide](doc/user-guide/user_guide.md) -* [Deployment Guide](doc/user-guide/deploying_the_virtual_schema_adapter.md) Supported dialects: diff --git a/doc/dialects/bigquery.md b/doc/dialects/bigquery.md index 1abadf6ab..e47665534 100644 --- a/doc/dialects/bigquery.md +++ b/doc/dialects/bigquery.md @@ -9,11 +9,13 @@ Download the [Simba JDBC Driver for Google BigQuery](https://cloud.google.com/bi ## Uploading the JDBC Driver to EXAOperation 1. 
[Create a bucket in BucketFS](https://docs.exasol.com/administration/on-premise/bucketfs/create_new_bucket_in_bucketfs_service.htm) -1. Upload the driver to BucketFS +1. [Upload the driver to BucketFS](https://docs.exasol.com/administration/on-premise/bucketfs/accessfiles.htm) -Hint: Magnitude Simba driver contains a lot of jar files, but you can upload all of them together as an archive (`.tar.gz`, for example). +**Hint**: Magnitude Simba driver contains a lot of jar files, but you can upload all of them together as an archive (`.tar.gz`, for example). The archive will be unpacked automatically in the bucket and you can access the files using the following path pattern '//.jar' +Leave only `.jar` files in the archive. It will help you to generate a list for adapter script later. + ## Installing the Adapter Script Upload the latest available release of [Virtual Schema JDBC Adapter](https://github.com/exasol/virtual-schemas/releases) to Bucket FS. @@ -26,32 +28,27 @@ CREATE SCHEMA SCHEMA_FOR_VS_SCRIPT; The SQL statement below creates the adapter script, defines the Java class that serves as entry point and tells the UDF framework where to find the libraries (JAR files) for Virtual Schema and database driver. -Please remember to check the versions of your JAR files after downloading driver. They can differ from the list below. +List all the jars from Magnitude Simba JDBC driver. 
```sql CREATE JAVA ADAPTER SCRIPT SCHEMA_FOR_VS_SCRIPT.ADAPTER_SCRIPT_BIGQUERY AS %scriptclass com.exasol.adapter.RequestDispatcher; %jar /buckets///virtualschema-jdbc-adapter-dist-3.1.0.jar; - %jar /buckets///avro-1.8.2.jar; - %jar /buckets///gax-1.42.0.jar; - %jar /buckets///google-api-client-1.28.0.jar; - %jar /buckets///google-api-services-bigquery-v2-rev426-1.25.0.jar; - %jar /buckets///google-auth-library-credentials-0.15.0.jar; - %jar /buckets///google-auth-library-oauth2-http-0.13.0.jar; %jar /buckets///GoogleBigQueryJDBC42.jar; - %jar /buckets///google-http-client-1.29.0.jar; - %jar /buckets///google-http-client-jackson2-1.28.0.jar; - %jar /buckets///google-oauth-client-1.28.0.jar; - %jar /buckets///grpc-context-1.18.0.jar; - %jar /buckets///guava-26.0-android.jar - %jar /buckets///jackson-core-2.9.6.jar; - %jar /buckets///joda-time-2.10.1.jar; - %jar /buckets///opencensus-api-0.18.0.jar; - %jar /buckets///opencensus-contrib-http-util-0.18.0.jar; + ... + ... + ... / ; ``` +**Hint**: to avoid filling the list by hand, use a convenience UDF script [bucketfs_ls](https://github.com/exasol/exa-toolbox/blob/master/utilities/bucketfs_ls.sql). +Create a script and run it as in the following example: + +```sql +SELECT '%jar /buckets////'|| files || ';' FROM (SELECT EXA_toolbox.bucketfs_ls('/buckets////') files ); +``` + ## Defining a Named Connection Please follow the [Authenticating to a Cloud API Service article](https://cloud.google.com/docs/authentication/) to get Google service account credentials. 
diff --git a/doc/user-guide/deploying_the_virtual_schema_adapter.md b/doc/user-guide/deploying_the_virtual_schema_adapter.md deleted file mode 100644 index 3e615da98..000000000 --- a/doc/user-guide/deploying_the_virtual_schema_adapter.md +++ /dev/null @@ -1,92 +0,0 @@ -# Deploying the Adapter Step By Step - -Run the following steps to deploy your adapter: - -## Prerequisites - -* Exasol Version 6.0 or later -* Advanced edition (which includes the ability to execute adapter scripts), or Free Small Business Edition -* Exasol must be able to connect to the host and port specified in the JDBC connection string. In case of problems you can use a [UDF to test the connectivity](https://www.exasol.com/support/browse/SOL-307). -* If the JDBC driver requires Kerberos authentication (e.g. for Hive or Impala), the Exasol database will authenticate using a keytab file. Each Exasol node needs access to port 88 of the the Kerberos KDC (key distribution center). - -## Obtaining JAR Archives - -First you have to obtain the so called fat JAR (including all dependencies). - -The easiest way is to download the JAR from the last [Release](https://github.com/Exasol/virtual-schemas/releases). - -Alternatively you can clone the repository and build the JAR as follows: - -```bash -git clone https://github.com/Exasol/virtual-schemas.git -cd virtual-schemas/jdbc-adapter/ -mvn clean -DskipTests package -``` - -The resulting fat JAR is stored in `virtualschema-jdbc-adapter-dist/target/virtualschema-jdbc-adapter-dist-3.1.0.jar`. - -## Uploading the Adapter JAR Archive - -You have to upload the JAR of the adapter to a bucket of your choice in the Exasol bucket file system (BucketFS). This will allow using the jar in the adapter script. - -Following steps are required to upload a file to a bucket: - -1. Make sure you have a bucket file system (BucketFS) and you know the port for either HTTP or HTTPS. - - This can be done in EXAOperation under "EXABuckets". E.g. 
the ID could be `bucketfs1`. The recommended default HTTP port is 2580, but any unused TCP port will work. As a best practice choose an unprivileged port (1024 or higher). - -1. Create a bucket in the BucketFS: - - 1. Click on the name of the BucketFS in EXAOperation and add a bucket there, e.g. `bucket1`. - - 1. Set the write password. - - 1. To keep this example simple we assume that the bucket is defined publicly readable. - -1. Now upload the file into this bucket, e.g. using curl (adapt the hostname, BucketFS port, bucket name and bucket write password). - -```bash -curl -X PUT -T virtualschema-jdbc-adapter-dist/target/virtualschema-jdbc-adapter-dist-3.1.0.jar \ - http://w:write-password@your.exasol.host.com:2580/bucket1/virtualschema-jdbc-adapter-dist-3.1.0.jar -``` - -If you later need to change the bucket passwords, select the bucket and click "Edit". - -See chapter 3.6.4. "The synchronous cluster file system BucketFS" in the EXASolution User Manual for more details about BucketFS. - -Check out Exasol's [BucketFS Explorer](https://github.com/exasol/bucketfs-explorer) as an alternative means of uploading your JAR archive. - -## Deploying JDBC Driver Files - -You have to upload the JDBC driver files of your remote database **twice**: - -* Upload all files of the JDBC driver into a bucket of your choice, so that they can be accessed from the adapter script. - This happens the same way as described above for the adapter JAR. You can use the same bucket. -* Upload all files of the JDBC driver as a JDBC driver in EXAOperation - - In EXAOperation go to Software -> JDBC Drivers - - Add the JDBC driver by specifying the JDBC main class and the prefix of the JDBC connection string - - Upload all files (one by one) to the specific JDBC to the newly added JDBC driver. - -Note that some JDBC drivers consist of several files and that you have to upload all of them. To find out which JAR you need, consult the [user guide page](user_guide.md). 
- -## Deploying the Adapter Script - -Create a schema to hold the adapter script. - -```sql -CREATE SCHEMA ADAPTER; -``` - -The SQL statement below creates the adapter script, defines the Java class that serves as entry point and tells the UDF framework where to find the libraries (JAR files) for Virtual Schema and database driver. - -```sql -CREATE SCHEMA ADAPTER; - -CREATE JAVA ADAPTER SCRIPT ADAPTER.JDBC_ADAPTER AS - %scriptclass com.exasol.adapter.RequestDispatcher; - %jar /buckets/your-bucket-fs/your-bucket/virtualschema-jdbc-adapter-dist-3.1.0.jar; - %jar /buckets/your-bucket-fs/your-bucket/.jar; -/ -``` - -The [user guide page](user_guide.md) has example statements for the individual dialects. \ No newline at end of file diff --git a/doc/user-guide/user_guide.md b/doc/user-guide/user_guide.md index a3cbbabe9..f570af95f 100644 --- a/doc/user-guide/user_guide.md +++ b/doc/user-guide/user_guide.md @@ -5,8 +5,6 @@ The purpose of this page is to provide detailed instructions for each of the sup * How does the **CREATE VIRTUAL SCHEMA** statement look like, i.e. which properties are required. * **Data source specific notes**, like authentication with Kerberos, supported capabilities or things to consider regarding the data type mapping. -As an entry point we recommend you follow the [step-by-step deployment guide](deploying_the_virtual_schema_adapter.md) which will link to this page whenever needed. - ## Before you Start Please note that the syntax for creating adapter scripts is not recognized by all SQL clients. @@ -55,10 +53,44 @@ Please note that the syntax for creating adapter scripts is not recognized by al ## Getting Started +This page contains common information applicable to all the dialects. You can also check more [detailed guides for each dialect](#list-of-supported-dialects) in the dialects' documentation. 
+ Before you can start using the JDBC adapter for virtual schemas you have to deploy the adapter and the JDBC driver of your data source in your Exasol database. -Please follow the [step-by-step deployment guide](deploying_the_virtual_schema_adapter.md). + +## Deploying JDBC Driver Files + +You have to upload the JDBC driver files of your remote database **twice** (except the BigQuery dialect): + +* Upload all files of the JDBC driver into a bucket of your choice, so that they can be accessed from the adapter script. + +* Upload all files of the JDBC driver as a JDBC driver in EXAOperation + - In EXAOperation go to Software -> JDBC Drivers + - Add the JDBC driver by specifying the JDBC main class and the prefix of the JDBC connection string + - Upload all files (one by one) to the newly added JDBC driver. + +Note that some JDBC drivers consist of several files and that you have to upload all of them. +To find out which JAR you need, check the individual dialects' documentation pages. + +## Deploying the Adapter Script + +Create a schema to hold the adapter script. + +```sql +CREATE SCHEMA SCHEMA_FOR_VS_SCRIPT; +``` + +The SQL statement below creates the adapter script, defines the Java class that serves as entry point and tells the UDF framework where to find the libraries (JAR files) for Virtual Schema and database driver. + +```sql +CREATE JAVA ADAPTER SCRIPT SCHEMA_FOR_VS_SCRIPT.JDBC_ADAPTER_SCRIPT AS + %scriptclass com.exasol.adapter.RequestDispatcher; + %jar /buckets/your-bucket-fs/your-bucket/virtualschema-jdbc-adapter-dist-3.1.0.jar; + %jar /buckets/your-bucket-fs/your-bucket/.jar; +/ +``` ## Using the Adapter + The following statements demonstrate how you can use virtual schemas with the JDBC adapter to connect to a Hive system. Please scroll down to see a list of all properties supported by the JDBC adapter. @@ -66,18 +98,18 @@ First we create a virtual schema using the JDBC adapter. 
The adapter will retrie The metadata (virtual tables, columns and data types) are then cached in Exasol. ```sql -CREATE CONNECTION hive_conn TO 'jdbc:hive2://localhost:10000/default' USER 'hive-usr' IDENTIFIED BY 'hive-pwd'; +CREATE CONNECTION JDBC_CONNECTION_HIVE TO 'jdbc:hive2://localhost:10000/default' USER 'hive-usr' IDENTIFIED BY 'hive-pwd'; -CREATE VIRTUAL SCHEMA hive USING adapter.jdbc_adapter WITH +CREATE VIRTUAL SCHEMA VIRTUAL_SCHEMA_HIVE USING SCHEMA_FOR_VS_SCRIPT.JDBC_ADAPTER_SCRIPT WITH SQL_DIALECT = 'HIVE' - CONNECTION_NAME = 'HIVE_CONN' + CONNECTION_NAME = 'JDBC_CONNECTION_HIVE' SCHEMA_NAME = 'default'; ``` We can now explore the tables in the virtual schema, just like for a regular schema: ```sql -OPEN SCHEMA hive; +OPEN SCHEMA VIRTUAL_SCHEMA_HIVE; SELECT * FROM cat; DESCRIBE clicks; ``` @@ -102,30 +134,31 @@ SELECT * from clicks JOIN native_schema.users on clicks.userid = users.id; You can refresh the schemas metadata, e.g. if tables were added in the remote system: ```sql -ALTER VIRTUAL SCHEMA hive REFRESH; -ALTER VIRTUAL SCHEMA hive REFRESH TABLES t1 t2; -- refresh only these tables +ALTER VIRTUAL SCHEMA VIRTUAL_SCHEMA_HIVE REFRESH; +ALTER VIRTUAL SCHEMA VIRTUAL_SCHEMA_HIVE REFRESH TABLES t1 t2; -- refresh only these tables ``` Or set properties. Depending on the adapter and the property you set this might update the metadata or not. In our example the metadata are affected, because afterwards the virtual schema will only expose two virtual tables. 
```sql -ALTER VIRTUAL SCHEMA hive SET TABLE_FILTER='CUSTOMERS, CLICKS'; +ALTER VIRTUAL SCHEMA VIRTUAL_SCHEMA_HIVE SET TABLE_FILTER='CUSTOMERS, CLICKS'; ``` Finally you can unset properties: ```sql -ALTER VIRTUAL SCHEMA hive SET TABLE_FILTER=null; +ALTER VIRTUAL SCHEMA VIRTUAL_SCHEMA_HIVE SET TABLE_FILTER=null; ``` Or drop the virtual schema: ```sql -DROP VIRTUAL SCHEMA hive CASCADE; +DROP VIRTUAL SCHEMA VIRTUAL_SCHEMA_HIVE CASCADE; ``` ### Adapter Properties + The following properties can be used to control the behavior of the JDBC adapter. As you see above, these properties can be defined in `CREATE VIRTUAL SCHEMA` or changed afterwards via `ALTER VIRTUAL SCHEMA SET`. Note that properties are always strings, like `TABLE_FILTER='T1,T2'`. diff --git a/src/main/java/com/exasol/adapter/dialects/bigquery/BigQuerySqlDialect.java b/src/main/java/com/exasol/adapter/dialects/bigquery/BigQuerySqlDialect.java index c188ad53b..5db6af92f 100644 --- a/src/main/java/com/exasol/adapter/dialects/bigquery/BigQuerySqlDialect.java +++ b/src/main/java/com/exasol/adapter/dialects/bigquery/BigQuerySqlDialect.java @@ -54,7 +54,7 @@ protected QueryRewriter createQueryRewriter() { if (this.properties.containsKey(BIGQUERY_ENABLE_IMPORT_PROPERTY) && "true".equalsIgnoreCase(this.properties.get(BIGQUERY_ENABLE_IMPORT_PROPERTY))) { LOGGER.warning("Attention: IMPORT is activated for the BIGQUERY dialect. 
" - + "Please be aware that it using IMPORT with this dialect requires disabling important security features and is therefore not recommended!"); + + "Please be aware that using IMPORT with this dialect requires disabling important security features and is therefore not recommended!"); return new BaseQueryRewriter(this, this.remoteMetadataReader, this.connection); } else { return new BigQueryQueryRewriter(this, this.remoteMetadataReader, this.connection); From bb98a5f015b52770d8d8a2f5f360c57bf87491ea Mon Sep 17 00:00:00 2001 From: Anastasiia Sergienko <46891819+AnastasiiaSergienko@users.noreply.github.com> Date: Mon, 17 Feb 2020 16:03:44 +0100 Subject: [PATCH 5/6] Apply suggestions from code review MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-Authored-By: Sebastian Bär --- doc/dialects/bigquery.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/doc/dialects/bigquery.md b/doc/dialects/bigquery.md index e47665534..b6052fe7c 100644 --- a/doc/dialects/bigquery.md +++ b/doc/dialects/bigquery.md @@ -28,7 +28,7 @@ CREATE SCHEMA SCHEMA_FOR_VS_SCRIPT; The SQL statement below creates the adapter script, defines the Java class that serves as entry point and tells the UDF framework where to find the libraries (JAR files) for Virtual Schema and database driver. -List all the jars from Magnitude Simba JDBC driver. +List all the JAR files from Magnitude Simba JDBC driver. ```sql CREATE JAVA ADAPTER SCRIPT SCHEMA_FOR_VS_SCRIPT.ADAPTER_SCRIPT_BIGQUERY AS @@ -98,7 +98,7 @@ If you need to use currently unsupported data types or find a way around known l ## Performance Please be aware that the current implementation of the dialect can only handle result sets with limited size (a few thousand rows). -If you need to proceed a large amount of data, please, contact our support team. 
Another implementation of the dialect with a speed improvement(using IMPORT INTO) is available, but not documented and announced officially on our GitHub due to: +If you need to process a large amount of data, please contact our support team. Another implementation of the dialect with a performance improvement (using `IMPORT INTO`) is available, but not documented for self-service because of 1. the complex installation process 2. security risks (a user has to disable the drivers' security manager to use it) @@ -111,4 +111,4 @@ In the following matrix you find combinations of JDBC driver and dialect version Virtual Schema Version| Big Query Version | Driver Name | Driver Version ----------------------|---------------------|---------------------------------------------|------------------------ - 3.0.2 | Google BigQuery 2.0 | Magnitude Simba JDBC driver for BigQuery | 1.2.2.1004 \ No newline at end of file + 3.0.2 | Google BigQuery 2.0 | Magnitude Simba JDBC driver for BigQuery | 1.2.2.1004 From c92dc783fd84e11cfcffd9ee1903f09a3b1ce95c Mon Sep 17 00:00:00 2001 From: Anastasiia Sergienko Date: Mon, 17 Feb 2020 16:05:19 +0100 Subject: [PATCH 6/6] #314: fixed review findings --- doc/dialects/bigquery.md | 2 -- 1 file changed, 2 deletions(-) diff --git a/doc/dialects/bigquery.md b/doc/dialects/bigquery.md index e47665534..61c91fdb9 100644 --- a/doc/dialects/bigquery.md +++ b/doc/dialects/bigquery.md @@ -103,8 +103,6 @@ If you need to proceed a large amount of data, please, contact our support team. 1. the complex installation process 2. security risks (a user has to disable the drivers' security manager to use it) -Please, try to avoid using IMPORT implementation if it is possible. Be aware of the security risks. - ## Testing information In the following matrix you find combinations of JDBC driver and dialect version that we tested.