Replace hdp2.6 and cdh5.15 with hdp3.1 #18931

Merged · 5 commits · Sep 13, 2023
92 changes: 17 additions & 75 deletions .github/workflows/ci.yml
@@ -902,14 +902,15 @@ jobs:
cat <<EOF > .github/test-pt-matrix.yaml
config:
- default
- hdp3
# TODO: config-apache-hive3
suite:
- suite-1
- suite-2
- suite-3
# suite-4 does not exist
- suite-5
- suite-6-non-generic
- suite-7-non-generic
- suite-8-non-generic
- suite-azure
- suite-delta-lake-databricks91
- suite-delta-lake-databricks104
@@ -920,53 +921,45 @@ jobs:
- suite-clients
- suite-functions
- suite-tpch
- suite-tpcds
- suite-storage-formats-detailed
- suite-parquet
- suite-oauth2
- suite-ldap
- suite-compatibility
- suite-all-connectors-smoke
- suite-delta-lake-oss
- suite-kafka
- suite-cassandra
- suite-clickhouse
- suite-mysql
- suite-iceberg
- suite-hudi
- suite-ignite
exclude:
- config: default
ignore exclusion if: >-
${{ github.event_name != 'pull_request'
|| github.event.pull_request.head.repo.full_name == github.repository
|| contains(github.event.pull_request.labels.*.name, 'tests:all')
|| contains(github.event.pull_request.labels.*.name, 'tests:hive')
}}

- suite: suite-azure
config: default
- suite: suite-azure
ignore exclusion if: >-
${{ env.CI_SKIP_SECRETS_PRESENCE_CHECKS != '' ||
secrets.AZURE_ABFS_CONTAINER != '' ||
secrets.AZURE_ABFS_ACCOUNT != '' ||
secrets.AZURE_ABFS_ACCESSKEY != '' }}

- suite: suite-gcs
config: default
- suite: suite-gcs
ignore exclusion if: >-
${{ env.CI_SKIP_SECRETS_PRESENCE_CHECKS != '' || secrets.GCP_CREDENTIALS_KEY != '' }}

- suite: suite-delta-lake-databricks91
config: hdp3
- suite: suite-delta-lake-databricks91
ignore exclusion if: >-
${{ env.CI_SKIP_SECRETS_PRESENCE_CHECKS != '' || secrets.DATABRICKS_TOKEN != '' }}
- suite: suite-delta-lake-databricks104
config: hdp3
- suite: suite-delta-lake-databricks104
ignore exclusion if: >-
${{ env.CI_SKIP_SECRETS_PRESENCE_CHECKS != '' || secrets.DATABRICKS_TOKEN != '' }}
- suite: suite-delta-lake-databricks113
config: hdp3
- suite: suite-delta-lake-databricks113
ignore exclusion if: >-
${{ env.CI_SKIP_SECRETS_PRESENCE_CHECKS != '' || secrets.DATABRICKS_TOKEN != '' }}
- suite: suite-delta-lake-databricks122
config: hdp3
- suite: suite-delta-lake-databricks122
ignore exclusion if: >-
${{ env.CI_SKIP_SECRETS_PRESENCE_CHECKS != '' || secrets.DATABRICKS_TOKEN != '' }}
- suite: suite-delta-lake-databricks133
config: hdp3
- suite: suite-delta-lake-databricks133
ignore exclusion if: >-
${{ env.CI_SKIP_SECRETS_PRESENCE_CHECKS != '' || secrets.DATABRICKS_TOKEN != '' }}
@@ -983,60 +976,9 @@ jobs:
# value of the property, and the exclusion will apply normally.
- "false"
include:
# this suite is not meant to be run with different configs
- config: default
suite: suite-6-non-generic
# this suite is not meant to be run with different configs
- config: default
suite: suite-7-non-generic
# this suite is not meant to be run with different configs
- config: default
suite: suite-8-non-generic
# this suite is not meant to be run with different configs
- config: default
suite: suite-tpcds
# this suite is not meant to be run with different configs
- config: default
suite: suite-parquet
# this suite is not meant to be run with different configs
- config: default
suite: suite-oauth2
# this suite is not meant to be run with different configs
- config: default
suite: suite-ldap
# this suite is not meant to be run with different configs
- config: default
suite: suite-compatibility
# this suite is designed specifically for apache-hive3. TODO remove the suite once we can run all regular tests on apache-hive3.
- config: apache-hive3
suite: suite-hms-only
# this suite is not meant to be run with different configs
- config: default
suite: suite-all-connectors-smoke
# this suite is not meant to be run with different configs
- config: default
suite: suite-delta-lake-oss
# this suite is not meant to be run with different configs
- config: default
suite: suite-kafka
# this suite is not meant to be run with different configs
- config: default
suite: suite-cassandra
# this suite is not meant to be run with different configs
- config: default
suite: suite-clickhouse
# this suite is not meant to be run with different configs
- config: default
suite: suite-mysql
# this suite is not meant to be run with different configs
- config: default
suite: suite-iceberg
# this suite is not meant to be run with different configs
- config: default
suite: suite-hudi
# this suite is not meant to be run with different configs
- config: default
suite: suite-ignite
EOF
- name: Build PT matrix (all)
if: |
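For context on the `ignore exclusion if` entries in the matrix heredoc above: the `${{ ... }}` expressions are rendered by the Actions runner before the heredoc is written to disk, so the matrix-building step only ever sees literal strings like `true` or `false`. A minimal sketch of that filtering logic, in Python with PyYAML, using a hypothetical helper name (the actual script in the repository may be structured differently):

```python
import yaml  # PyYAML; assumed available in the CI environment

def apply_exclusions(matrix: dict) -> dict:
    """Keep 'exclude' entries as-is, unless they carry a truthy
    'ignore exclusion if' flag, in which case the exclusion itself is
    dropped and the excluded combination stays in the matrix."""
    effective = []
    for entry in matrix.pop("exclude", []):
        # By this point the ${{ ... }} expression has already been rendered
        # to a plain string such as 'true' or 'false' by the Actions runner.
        if str(entry.pop("ignore exclusion if", "false")).lower() == "true":
            continue  # exclusion ignored: the combination still runs
        effective.append(entry)
    matrix["exclude"] = effective
    return matrix

with open(".github/test-pt-matrix.yaml") as f:
    print(apply_exclusions(yaml.safe_load(f)))
```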
@@ -32,7 +32,7 @@ public class Hadoop
{
private static final Logger log = Logger.get(Hadoop.class);

private static final String IMAGE = "ghcr.io/trinodb/testing/hdp2.6-hive:" + getDockerImagesVersion();
private static final String IMAGE = "ghcr.io/trinodb/testing/hdp3.1-hive:" + getDockerImagesVersion();

private static final int HDFS_PORT = 9000;


This file was deleted.

@@ -57,7 +57,7 @@ public void testAnalyzeCorruptColumnStatisticsOnEmptyTable()

// ANALYZE and drop_stats are unsupported for tables having broken column statistics
assertThatThrownBy(() -> query("ANALYZE " + tableName))
.hasMessage("%s: Socket is closed by peer.", hiveMinioDataLake.getHiveHadoop().getHiveMetastoreEndpoint())
.hasMessage("Unexpected 2 statistics for 1 columns")
.hasStackTraceContaining("ThriftHiveMetastore.setTableColumnStatistics");

assertThatThrownBy(() -> query("CALL system.drop_stats('tpch', '" + tableName + "')"))
@@ -72,9 +72,33 @@ private void prepareBrokenColumnStatisticsTable(String tableName)

// Insert duplicated row to simulate broken column statistics status https://github.com/trinodb/trino/issues/13787
assertEquals(onMetastore("SELECT COUNT(1) FROM TAB_COL_STATS WHERE db_name = 'tpch' AND table_name = '" + tableName + "'"), "1");
onMetastore("INSERT INTO TAB_COL_STATS " +
"SELECT cs_id + 1, db_name, table_name, column_name, column_type, tbl_id, long_low_value, long_high_value, double_high_value, double_low_value, big_decimal_low_value, big_decimal_high_value, num_nulls, num_distincts, avg_col_len, max_col_len, num_trues, num_falses, last_analyzed " +
"FROM TAB_COL_STATS WHERE db_name = 'tpch' AND table_name = '" + tableName + "'");
onMetastore("""
INSERT INTO TAB_COL_STATS
SELECT
cs_id + 1,
cat_name,
db_name,
table_name,
column_name,
column_type,
tbl_id,
long_low_value,
long_high_value,
double_high_value,
double_low_value,
big_decimal_low_value,
big_decimal_high_value,
num_nulls,
num_distincts,
bit_vector,
avg_col_len,
max_col_len,
num_trues,
num_falses,
last_analyzed
FROM TAB_COL_STATS
WHERE db_name = 'tpch' AND table_name = '%s'
""".formatted(tableName));
assertEquals(onMetastore("SELECT COUNT(1) FROM TAB_COL_STATS WHERE db_name = 'tpch' AND table_name = '" + tableName + "'"), "2");
}

@@ -103,9 +127,34 @@ private void prepareBrokenPartitionStatisticsTable(String tableName)

// Insert duplicated row to simulate broken partition statistics status https://github.com/trinodb/trino/issues/13787
assertEquals(onMetastore("SELECT COUNT(1) FROM PART_COL_STATS WHERE db_name = 'tpch' AND table_name = '" + tableName + "'"), "1");
onMetastore("INSERT INTO PART_COL_STATS " +
"SELECT cs_id + 1, db_name, table_name, partition_name, column_name, column_type, part_id, long_low_value, long_high_value, double_high_value, double_low_value, big_decimal_low_value, big_decimal_high_value, num_nulls, num_distincts, avg_col_len, max_col_len, num_trues, num_falses, last_analyzed " +
"FROM PART_COL_STATS WHERE db_name = 'tpch' AND table_name = '" + tableName + "'");
onMetastore("""
INSERT INTO PART_COL_STATS
SELECT
cs_id + 1,
cat_name,
db_name,
table_name,
partition_name,
column_name,
column_type,
part_id,
long_low_value,
long_high_value,
double_high_value,
double_low_value,
big_decimal_low_value,
big_decimal_high_value,
num_nulls,
num_distincts,
bit_vector,
avg_col_len,
max_col_len,
num_trues,
num_falses,
last_analyzed
FROM PART_COL_STATS
WHERE db_name = 'tpch' AND table_name = '%s'
""".formatted(tableName));
assertEquals(onMetastore("SELECT COUNT(1) FROM PART_COL_STATS WHERE db_name = 'tpch' AND table_name = '" + tableName + "'"), "2");
}

@@ -33,7 +33,6 @@ public class HiveHadoop
{
private static final Logger log = Logger.get(HiveHadoop.class);

public static final String DEFAULT_IMAGE = "ghcr.io/trinodb/testing/hdp2.6-hive:" + TestingProperties.getDockerImagesVersion();
public static final String HIVE3_IMAGE = "ghcr.io/trinodb/testing/hdp3.1-hive:" + TestingProperties.getDockerImagesVersion();

public static final String HOST_NAME = "hadoop-master";
@@ -104,7 +103,7 @@ public static class Builder
{
private Builder()
{
this.image = DEFAULT_IMAGE;
this.image = HIVE3_IMAGE;
this.hostName = HOST_NAME;
this.exposePorts = ImmutableSet.of(HIVE_METASTORE_PORT);
}
@@ -51,7 +51,7 @@ public class HiveMinioDataLake

public HiveMinioDataLake(String bucketName)
{
this(bucketName, HiveHadoop.DEFAULT_IMAGE);
this(bucketName, HiveHadoop.HIVE3_IMAGE);
}

public HiveMinioDataLake(String bucketName, String hiveHadoopImage)
2 changes: 1 addition & 1 deletion pom.xml
@@ -189,7 +189,7 @@
<dep.jmh.version>1.37</dep.jmh.version>
<dep.junit.version>5.10.0</dep.junit.version>

<dep.docker.images.version>81</dep.docker.images.version>
<dep.docker.images.version>86</dep.docker.images.version>

<!--
America/Bahia_Banderas has:
@@ -18,7 +18,7 @@
public final class EnvironmentDefaults
{
public static final String DOCKER_IMAGES_VERSION = TestingProperties.getDockerImagesVersion();
public static final String HADOOP_BASE_IMAGE = "ghcr.io/trinodb/testing/hdp2.6-hive";
public static final String HADOOP_BASE_IMAGE = "ghcr.io/trinodb/testing/hdp3.1-hive";
public static final String HADOOP_IMAGES_VERSION = DOCKER_IMAGES_VERSION;
public static final String TEMPTO_ENVIRONMENT_CONFIG = "/dev/null";

@@ -81,6 +81,8 @@ public static DockerContainer createHadoopContainer(DockerFiles dockerFiles, Por
.withCopyFileToContainer(forHostPath(dockerFiles.getDockerFilesHostPath("health-checks/hadoop-health-check.sh")), CONTAINER_HEALTH_D + "hadoop-health-check.sh")
.withCopyFileToContainer(forHostPath(dockerFiles.getDockerFilesHostPath("common/hadoop/hadoop-run.sh")), "/usr/local/hadoop-run.sh")
.withCopyFileToContainer(forHostPath(dockerFiles.getDockerFilesHostPath("common/hadoop/apply-config-overrides.sh")), CONTAINER_HADOOP_INIT_D + "00-apply-config-overrides.sh")
// When Hive performs implicit coercion to/from timestamp for ORC files, it depends on the timezone of the HiveServer
.withEnv("TZ", "UTC")
.withCommand("/usr/local/hadoop-run.sh")
.withExposedLogPaths("/var/log/hadoop-yarn", "/var/log/hadoop-hdfs", "/var/log/hive", "/var/log/container-health.log")
.withStartupCheckStrategy(new IsRunningStartupCheckStrategy())
@@ -95,12 +97,10 @@ public static DockerContainer createHadoopContainer(DockerFiles dockerFiles, Por
portBinder.exposePort(container, 8088);
portBinder.exposePort(container, 9000);
portBinder.exposePort(container, 9083); // Metastore Thrift
portBinder.exposePort(container, 9864); // DataNode Web UI since Hadoop 3
portBinder.exposePort(container, 9870); // NameNode Web UI since Hadoop 3
portBinder.exposePort(container, 9864); // DataNode Web UI
portBinder.exposePort(container, 9870); // NameNode Web UI
portBinder.exposePort(container, 10000); // HiveServer2
portBinder.exposePort(container, 19888);
portBinder.exposePort(container, 50070); // NameNode Web UI prior to Hadoop 3
portBinder.exposePort(container, 50075); // DataNode Web UI prior to Hadoop 3

return container;
}
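The `TZ` pin added in the hunk above matters because, per the new code comment, implicit timestamp coercion for ORC files follows the HiveServer's timezone. A small standalone illustration of the effect being pinned down (plain Python, not part of the PR) shows the same instant rendering as different wall-clock timestamps under different zones:

```python
from datetime import datetime, timezone
from zoneinfo import ZoneInfo  # stdlib since Python 3.9

# One absolute instant...
instant = datetime(2023, 9, 13, 0, 0, tzinfo=timezone.utc)

# ...renders as different local timestamps depending on the process timezone,
# which is why the container pins TZ=UTC for reproducible coercion results.
print(instant.astimezone(ZoneInfo("UTC")).replace(tzinfo=None))               # 2023-09-13 00:00:00
print(instant.astimezone(ZoneInfo("America/New_York")).replace(tzinfo=None))  # 2023-09-12 20:00:00
```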
@@ -48,13 +48,12 @@ public HadoopKerberosKms(DockerFiles dockerFiles, EnvironmentConfig environmentC
@Override
public void extendEnvironment(Environment.Builder builder)
{
// TODO (https://github.com/trinodb/trino/issues/1652) create images with HDP and KMS
String dockerImageName = "ghcr.io/trinodb/testing/cdh5.15-hive-kerberized-kms:" + hadoopImagesVersion;
String dockerImageName = "ghcr.io/trinodb/testing/hdp3.1-hive-kerberized-kms:" + hadoopImagesVersion;

builder.configureContainer(HADOOP, container -> {
container.setDockerImageName(dockerImageName);
container
.withCopyFileToContainer(forHostPath(configDir.getPath("kms-core-site.xml")), "/etc/hadoop-kms/conf/core-site.xml");
.withCopyFileToContainer(forHostPath(configDir.getPath("kms-core-site.xml")), "/opt/hadoop/etc/hadoop/core-site.xml");
});

builder.configureContainer(COORDINATOR,
@@ -46,7 +46,7 @@ public void extendEnvironment(Environment.Builder builder)
HADOOP,
container ->
container
.withCopyFileToContainer(forHostPath(configDir.getPath("kms-acls.xml")), "/etc/hadoop-kms/conf/kms-acls.xml")
.withCopyFileToContainer(forHostPath(configDir.getPath("kms-acls.xml")), "/opt/hadoop/etc/kms-acls.xml")
.withCopyFileToContainer(forHostPath(configDir.getPath("hiveserver2-site.xml")), "/etc/hive/conf/hiveserver2-site.xml"));
}

@@ -25,6 +25,6 @@ public String getHadoopBaseImage()
@Override
public String getTemptoEnvironmentConfigFile()
{
return "/docker/presto-product-tests/conf/tempto/tempto-configuration-for-hive3.yaml,/docker/presto-product-tests/conf/tempto/tempto-configuration-for-hms-only.yaml";
return "/docker/presto-product-tests/conf/tempto/tempto-configuration-for-hms-only.yaml";
}
}