Merge branch 'main' into pr/spark/resolver
Signed-off-by: Peng Huo <[email protected]>
penghuo committed Mar 9, 2023
2 parents 58478e7 + 98fb969 commit ef27088
Showing 30 changed files with 467 additions and 166 deletions.
8 changes: 4 additions & 4 deletions .github/workflows/sql-test-and-build-workflow.yml
@@ -25,11 +25,11 @@ jobs:
matrix:
entry:
- { os: ubuntu-latest, java: 11 }
- - { os: windows-latest, java: 11, os_build_args: -x doctest -x integTest -x jacocoTestReport -PbuildPlatform=windows }
- - { os: macos-latest, java: 11, os_build_args: -x doctest -x integTest -x jacocoTestReport }
+ - { os: windows-latest, java: 11, os_build_args: -x doctest -PbuildPlatform=windows }
+ - { os: macos-latest, java: 11}
- { os: ubuntu-latest, java: 17 }
- - { os: windows-latest, java: 17, os_build_args: -x doctest -x integTest -x jacocoTestReport -PbuildPlatform=windows }
- - { os: macos-latest, java: 17, os_build_args: -x doctest -x integTest -x jacocoTestReport }
+ - { os: windows-latest, java: 17, os_build_args: -x doctest -PbuildPlatform=windows }
+ - { os: macos-latest, java: 17 }
runs-on: ${{ matrix.entry.os }}

steps:
3 changes: 2 additions & 1 deletion .gitignore
@@ -1,4 +1,5 @@
*.class
*.http
+ .settings/
# Mobile Tools for Java (J2ME)
.mtj.tmp/
@@ -33,7 +34,6 @@ gen/
# git mergetool artifact
*.orig
gen
- *.tokens

# Python
*/.venv
@@ -46,3 +46,4 @@ gen
/.prom.pid.lock

.java-version
+ .worktrees
8 changes: 7 additions & 1 deletion build-tools/sqlplugin-coverage.gradle
@@ -16,6 +16,7 @@
* cluster is stopped and dump it to a file. Luckily our current security policy seems to allow this. This will also probably
* break if there are multiple nodes in the integTestCluster. But for now... it sorta works.
*/
+ import org.apache.tools.ant.taskdefs.condition.Os
apply plugin: 'jacoco'

// Get gradle to generate the required jvm agent arg for us using a dummy tasks of type Test. Unfortunately Elastic's
@@ -45,7 +46,12 @@ integTest.runner {
}

testClusters.integTest {
jvmArgs " ${dummyIntegTest.jacoco.getAsJvmArg()}"
if (Os.isFamily(Os.FAMILY_WINDOWS)) {
// Replacing build with absolute path to fix the error "error opening zip file or JAR manifest missing : /build/tmp/expandedArchives/..../jacocoagent.jar"
jvmArgs " ${dummyIntegTest.jacoco.getAsJvmArg()}".replace('build',"${buildDir}")
} else {
jvmArgs " ${dummyIntegTest.jacoco.getAsJvmArg()}".replace('javaagent:','javaagent:/')
}
systemProperty 'com.sun.management.jmxremote', "true"
systemProperty 'com.sun.management.jmxremote.authenticate', "false"
systemProperty 'com.sun.management.jmxremote.port', "7777"
com/wiredforcode/gradle/spawn/KillProcessTask.groovy
@@ -1,5 +1,6 @@
package com.wiredforcode.gradle.spawn

+ import org.apache.tools.ant.taskdefs.condition.Os
import org.gradle.api.tasks.TaskAction

class KillProcessTask extends DefaultSpawnTask {
@@ -12,7 +13,13 @@ class KillProcessTask extends DefaultSpawnTask {
}

def pid = pidFile.text
def process = "kill $pid".execute()
def killCommandLine
if (Os.isFamily(Os.FAMILY_WINDOWS)) {
killCommandLine = Arrays.asList("taskkill", "/F", "/T", "/PID", "$pid")
} else {
killCommandLine = Arrays.asList("kill", "$pid")
}
def process = killCommandLine.execute()

try {
process.waitFor()
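
For reference, the same cross-platform kill as a minimal plain-Java sketch (the class and method names here are illustrative, not part of this commit): taskkill /F forces termination, /T takes the child-process tree down with it, and the POSIX branch sends a plain SIGTERM.

import java.io.IOException;
import java.util.List;

class KillSketch {
    // Mirrors the Groovy task above: taskkill /F /T on Windows,
    // plain `kill <pid>` (SIGTERM) everywhere else.
    static void kill(String pid) throws IOException, InterruptedException {
        boolean windows = System.getProperty("os.name").toLowerCase().contains("win");
        List<String> command = windows
            ? List.of("taskkill", "/F", "/T", "/PID", pid)
            : List.of("kill", pid);
        new ProcessBuilder(command).inheritIO().start().waitFor();
    }
}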
com/wiredforcode/gradle/spawn/SpawnProcessTask.groovy
@@ -95,9 +95,18 @@ class SpawnProcessTask extends DefaultSpawnTask {
}

private int extractPidFromProcess(Process process) {
- def pidField = process.class.getDeclaredField('pid')
- pidField.accessible = true
-
- return pidField.getInt(process)
+ def pid
+ try {
+     // works since Java 9
+     def pidMethod = process.class.getDeclaredMethod('pid')
+     pidMethod.setAccessible(true)
+     pid = pidMethod.invoke(process)
+ } catch (ignored) {
+     // fall back to the UNIX-only implementation
+     def pidField = process.class.getDeclaredField('pid')
+     pidField.accessible = true
+     pid = pidField.getInt(process)
+ }
+ return pid
}
}
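
The equivalent fallback chain in plain Java, as a hedged sketch (names illustrative): Process.pid() is public API since Java 9, while the private 'pid' field exists only on UNIX-flavoured Process implementations.

import java.lang.reflect.Field;

class PidSketch {
    // Prefer the public Process.pid() (Java 9+); fall back to the
    // UNIX-only private 'pid' field on older runtimes, in the same
    // order the Groovy code above tries.
    static long pidOf(Process process) {
        try {
            return process.pid();
        } catch (UnsupportedOperationException e) {
            try {
                Field pidField = process.getClass().getDeclaredField("pid");
                pidField.setAccessible(true);
                return pidField.getInt(process);
            } catch (ReflectiveOperationException re) {
                throw new IllegalStateException("cannot determine pid", re);
            }
        }
    }
}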
10 changes: 5 additions & 5 deletions core/src/main/java/org/opensearch/sql/analysis/Analyzer.java
@@ -64,7 +64,7 @@
import org.opensearch.sql.data.model.ExprMissingValue;
import org.opensearch.sql.data.type.ExprCoreType;
import org.opensearch.sql.datasource.DataSourceService;
- import org.opensearch.sql.datasource.model.DataSource;
+ import org.opensearch.sql.datasource.model.DataSourceMetadata;
import org.opensearch.sql.exception.SemanticCheckException;
import org.opensearch.sql.expression.DSL;
import org.opensearch.sql.expression.Expression;
@@ -134,9 +134,9 @@ public LogicalPlan analyze(UnresolvedPlan unresolved, AnalysisContext context) {
@Override
public LogicalPlan visitRelation(Relation node, AnalysisContext context) {
QualifiedName qualifiedName = node.getTableQualifiedName();
- Set<String> allowedDataSourceNames = dataSourceService.getDataSources()
+ Set<String> allowedDataSourceNames = dataSourceService.getDataSourceMetadataSet()
.stream()
- .map(DataSource::getName)
+ .map(DataSourceMetadata::getName)
.collect(Collectors.toSet());
DataSourceSchemaIdentifierNameResolver dataSourceSchemaIdentifierNameResolver
= new DataSourceSchemaIdentifierNameResolver(qualifiedName.getParts(),
@@ -182,9 +182,9 @@ public LogicalPlan visitRelationSubquery(RelationSubquery node, AnalysisContext
@Override
public LogicalPlan visitTableFunction(TableFunction node, AnalysisContext context) {
QualifiedName qualifiedName = node.getFunctionName();
- Set<String> allowedDataSourceNames = dataSourceService.getDataSources()
+ Set<String> allowedDataSourceNames = dataSourceService.getDataSourceMetadataSet()
.stream()
- .map(DataSource::getName)
+ .map(DataSourceMetadata::getName)
.collect(Collectors.toSet());
DataSourceSchemaIdentifierNameResolver dataSourceSchemaIdentifierNameResolver
= new DataSourceSchemaIdentifierNameResolver(qualifiedName.getParts(),
org/opensearch/sql/datasource/DataSourceMetadataStorage.java (new file)
@@ -0,0 +1,64 @@
/*
 * Copyright OpenSearch Contributors
 * SPDX-License-Identifier: Apache-2.0
 */

package org.opensearch.sql.datasource;

import java.util.List;
import java.util.Optional;
import org.opensearch.sql.datasource.model.DataSource;
import org.opensearch.sql.datasource.model.DataSourceMetadata;

/**
 * Interface for DataSourceMetadata storage,
 * intended to be used only by DataSourceService.
 */
public interface DataSourceMetadataStorage {

/**
* Returns all DataSourceMetadata objects. The returned objects do not contain
* any credential info.
*
* @return list of {@link DataSourceMetadata}.
*/
List<DataSourceMetadata> getDataSourceMetadata();


/**
* Gets {@link DataSourceMetadata} corresponding to the
* datasourceName from underlying storage.
*
* @param datasourceName name of the {@link DataSource}.
*/
Optional<DataSourceMetadata> getDataSourceMetadata(String datasourceName);


/**
* Stores {@link DataSourceMetadata} in underlying storage.
*
* @param dataSourceMetadata {@link DataSourceMetadata}.
*/
void createDataSourceMetadata(DataSourceMetadata dataSourceMetadata);


/**
* Updates {@link DataSourceMetadata} in underlying storage.
*
* @param dataSourceMetadata {@link DataSourceMetadata}.
*/
void updateDataSourceMetadata(DataSourceMetadata dataSourceMetadata);


/**
* Deletes {@link DataSourceMetadata} corresponding to the
* datasourceName from underlying storage.
*
* @param datasourceName name of the {@link DataSource}.
*/
void deleteDataSourceMetadata(String datasourceName);

}
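
To make the new contract concrete, here is a hypothetical in-memory implementation — purely illustrative, since this commit defines the interface but does not ship a storage backend:

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.ConcurrentHashMap;

class InMemoryDataSourceMetadataStorage implements DataSourceMetadataStorage {
    private final Map<String, DataSourceMetadata> store = new ConcurrentHashMap<>();

    @Override
    public List<DataSourceMetadata> getDataSourceMetadata() {
        // Snapshot of everything currently stored.
        return new ArrayList<>(store.values());
    }

    @Override
    public Optional<DataSourceMetadata> getDataSourceMetadata(String datasourceName) {
        return Optional.ofNullable(store.get(datasourceName));
    }

    @Override
    public void createDataSourceMetadata(DataSourceMetadata metadata) {
        store.put(metadata.getName(), metadata);
    }

    @Override
    public void updateDataSourceMetadata(DataSourceMetadata metadata) {
        // Only replaces an existing entry; a create must come first.
        store.replace(metadata.getName(), metadata);
    }

    @Override
    public void deleteDataSourceMetadata(String datasourceName) {
        store.remove(datasourceName);
    }
}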
org/opensearch/sql/datasource/DataSourceService.java
@@ -15,26 +15,49 @@
public interface DataSourceService {

/**
- * Returns all DataSource objects.
+ * Returns {@link DataSource} corresponding to the DataSource name.
*
- * @return set of {@link DataSource}.
+ * @param dataSourceName Name of the {@link DataSource}.
+ * @return {@link DataSource}.
*/
- Set<DataSource> getDataSources();
+ DataSource getDataSource(String dataSourceName);


/**
- * Returns {@link DataSource} with corresponding to the DataSource name.
+ * Returns all dataSource Metadata objects. The returned objects won't contain
+ * any of the credential info.
*
- * @param dataSourceName Name of the {@link DataSource}.
- * @return {@link DataSource}.
+ * @return set of {@link DataSourceMetadata}.
*/
- DataSource getDataSource(String dataSourceName);
+ Set<DataSourceMetadata> getDataSourceMetadataSet();

/**
* Register {@link DataSource} defined by {@link DataSourceMetadata}.
*
* @param metadatas list of {@link DataSourceMetadata}.
*/
- void addDataSource(DataSourceMetadata... metadatas);
+ void createDataSource(DataSourceMetadata... metadatas);

+ /**
+  * Updates {@link DataSource} corresponding to dataSourceMetadata.
+  *
+  * @param dataSourceMetadata {@link DataSourceMetadata}.
+  */
+ void updateDataSource(DataSourceMetadata dataSourceMetadata);
+
+
+ /**
+  * Deletes {@link DataSource} corresponding to the DataSource name.
+  *
+  * @param dataSourceName name of the {@link DataSource}.
+  */
+ void deleteDataSource(String dataSourceName);
+
+ /**
+  * This method is to bootstrap
+  * datasources during the startup of the plugin.
+  */
+ void bootstrapDataSources();

/**
* remove all the registered {@link DataSource}.
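
Putting the reshaped interface together, a hedged usage sketch (the "prometheus" name, the "prometheus.uri" property key, and DataSourceType.PROMETHEUS are assumptions for illustration; in this commit DataSourceServiceImpl still rejects the mutating calls):

import com.google.common.collect.ImmutableMap;

class DataSourceServiceUsage {
    static void demo(DataSourceService service) {
        // addDataSource(...) is now createDataSource(...).
        service.createDataSource(new DataSourceMetadata("prometheus",
            DataSourceType.PROMETHEUS, ImmutableMap.of("prometheus.uri", "http://localhost:9090")));

        // Metadata listing replaces getDataSources(); no credentials are exposed.
        service.getDataSourceMetadataSet().forEach(m ->
            System.out.println(m.getName() + " -> " + m.getConnector()));

        // The fully built DataSource is still available by name.
        DataSource ds = service.getDataSource("prometheus");
        System.out.println(ds.getConnectorType());
    }
}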
org/opensearch/sql/datasource/DataSourceServiceImpl.java
@@ -7,6 +7,8 @@

import com.google.common.base.Preconditions;
import com.google.common.base.Strings;
+ import com.google.common.collect.ImmutableMap;
+ import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
@@ -45,8 +47,12 @@ public DataSourceServiceImpl(Set<DataSourceFactory> dataSourceFactories) {
}

@Override
- public Set<DataSource> getDataSources() {
-     return Set.copyOf(dataSourceMap.values());
+ public Set<DataSourceMetadata> getDataSourceMetadataSet() {
+     return dataSourceMap.values().stream()
+         .map(dataSource
+             -> new DataSourceMetadata(dataSource.getName(),
+                 dataSource.getConnectorType(), ImmutableMap.of()))
+         .collect(Collectors.toSet());
}

@Override
@@ -59,7 +65,7 @@ public DataSource getDataSource(String dataSourceName) {
}

@Override
- public void addDataSource(DataSourceMetadata... metadatas) {
+ public void createDataSource(DataSourceMetadata... metadatas) {
for (DataSourceMetadata metadata : metadatas) {
validateDataSourceMetaData(metadata);
dataSourceMap.put(
@@ -68,6 +74,21 @@ public void addDataSource(DataSourceMetadata... metadatas) {
}
}

+ @Override
+ public void updateDataSource(DataSourceMetadata dataSourceMetadata) {
+     throw new UnsupportedOperationException("will be supported in future");
+ }
+
+ @Override
+ public void deleteDataSource(String dataSourceName) {
+     throw new UnsupportedOperationException("will be supported in future");
+ }
+
+ @Override
+ public void bootstrapDataSources() {
+     throw new UnsupportedOperationException("will be supported in future");
+ }

@Override
public void clear() {
dataSourceMap.clear();
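
One detail worth spelling out: getDataSourceMetadataSet() above rebuilds each metadata object with ImmutableMap.of() as its properties, so stored connection credentials never leave the service. A small sketch of that invariant (method names taken from the diff, the helper class is made up):

class CredentialStrippingCheck {
    // Every metadata object handed out by the impl above carries an
    // empty properties map, regardless of what the datasource was
    // created with. (assert requires running with -ea.)
    static void verify(DataSourceService service) {
        for (DataSourceMetadata metadata : service.getDataSourceMetadataSet()) {
            assert metadata.getProperties().isEmpty();
        }
    }
}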
org/opensearch/sql/datasource/model/DataSourceMetadata.java
@@ -13,14 +13,19 @@
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.collect.ImmutableMap;
import java.util.Map;
+ import lombok.AllArgsConstructor;
import lombok.EqualsAndHashCode;
import lombok.Getter;
+ import lombok.NoArgsConstructor;
import lombok.RequiredArgsConstructor;
import lombok.Setter;
import org.opensearch.sql.datasource.DataSourceService;

@JsonIgnoreProperties(ignoreUnknown = true)
@Getter
@Setter
+ @AllArgsConstructor
+ @NoArgsConstructor
@EqualsAndHashCode
public class DataSourceMetadata {

@@ -39,10 +44,7 @@ public class DataSourceMetadata {
* {@link DataSource} to {@link DataSourceService}.
*/
public static DataSourceMetadata defaultOpenSearchDataSourceMetadata() {
- DataSourceMetadata dataSourceMetadata = new DataSourceMetadata();
- dataSourceMetadata.setName(DEFAULT_DATASOURCE_NAME);
- dataSourceMetadata.setConnector(DataSourceType.OPENSEARCH);
- dataSourceMetadata.setProperties(ImmutableMap.of());
- return dataSourceMetadata;
+ return new DataSourceMetadata(DEFAULT_DATASOURCE_NAME,
+     DataSourceType.OPENSEARCH, ImmutableMap.of());
}
}
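
The added @NoArgsConstructor is what keeps Jackson deserialization working once the all-args constructor exists. A hedged sketch of parsing metadata from JSON (the helper class is made up; the JSON field names follow the getters above):

import com.fasterxml.jackson.databind.ObjectMapper;

class MetadataJsonSketch {
    // Jackson instantiates via the no-args constructor and setters;
    // unknown fields are skipped thanks to @JsonIgnoreProperties.
    static DataSourceMetadata parse(String json) throws Exception {
        return new ObjectMapper().readValue(json, DataSourceMetadata.class);
    }
}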
DataSourceTableScan.java (file name inferred from the class content below)
@@ -18,7 +18,7 @@
import org.opensearch.sql.data.model.ExprValue;
import org.opensearch.sql.data.model.ExprValueUtils;
import org.opensearch.sql.datasource.DataSourceService;
- import org.opensearch.sql.datasource.model.DataSource;
+ import org.opensearch.sql.datasource.model.DataSourceMetadata;
import org.opensearch.sql.storage.TableScanOperator;

/**
@@ -47,14 +47,15 @@ public String explain() {
@Override
public void open() {
List<ExprValue> exprValues = new ArrayList<>();
- Set<DataSource> dataSources = dataSourceService.getDataSources();
- for (DataSource dataSource : dataSources) {
+ Set<DataSourceMetadata> dataSourceMetadataSet
+     = dataSourceService.getDataSourceMetadataSet();
+ for (DataSourceMetadata dataSourceMetadata : dataSourceMetadataSet) {
exprValues.add(
new ExprTupleValue(new LinkedHashMap<>(ImmutableMap.of(
"DATASOURCE_NAME",
- ExprValueUtils.stringValue(dataSource.getName()),
+ ExprValueUtils.stringValue(dataSourceMetadata.getName()),
"CONNECTOR_TYPE",
- ExprValueUtils.stringValue(dataSource.getConnectorType().name())))))
+ ExprValueUtils.stringValue(dataSourceMetadata.getConnector().name())))));
}
iterator = exprValues.iterator();
}
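
For clarity, this is the shape of one row that open() now emits — a two-column tuple per registered datasource (the "prometheus" value is illustrative):

import com.google.common.collect.ImmutableMap;
import java.util.LinkedHashMap;
import org.opensearch.sql.data.model.ExprTupleValue;
import org.opensearch.sql.data.model.ExprValueUtils;

class ScanRowSketch {
    // Matches the construction in the diff: DATASOURCE_NAME from
    // DataSourceMetadata.getName(), CONNECTOR_TYPE from getConnector().name().
    static ExprTupleValue exampleRow() {
        return new ExprTupleValue(new LinkedHashMap<>(ImmutableMap.of(
            "DATASOURCE_NAME", ExprValueUtils.stringValue("prometheus"),
            "CONNECTOR_TYPE", ExprValueUtils.stringValue("PROMETHEUS"))));
    }
}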
[Diff truncated: the remaining changed files of the 30 are not shown.]