Skip to content

Commit

Permalink
Merge branch '2.x' into merge-2.x-after-release
Browse files Browse the repository at this point in the history
Signed-off-by: Chen Dai <[email protected]>
  • Loading branch information
dai-chen committed Nov 4, 2022
2 parents 73787b7 + b56edc7 commit fbd6c7a
Show file tree
Hide file tree
Showing 254 changed files with 7,763 additions and 1,570 deletions.
11 changes: 6 additions & 5 deletions .github/workflows/sql-odbc-main.yml
Original file line number Diff line number Diff line change
Expand Up @@ -13,14 +13,15 @@ env:
CI_OUTPUT_PATH: "sql-odbc/ci-output"
ODBC_LIB_PATH: "./build/odbc/lib"
ODBC_BIN_PATH: "./build/odbc/bin"
ODBC_BUILD_PATH: "./build/odbc/build"
AWS_SDK_INSTALL_PATH: "./build/aws-sdk/install"
ODBC_BUILD_PATH: "./build/odbc/cmake"
VCPKG_X64_INSTALL_PATH: ".\\src\\vcpkg_installed\\x64-windows"
VCPKG_X86_INSTALL_PATH: ".\\src\\vcpkg_installed\\x86-windows"

# Tests are disabled (commented out) in all jobs because they fail and/or are outdated
# Keeping them for the brighter future when we can re-activate them
jobs:
build-mac:
runs-on: macos-10.15
runs-on: macos-12
defaults:
run:
working-directory: sql-odbc
Expand Down Expand Up @@ -103,7 +104,7 @@ jobs:
- name: build-installer
if: success()
run: |
.\scripts\build_installer.ps1 Release Win32 .\src $Env:ODBC_BUILD_PATH $Env:AWS_SDK_INSTALL_PATH
.\scripts\build_installer.ps1 Release Win32 .\src $Env:ODBC_BUILD_PATH $Env:VCPKG_X86_INSTALL_PATH
#- name: test
# run: |
# cp .\\libraries\\VisualLeakDetector\\bin32\\*.* .\\bin32\\Release
Expand Down Expand Up @@ -148,7 +149,7 @@ jobs:
- name: build-installer
if: success()
run: |
.\scripts\build_installer.ps1 Release x64 .\src $Env:ODBC_BUILD_PATH $Env:AWS_SDK_INSTALL_PATH
.\scripts\build_installer.ps1 Release x64 .\src $Env:ODBC_BUILD_PATH $Env:VCPKG_X64_INSTALL_PATH
#- name: test
# run: |
# cp .\\libraries\\VisualLeakDetector\\bin64\\*.* .\\bin64\\Release
Expand Down
11 changes: 9 additions & 2 deletions .github/workflows/sql-test-workflow.yml
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,13 @@ name: SQL Plugin Tests

on:
workflow_dispatch:
inputs:
name:
required: false
type: string

run-name:
${{ inputs.name == '' && format('{0} @ {1}', github.ref_name, github.sha) || inputs.name }}

jobs:
build:
Expand Down Expand Up @@ -64,10 +71,10 @@ jobs:
- name: Verify test results
run: |
if [[ -e failures.log ]]
if [[ -e report.log ]]
then
echo "## FAILED TESTS :facepalm::warning::bangbang:" >> $GITHUB_STEP_SUMMARY
cat failures.log >> $GITHUB_STEP_SUMMARY
cat report.log >> $GITHUB_STEP_SUMMARY
exit 1
fi
Expand Down
1 change: 1 addition & 0 deletions build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ buildscript {
opensearch_version = System.getProperty("opensearch.version", "2.4.0-SNAPSHOT")
spring_version = "5.3.22"
jackson_version = "2.13.4"
jackson_databind_version = "2.13.4.2"
isSnapshot = "true" == System.getProperty("build.snapshot", "true")
buildVersionQualifier = System.getProperty("build.version_qualifier", "")
version_tokens = opensearch_version.tokenize('-')
Expand Down
2 changes: 1 addition & 1 deletion core/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,7 @@ dependencies {
api group: 'com.facebook.presto', name: 'presto-matching', version: '0.240'
api group: 'org.apache.commons', name: 'commons-math3', version: '3.6.1'
api "com.fasterxml.jackson.core:jackson-core:${jackson_version}"
api "com.fasterxml.jackson.core:jackson-databind:${jackson_version}"
api "com.fasterxml.jackson.core:jackson-databind:${jackson_databind_version}"
api "com.fasterxml.jackson.core:jackson-annotations:${jackson_version}"
api project(':common')

Expand Down
21 changes: 21 additions & 0 deletions core/src/main/java/org/opensearch/sql/CatalogSchemaName.java
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
/*
*
* * Copyright OpenSearch Contributors
* * SPDX-License-Identifier: Apache-2.0
*
*/

package org.opensearch.sql;

import lombok.Getter;
import lombok.RequiredArgsConstructor;

/**
 * Immutable value object pairing a catalog name with a schema name, used to
 * address a table as {@code catalog.schema.table}.
 */
public class CatalogSchemaName {

  private final String catalogName;

  private final String schemaName;

  /**
   * Create a catalog/schema name pair.
   *
   * @param catalogName name of the catalog.
   * @param schemaName  name of the schema within the catalog.
   */
  public CatalogSchemaName(String catalogName, String schemaName) {
    this.catalogName = catalogName;
    this.schemaName = schemaName;
  }

  public String getCatalogName() {
    return catalogName;
  }

  public String getSchemaName() {
    return schemaName;
  }
}
53 changes: 42 additions & 11 deletions core/src/main/java/org/opensearch/sql/analysis/Analyzer.java
Original file line number Diff line number Diff line change
Expand Up @@ -11,11 +11,18 @@
import static org.opensearch.sql.ast.tree.Sort.SortOrder.ASC;
import static org.opensearch.sql.ast.tree.Sort.SortOrder.DESC;
import static org.opensearch.sql.data.type.ExprCoreType.STRUCT;
import static org.opensearch.sql.utils.MLCommonsConstants.ACTION;
import static org.opensearch.sql.utils.MLCommonsConstants.MODELID;
import static org.opensearch.sql.utils.MLCommonsConstants.PREDICT;
import static org.opensearch.sql.utils.MLCommonsConstants.RCF_ANOMALOUS;
import static org.opensearch.sql.utils.MLCommonsConstants.RCF_ANOMALY_GRADE;
import static org.opensearch.sql.utils.MLCommonsConstants.RCF_SCORE;
import static org.opensearch.sql.utils.MLCommonsConstants.RCF_TIMESTAMP;
import static org.opensearch.sql.utils.MLCommonsConstants.STATUS;
import static org.opensearch.sql.utils.MLCommonsConstants.TASKID;
import static org.opensearch.sql.utils.MLCommonsConstants.TIME_FIELD;
import static org.opensearch.sql.utils.MLCommonsConstants.TRAIN;
import static org.opensearch.sql.utils.MLCommonsConstants.TRAINANDPREDICT;
import static org.opensearch.sql.utils.SystemIndexUtils.CATALOGS_TABLE_NAME;

import com.google.common.collect.ImmutableList;
Expand All @@ -30,7 +37,7 @@
import java.util.stream.Collectors;
import org.apache.commons.lang3.tuple.ImmutablePair;
import org.apache.commons.lang3.tuple.Pair;
import org.opensearch.sql.analysis.model.CatalogSchemaIdentifierName;
import org.opensearch.sql.CatalogSchemaName;
import org.opensearch.sql.analysis.symbol.Namespace;
import org.opensearch.sql.analysis.symbol.Symbol;
import org.opensearch.sql.ast.AbstractNodeVisitor;
Expand All @@ -50,6 +57,7 @@
import org.opensearch.sql.ast.tree.Head;
import org.opensearch.sql.ast.tree.Kmeans;
import org.opensearch.sql.ast.tree.Limit;
import org.opensearch.sql.ast.tree.ML;
import org.opensearch.sql.ast.tree.Parse;
import org.opensearch.sql.ast.tree.Project;
import org.opensearch.sql.ast.tree.RareTopN;
Expand Down Expand Up @@ -83,6 +91,7 @@
import org.opensearch.sql.planner.logical.LogicalEval;
import org.opensearch.sql.planner.logical.LogicalFilter;
import org.opensearch.sql.planner.logical.LogicalLimit;
import org.opensearch.sql.planner.logical.LogicalML;
import org.opensearch.sql.planner.logical.LogicalMLCommons;
import org.opensearch.sql.planner.logical.LogicalPlan;
import org.opensearch.sql.planner.logical.LogicalProject;
Expand Down Expand Up @@ -137,19 +146,21 @@ public LogicalPlan visitRelation(Relation node, AnalysisContext context) {
.stream()
.map(Catalog::getName)
.collect(Collectors.toSet());
CatalogSchemaIdentifierName catalogSchemaIdentifierName
= new CatalogSchemaIdentifierName(qualifiedName.getParts(), allowedCatalogNames);
String tableName = catalogSchemaIdentifierName.getIdentifierName();
CatalogSchemaIdentifierNameResolver catalogSchemaIdentifierNameResolver
= new CatalogSchemaIdentifierNameResolver(qualifiedName.getParts(), allowedCatalogNames);
String tableName = catalogSchemaIdentifierNameResolver.getIdentifierName();
context.push();
TypeEnvironment curEnv = context.peek();
Table table;
if (CATALOGS_TABLE_NAME.equals(tableName)) {
table = new CatalogTable(catalogService);
} else {
table = catalogService
.getCatalog(catalogSchemaIdentifierName.getCatalogName())
.getCatalog(catalogSchemaIdentifierNameResolver.getCatalogName())
.getStorageEngine()
.getTable(tableName);
.getTable(new CatalogSchemaName(catalogSchemaIdentifierNameResolver.getCatalogName(),
catalogSchemaIdentifierNameResolver.getSchemaName()),
catalogSchemaIdentifierNameResolver.getIdentifierName());
}
table.getFieldTypes().forEach((k, v) -> curEnv.define(new Symbol(Namespace.FIELD_NAME, k), v));

Expand Down Expand Up @@ -181,17 +192,24 @@ public LogicalPlan visitTableFunction(TableFunction node, AnalysisContext contex
.stream()
.map(Catalog::getName)
.collect(Collectors.toSet());
CatalogSchemaIdentifierName catalogSchemaIdentifierName
= new CatalogSchemaIdentifierName(qualifiedName.getParts(), allowedCatalogNames);
CatalogSchemaIdentifierNameResolver catalogSchemaIdentifierNameResolver
= new CatalogSchemaIdentifierNameResolver(qualifiedName.getParts(), allowedCatalogNames);

FunctionName functionName = FunctionName.of(catalogSchemaIdentifierName.getIdentifierName());
FunctionName functionName
= FunctionName.of(catalogSchemaIdentifierNameResolver.getIdentifierName());
List<Expression> arguments = node.getArguments().stream()
.map(unresolvedExpression -> this.expressionAnalyzer.analyze(unresolvedExpression, context))
.collect(Collectors.toList());
TableFunctionImplementation tableFunctionImplementation
= (TableFunctionImplementation) repository.compile(
catalogSchemaIdentifierName.getCatalogName(), functionName, arguments);
return new LogicalRelation(catalogSchemaIdentifierName.getIdentifierName(),
catalogSchemaIdentifierNameResolver.getCatalogName(), functionName, arguments);
context.push();
TypeEnvironment curEnv = context.peek();
Table table = tableFunctionImplementation.applyArguments();
table.getFieldTypes().forEach((k, v) -> curEnv.define(new Symbol(Namespace.FIELD_NAME, k), v));
curEnv.define(new Symbol(Namespace.INDEX_NAME,
catalogSchemaIdentifierNameResolver.getIdentifierName()), STRUCT);
return new LogicalRelation(catalogSchemaIdentifierNameResolver.getIdentifierName(),
tableFunctionImplementation.applyArguments());
}

Expand Down Expand Up @@ -503,6 +521,19 @@ public LogicalPlan visitAD(AD node, AnalysisContext context) {
return new LogicalAD(child, options);
}

/**
 * Build {@link LogicalML} for the ml command: analyze the child plan, then
 * register every output column of the ML operator in the current type
 * environment so downstream operators can resolve them.
 */
@Override
public LogicalPlan visitML(ML node, AnalysisContext context) {
  LogicalPlan child = node.getChild().get(0).accept(this, context);
  TypeEnvironment env = context.peek();
  // Expose each (field name -> type) of the ML output schema as a resolvable field.
  node.getOutputSchema(env)
      .forEach((field, type) -> env.define(new Symbol(Namespace.FIELD_NAME, field), type));
  return new LogicalML(child, node.getArguments());
}

/**
* The first argument is always "asc", others are optional.
* Given nullFirst argument, use its value. Otherwise just use DEFAULT_ASC/DESC.
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,78 @@
/*
*
* * Copyright OpenSearch Contributors
* * SPDX-License-Identifier: Apache-2.0
*
*/

package org.opensearch.sql.analysis;

import java.util.List;
import java.util.Set;

/**
 * Resolves the catalog name, schema name and identifier (table or table-function
 * name) from the parts of a fully qualified name such as
 * {@code catalog.schema.table}. Missing leading parts fall back to
 * {@link #DEFAULT_CATALOG_NAME} and {@link #DEFAULT_SCHEMA_NAME}.
 */
public class CatalogSchemaIdentifierNameResolver {

  public static final String DEFAULT_CATALOG_NAME = "@opensearch";
  public static final String DEFAULT_SCHEMA_NAME = "default";
  public static final String INFORMATION_SCHEMA_NAME = "information_schema";

  private String catalogName = DEFAULT_CATALOG_NAME;
  private String schemaName = DEFAULT_SCHEMA_NAME;
  private String identifierName;

  private static final String DOT = ".";

  /**
   * Captures catalog, schema and identifier from a fully qualified name.
   * Used to resolve CatalogSchemaTable names and, for table functions,
   * CatalogSchemaFunction names.
   *
   * @param parts parts of the fully qualified name.
   * @param allowedCatalogs names of catalogs registered in the system.
   */
  public CatalogSchemaIdentifierNameResolver(List<String> parts, Set<String> allowedCatalogs) {
    List<String> remainingParts = captureSchemaName(captureCatalogName(parts, allowedCatalogs));
    identifierName = String.join(DOT, remainingParts);
  }

  public String getIdentifierName() {
    return identifierName;
  }

  public String getCatalogName() {
    return catalogName;
  }

  public String getSchemaName() {
    return schemaName;
  }

  // Capture catalog name and return remaining parts (schema name and identifier name)
  // from the fully qualified name.
  private List<String> captureCatalogName(List<String> parts, Set<String> allowedCatalogs) {
    // The `size > 1` guard must apply to BOTH catalog checks: the previous
    // `a && b || c` form parsed as `(a && b) || c`, so a single-part name equal to
    // the default catalog consumed the only part and left an empty identifier.
    if (parts.size() > 1
        && (allowedCatalogs.contains(parts.get(0))
            || DEFAULT_CATALOG_NAME.equals(parts.get(0)))) {
      catalogName = parts.get(0);
      return parts.subList(1, parts.size());
    } else {
      return parts;
    }
  }

  // Capture schema name and return the remaining parts (identifier name)
  // from the fully qualified name.
  private List<String> captureSchemaName(List<String> parts) {
    // Use equals(): contains() is a substring test, which would wrongly treat any
    // substring of "information_schema" (e.g. "schema") as the information schema.
    if (parts.size() > 1
        && (DEFAULT_SCHEMA_NAME.equals(parts.get(0))
            || INFORMATION_SCHEMA_NAME.equals(parts.get(0)))) {
      schemaName = parts.get(0);
      return parts.subList(1, parts.size());
    } else {
      return parts;
    }
  }

}
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@

package org.opensearch.sql.analysis;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import java.util.ArrayList;
Expand All @@ -21,6 +22,7 @@
import org.opensearch.sql.ast.expression.AggregateFunction;
import org.opensearch.sql.ast.expression.AllFields;
import org.opensearch.sql.ast.expression.And;
import org.opensearch.sql.ast.expression.Argument;
import org.opensearch.sql.ast.expression.Case;
import org.opensearch.sql.ast.expression.Cast;
import org.opensearch.sql.ast.expression.Compare;
Expand Down Expand Up @@ -151,9 +153,13 @@ public Expression visitAggregateFunction(AggregateFunction node, AnalysisContext
Optional<BuiltinFunctionName> builtinFunctionName =
BuiltinFunctionName.ofAggregation(node.getFuncName());
if (builtinFunctionName.isPresent()) {
Expression arg = node.getField().accept(this, context);
ImmutableList.Builder<Expression> builder = ImmutableList.builder();
builder.add(node.getField().accept(this, context));
for (UnresolvedExpression arg : node.getArgList()) {
builder.add(arg.accept(this, context));
}
Aggregator aggregator = (Aggregator) repository.compile(
builtinFunctionName.get().getName(), Collections.singletonList(arg));
builtinFunctionName.get().getName(), builder.build());
aggregator.distinct(node.getDistinct());
if (node.condition() != null) {
aggregator.condition(analyze(node.condition(), context));
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,8 @@

package org.opensearch.sql.analysis;

import static org.opensearch.sql.analysis.symbol.Namespace.FIELD_NAME;

import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Optional;
Expand Down Expand Up @@ -82,7 +84,7 @@ public void define(Symbol symbol, ExprType type) {
* @param ref {@link ReferenceExpression}
*/
public void define(ReferenceExpression ref) {
define(new Symbol(Namespace.FIELD_NAME, ref.getAttr()), ref.type());
define(new Symbol(FIELD_NAME, ref.getAttr()), ref.type());
}

public void remove(Symbol symbol) {
Expand All @@ -93,6 +95,14 @@ public void remove(Symbol symbol) {
* Remove ref.
*/
public void remove(ReferenceExpression ref) {
remove(new Symbol(Namespace.FIELD_NAME, ref.getAttr()));
remove(new Symbol(FIELD_NAME, ref.getAttr()));
}

/**
 * Clear all fields defined in the current environment.
 */
public void clearAllFields() {
  // Iterate over a defensive copy: remove() mutates the symbol table, and if
  // lookupAllFields() returns a live view this would otherwise risk a
  // ConcurrentModificationException. NOTE(review): confirm whether
  // lookupAllFields builds a fresh map — the copy is harmless either way.
  // Also use the statically imported FIELD_NAME consistently with the rest
  // of this file instead of mixing in Namespace.FIELD_NAME.
  new LinkedHashMap<>(lookupAllFields(FIELD_NAME))
      .keySet()
      .forEach(v -> remove(new Symbol(FIELD_NAME, v)));
}
}
Loading

0 comments on commit fbd6c7a

Please sign in to comment.